86 Commits

Author SHA1 Message Date
cb0ddb1338 test fixes 2024-11-08 15:26:26 +01:00
39ec3e795c switch to thiserror completely 2024-11-08 14:53:53 +01:00
3b920cbdd8 switch to thiserror completely 2024-11-08 14:17:36 +01:00
c0b4653c01 Merge pull request 'bump CI msrv check' (#114) from bump-msrv-check into main
Reviewed-on: #114
2024-11-08 11:27:52 +01:00
f156833985 bump CI msrv check 2024-11-08 11:26:51 +01:00
9aea3dba00 Merge pull request 'bump dependencies' (#113) from bump-dependencies into main
Reviewed-on: #113
2024-11-08 11:14:04 +01:00
48247a0a87 bump thiserror and zerocopy 2024-11-08 11:13:41 +01:00
f70b957d9a Merge pull request 'docs fixes' (#112) from smaller-doc-fixes into main
Reviewed-on: #112
2024-11-07 23:28:41 +01:00
fbf953df0e docs fixes 2024-11-04 11:42:51 +01:00
f135d54364 Merge pull request 'prepare v0.12.0' (#111) from prepare-v0.12.0 into main
Reviewed-on: #111
2024-09-10 17:58:03 +02:00
d8b2a3dfea prepare v0.12.0
2024-09-10 17:51:31 +02:00
448b76be91 Merge pull request 'condition code bugfix' (#110) from cfdp-cond-code-bugfix into main
Reviewed-on: #110
2024-08-29 09:47:27 +02:00
027b01f00f condition code bugfix
2024-08-29 09:46:40 +02:00
bf15b22889 Merge pull request 'added max file segment length calculator' (#109) from file-segment-calculator into main
Reviewed-on: #109
2024-08-21 14:29:16 +02:00
16f91b562d added max file segment length calculator
2024-08-21 14:26:11 +02:00
cd77b806fe Merge pull request 'Added additional converter method' (#108) from msgs-to-user-converter-method into main
Reviewed-on: #108
2024-08-21 11:20:33 +02:00
43c88da3f2 Added additional converter method
2024-08-20 17:24:53 +02:00
b19a61b859 Merge pull request 'update msg to user module' (#107) from cfdp-msg-to-user-update into main
Reviewed-on: #107
2024-08-20 17:17:11 +02:00
8aa957b8bb update msg to user module
2024-08-20 16:56:25 +02:00
190fa1befc Merge pull request 'Added generic sequence counter module' (#106) from seq-count-module into main
Reviewed-on: #106
2024-08-20 11:20:07 +02:00
175b61deca Added generic sequence counter module
2024-08-20 10:57:53 +02:00
51c28b5cc6 Merge pull request 'Github MSRV version update' (#105) from github-msrv into main
Reviewed-on: #105
2024-08-19 10:58:31 +02:00
45cc74daa7 Github MSRV version update
2024-08-19 10:44:33 +02:00
191c6f8146 Merge pull request 'Bump MSRV and delegate version' (#104) from bump-msrv-delegate-version into main
Reviewed-on: #104
2024-08-19 10:42:29 +02:00
5449884b2e Bump MSRV and delegate version
2024-08-19 02:23:34 -06:00
9c93c76193 Merge pull request 'Update EOF PDU API' (#103) from eof-pdu-update into main
Reviewed-on: #103
2024-08-19 10:18:19 +02:00
043927c7ef Update EOF PDU API
2024-07-21 10:14:41 -07:00
f4dc5a0302 Merge pull request 'added new API for file data PDU' (#102) from file-data-pdu-update into main
Reviewed-on: #102
2024-07-21 18:25:08 +02:00
9166faa4ae optimization
2024-07-19 11:29:37 -07:00
ed808e69d4 added new API for file data PDU
2024-07-19 10:41:31 -07:00
d146b6cf57 Merge pull request 'Metadata PDU creator update' (#101) from metadata-pdu-creator-update into main
Reviewed-on: #101
2024-07-14 17:08:46 +02:00
ff0c9d8c70 Update and simplify Metadata PDU creator API
2024-07-09 16:30:48 +02:00
c40bc855a2 Merge pull request 'add owned TLV type' (#98) from cfdp-tlv-owned-type into main
Reviewed-on: #98
2024-07-09 16:08:53 +02:00
81423fc6e8 add owned TLV type
2024-07-09 16:04:08 +02:00
a399b11a8e Merge pull request 'update documentation build' (#99) from update-docs-build into main
Reviewed-on: #99
2024-07-03 16:14:46 +02:00
9d4c7446a3 Merge branch 'main' into update-docs-build
2024-06-25 16:20:08 +02:00
b87f7d73b1 Merge pull request 'clippy fix' (#100) from clippy-fix into main
Reviewed-on: #100
2024-06-25 16:20:01 +02:00
80744eea16 clippy fix
2024-06-25 16:19:30 +02:00
a5918bfd4a update documentation build
2024-06-25 16:07:07 +02:00
0e347b0e37 Merge pull request 'Bump MSRV' (#97) from bump-msrv into main
Reviewed-on: #97
2024-05-19 13:07:12 +02:00
58dabb6f2f specify exact required version
2024-05-19 09:13:12 +02:00
7fd65aa592 bumped MSRV
2024-05-19 09:12:39 +02:00
0024afc83e Merge pull request 'prep patch release' (#96) from prep-v0.11.2 into main
Reviewed-on: #96
2024-05-19 09:02:46 +02:00
c48bd848d3 prep patch release
2024-05-19 08:49:03 +02:00
b8be9ae641 Merge pull request 'Fixes for Miri' (#95) from fixes-for-miri into main
Reviewed-on: #95
2024-05-15 13:03:24 +02:00
c2506dbba9 Merge branch 'main' into fixes-for-miri
2024-05-14 19:25:07 +02:00
b842b9d11a Merge pull request 'remove defmt::Format impl for MetadataPduCreator' (#94) from fix-defmt-derives into main
Reviewed-on: #94
2024-05-14 19:24:57 +02:00
374c034e92 add miri chapter in README
2024-05-14 15:37:20 +02:00
791c7f6e02 it is now possible to run cargo miri 2024-05-14 15:34:40 +02:00
8001938507 remove defmt::Format impl for MetadataPduCreator
2024-05-14 15:01:26 +02:00
73ab7ff148 Merge pull request 'add doctests to github CI' (#93) from github-ci-doctest into main
Reviewed-on: #93
2024-05-02 14:56:13 +02:00
c59d01174f add doctests to github CI
2024-05-02 14:48:31 +02:00
eb49bff0c9 Merge pull request 'update github CI' (#92) from update-github-ci into main
Reviewed-on: #92
2024-05-02 14:29:53 +02:00
af392d40d0 this might work
2024-05-02 14:22:03 +02:00
b78bfe2114 some fixes
2024-05-02 14:16:20 +02:00
69a3b1d8f3 update github CI
2024-05-02 14:12:26 +02:00
e7b3ba9575 Merge pull request 'date correction' (#91) from date-correction into main
Reviewed-on: #91
2024-04-22 10:19:19 +02:00
c515535ccd date correction
2024-04-22 10:18:35 +02:00
95158a8cd2 Merge pull request 'prepare next patch version' (#90) from small-improvements-and-fixes into main
Reviewed-on: #90
2024-04-22 10:15:21 +02:00
8b1ccb0cd0 prepare next patch version 2024-04-20 10:42:36 +02:00
619b22e58f Merge pull request 'prepare v0.11.0' (#89) from prep_v0.11.0 into main
Reviewed-on: #89
2024-04-16 19:23:17 +02:00
55222d92b3 small typo fix
2024-04-16 19:17:17 +02:00
8e1934e604 prepare release v0.11.0
2024-04-16 19:15:04 +02:00
5f37978c56 Merge pull request 'added small defmt test' (#88) from added-small-defmt-test into main
Reviewed-on: #88
2024-04-16 15:34:41 +02:00
97bbb14168 Merge branch 'main' into added-small-defmt-test 2024-04-16 15:34:34 +02:00
a65a98f43f Merge pull request 'clippy and msrv fix' (#87) from ci-github-fixes into main
Reviewed-on: #87
2024-04-16 15:34:27 +02:00
e1a200e65b Merge branch 'main' into added-small-defmt-test
2024-04-16 15:14:23 +02:00
b55c7db3fc clippy and msrv fix
2024-04-16 15:13:43 +02:00
944bcf1320 Merge pull request 'bump MSRV' (#86) from bump-msrv into main
Reviewed-on: #86
2024-04-13 18:48:45 +02:00
8972dcbfc0 bump MSRV
2024-04-13 17:39:53 +02:00
04b671fa6f Merge pull request 'moved CCSDS constant' (#85) from move-ccsds-constant into main
Reviewed-on: #85
2024-04-13 17:19:15 +02:00
533afc33fa moved CCSDS constant
2024-04-13 12:10:32 +02:00
50c56f6504 added small defmt test
2024-04-04 16:35:22 +02:00
9e02e00d1a Merge pull request 'prepare next release candidate' (#84) from prep_v0.11.0-rc.2 into main
Reviewed-on: #84
2024-04-04 14:21:40 +02:00
d8676ae711 prepare next release candidate
2024-04-04 14:12:33 +02:00
9711159969 Merge pull request 'use cargo nextest in CI for testing' (#83) from use-nextest-as-test-runner-ci into main
Reviewed-on: #83
2024-04-04 13:20:27 +02:00
57adb619b3 use cargo nextest in CI for testing
2024-04-04 13:13:43 +02:00
fe52657d11 Merge pull request 'ECSS Ctors: Expect SP header by copy' (#82) from ecss-take-sp-header-by-copy into main
Reviewed-on: #82
2024-04-04 12:20:45 +02:00
50b86939a1 this API is a bit more ergonomic
2024-04-04 12:07:37 +02:00
179984f258 Merge pull request 'More smaller tweaks' (#81) from more-smaller-tweaks into main
Reviewed-on: #81
2024-04-04 11:58:48 +02:00
deb89362a4 More smaller tweaks
2024-04-04 11:47:39 +02:00
4cd40f37ce Merge pull request 'added additional ctors which only set the APID' (#80) from addition-sp-header-ctors into main
Reviewed-on: #80
2024-04-03 22:59:19 +02:00
bbd66a6a8b added a lot of inline attrs
2024-04-03 21:56:26 +02:00
0115461bb5 added additional ctors which only set the APID
2024-04-03 21:30:23 +02:00
ca90393d95 Merge pull request 'unify CCSDS API as well' (#79) from unify-ccsds-api into main
Reviewed-on: #79
2024-04-03 19:45:14 +02:00
325e7d6ff3 unify CCSDS API as well
2024-04-03 18:47:00 +02:00
29 changed files with 2102 additions and 811 deletions

View File

@@ -1,42 +1,39 @@
-on: [push]
 name: ci
+on: [push, pull_request]
 
 jobs:
   check:
-    name: Check
+    name: Check build
     strategy:
       matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    runs-on: ${{ matrix.os }}
    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-      - uses: actions-rs/cargo@v1
-        with:
-          command: check
-          args: --release
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+      - run: cargo check --release
 
-  msrv:
-    name: Check with MSRV
+  test:
+    name: Run Tests
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: 1.61.0
-          override: true
-          profile: minimal
-      - uses: actions-rs/cargo@v1
-        with:
-          command: check
-          args: --release
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+      - name: Install nextest
+        uses: taiki-e/install-action@nextest
+      - run: cargo nextest run --all-features
+      - run: cargo test --doc
+
+  msrv:
+    name: Check MSRV
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@1.81.0
+      - run: cargo check --release
 
   cross-check:
-    name: Check Cross
+    name: Check Cross-Compilation
     runs-on: ubuntu-latest
     strategy:
       matrix:
@@ -44,70 +41,32 @@ jobs:
         - armv7-unknown-linux-gnueabihf
         - thumbv7em-none-eabihf
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
         with:
-          profile: minimal
-          toolchain: stable
-          target: ${{ matrix.target }}
-          override: true
-      - uses: actions-rs/cargo@v1
-        with:
-          use-cross: true
-          command: check
-          args: --release --target=${{ matrix.target }} --no-default-features
+          targets: "armv7-unknown-linux-gnueabihf, thumbv7em-none-eabihf"
+      - run: cargo check --release --target=${{matrix.target}} --no-default-features
 
   fmt:
-    name: Rustfmt
+    name: Check formatting
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-      - run: rustup component add rustfmt
-      - uses: actions-rs/cargo@v1
-        with:
-          command: fmt
-          args: --all -- --check
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+      - run: cargo fmt --all -- --check
 
-  check-doc:
+  docs:
     name: Check Documentation Build
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: nightly
-          override: true
-          profile: minimal
-      - uses: actions-rs/cargo@v1
-        with:
-          command: doc
-          args: --all-features
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@nightly
+      - run: RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features
 
   clippy:
     name: Clippy
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-      - run: rustup component add clippy
-      - uses: actions-rs/cargo@v1
-        with:
-          command: clippy
-          args: -- -D warnings
-
-  ci:
-    if: ${{ success() }}
-    # all new jobs must be added to this list
-    needs: [check, fmt, clippy]
-    runs-on: ubuntu-latest
-    steps:
-      - name: CI succeeded
-        run: exit 0
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+      - run: cargo clippy -- -D warnings

View File

@@ -8,6 +8,93 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

# [unreleased]
- Bumped MSRV to 1.81.0
- Bump `zerocopy` to v0.8.0
- Bump `thiserror` to v2.0.0
# [v0.12.0] 2024-09-10
- Bumped MSRV to 1.70.0
## Added
- Added new `cfdp::tlv::TlvOwned` type which erases the lifetime and is clonable.
- Dedicated `cfdp::tlv::TlvLvDataTooLargeError` error struct for APIs where this is the only
possible API error.
- Added a File Data PDU creator API which takes the expected file data size and then exposes the
unwritten file data field as a mutable slice. This allows reading data from the virtual file
system directly into the file data buffer without an intermediate buffer (see the sketch after
this list).
- Generic `EofPdu::new` constructor.
- Added generic sequence counter module.
- Added `MsgToUserTlv::to_tlv` converter which reduces the specific type to a generic `Tlv`.
- Implemented `From<MsgToUserTlv> for Tlv` converter trait.
- Added CFDP maximum file segment length calculator method `calculate_max_file_seg_len_for_max_packet_len_and_pdu_header`.
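
The following is an illustrative, editorial sketch (not part of the repository changelog) of how
the new file data PDU creator API and the segment length helper are used. It closely mirrors the
new unit test `test_fd_pdu_creator_with_reserved_field_no_crc` shown further down in this diff;
the `TEST_*` constants stand in for real entity IDs and a transaction sequence number, and error
handling is elided:

```rust
// Sketch only; mirrors the new test code in src/cfdp file data PDU module below.
let pdu_conf =
    CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap();
let pdu_header = PduHeader::new_for_file_data_default(pdu_conf, 0);

// How much file data fits into a single PDU for an (assumed) 128 byte packet limit?
let max_seg_len =
    calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 128, None);

let file_data = "hello world!";
assert!(file_data.len() <= max_seg_len);
let creator = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
    pdu_header,
    10, // file offset
    file_data.len() as u64,
);
let mut buf: [u8; 64] = [0; 64];
// Write everything except the file data field (and the CRC, if enabled).
let mut unwritten = creator.write_to_bytes_partially(&mut buf).unwrap();
// Read data into the reserved field directly, e.g. from a virtual file system API.
unwritten
    .file_data_field_mut()
    .copy_from_slice(file_data.as_bytes());
// Finalize the PDU; this adds the CRC where applicable and returns the written length.
let written_len = unwritten.finish();
let fd_pdu = FileDataPdu::from_bytes(&buf[..written_len]).unwrap();
assert_eq!(fd_pdu.file_data(), file_data.as_bytes());
```

The partial-write flow exists so that a file system read can target the reserved data field
directly instead of going through a staging buffer.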
## Added and Changed
- Added new `ReadableTlv` to avoid some boilerplate code and have a common abstraction implemented
for both `Tlv` and `TlvOwned` to read the raw TLV data field and its length.
- Replaced `cfdp::tlv::TlvLvError` by `cfdp::tlv::TlvLvDataTooLargeError` where applicable.
## Fixed
- Fixed an error in the EOF writer which wrote the fault location to the wrong buffer position.
- cfdp `ConditionCode::CheckLimitReached` previously had the wrong numerical value of `0b1001` (9)
and now has the correct value of `0b1010` (10).
## Changed
- Minor documentation build updates.
- Increased delegate version range to v0.13
# [v0.11.2] 2024-05-19
- Bumped MSRV to 1.68.2
## Fixed
- Removed `defmt::Format` impl for `MetadataPduCreator` which seems to be problematic.
# [v0.11.1] 2024-04-22
## Fixed
- The default data length for `SpHeader` constructors where the data field length is not
specified is now 0.
- `SpHeader::new_from_fields` is now public.
## Added
- `SpHeader::to_vec` method.
# [v0.11.0] 2024-04-16
## Changed
- Moved `CCSDS_HEADER_LEN` constant to the crate root.
## Added
- Added `SpacePacketHeader` type alias for `SpHeader` type.
# [v0.11.0-rc.2] 2024-04-04
## Changed
- Renamed `PacketId` and `PacketSequenceCtrl` `new` method to `new_checked` and former
`new_const` method to `new`.
- Renamed `tc`, `tm`, `tc_unseg` and `tm_unseg` variants for `PacketId` and `SpHeader`
to `new_for_tc_checked`, `new_for_tm_checked`, `new_for_unseg_tc_checked` and
`new_for_unseg_tm_checked`.
- `PusTmCreator` and `PusTcCreator` now expect a regular instance of `SpHeader` instead of
a mutable reference.
## Added
- `SpHeader::new_from_apid` and `SpHeader::new_from_apid_checked` constructor.
- `#[inline]` attribute for a lot of small functions.
# [v0.11.0-rc.1] 2024-04-03

Major API changes for the time API. If you are using the time API, it is strongly recommended

View File

@@ -1,8 +1,8 @@
 [package]
 name = "spacepackets"
-version = "0.11.0-rc.1"
+version = "0.12.0"
 edition = "2021"
-rust-version = "1.61"
+rust-version = "1.81.0"
 authors = ["Robin Mueller <muellerr@irs.uni-stuttgart.de>"]
 description = "Generic implementations for various CCSDS and ECSS packet standards"
 homepage = "https://egit.irs.uni-stuttgart.de/rust/spacepackets"
@@ -14,15 +14,16 @@ categories = ["aerospace", "aerospace::space-protocols", "no-std", "hardware-sup
 [dependencies]
 crc = "3"
-delegate = ">=0.8, <0.11"
+delegate = ">=0.8, <=0.13"
+paste = "1"
 
 [dependencies.zerocopy]
-version = "0.7"
+version = "0.8"
 features = ["derive"]
 
 [dependencies.thiserror]
-version = "1"
-optional = true
+version = "2"
+default-features = false
 
 [dependencies.num_enum]
 version = ">0.5, <=0.7"
@@ -52,19 +53,17 @@ default-features = false
 version = "0.3"
 optional = true
 
-[dev-dependencies]
-postcard = "1"
-chrono = "0.4"
-
 [features]
 default = ["std"]
-std = ["chrono/std", "chrono/clock", "alloc", "thiserror"]
-serde = ["dep:serde", "chrono/serde"]
-alloc = ["postcard/alloc", "chrono/alloc"]
+std = ["alloc", "chrono/std", "chrono/clock", "thiserror/std"]
+serde = ["dep:serde", "chrono?/serde"]
+alloc = ["chrono?/alloc", "defmt?/alloc", "serde?/alloc"]
+chrono = ["dep:chrono"]
 timelib = ["dep:time"]
+defmt = ["dep:defmt"]
+
+[dev-dependencies]
+postcard = { version = "1", features = ["alloc"] }
+chrono = "0.4"
 
 [package.metadata.docs.rs]
 all-features = true
-rustdoc-args = ["--cfg", "docs_rs", "--generate-link-to-definition"]
+rustdoc-args = ["--generate-link-to-definition"]

View File

@@ -29,10 +29,6 @@ Currently, this includes the following components:

 `spacepackets` supports various runtime environments and is also suitable for `no_std` environments.

-It also offers optional support for [`serde`](https://serde.rs/). This allows serializing and
-deserializing them with an appropriate `serde` provider like
-[`postcard`](https://github.com/jamesmunns/postcard).
-
 ## Default features

 - [`std`](https://doc.rust-lang.org/std/): Enables functionality relying on the standard library.
@@ -65,3 +61,13 @@ cargo install grcov --locked

 After that, you can simply run `coverage.py` to test the project with coverage. You can optionally
 supply the `--open` flag to open the coverage report in your web browser.
+
+# Miri
+
+You can run the [`miri`](https://github.com/rust-lang/miri) tool on this library to check for
+undefined behaviour (UB). This library does not use any `unsafe` code blocks, but `miri` could
+still catch UB from used libraries.
+
+```sh
+cargo +nightly miri nextest run --all-features
+```

View File

@@ -15,7 +15,10 @@ RUN rustup install nightly && \
     rustup target add thumbv7em-none-eabihf armv7-unknown-linux-gnueabihf && \
     rustup component add rustfmt clippy llvm-tools-preview

+# Get grcov
 RUN curl -sSL https://github.com/mozilla/grcov/releases/download/v0.8.19/grcov-x86_64-unknown-linux-gnu.tar.bz2 | tar -xj --directory /usr/local/bin
+# Get nextest
+RUN curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C ${CARGO_HOME:-~/.cargo}/bin

 # SSH stuff to allow deployment to doc server
 RUN adduser --uid 114 jenkins

View File

@@ -21,7 +21,9 @@ pipeline {
     }
     stage('Docs') {
       steps {
-        sh 'cargo +nightly doc --all-features'
+        sh """
+          RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features
+        """
       }
     }
     stage('Rustfmt') {
@@ -31,7 +33,8 @@ pipeline {
     }
     stage('Test') {
       steps {
-        sh 'cargo test --all-features'
+        sh 'cargo nextest r --all-features'
+        sh 'cargo test --doc'
       }
     }
     stage('Check with all features') {

docs.sh Executable file
View File

@@ -0,0 +1,3 @@
#!/bin/sh
export RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options"
cargo +nightly doc --all-features --open

View File

@@ -4,11 +4,14 @@ Checklist for new releases

 # Pre-Release

 1. Make sure any new modules are documented sufficiently enough and check docs with
-   `cargo +nightly doc --all-features --config 'build.rustdocflags=["--cfg", "docs_rs"]' --open`.
+   `RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features --open`
+   or `cargo +nightly doc --all-features --config 'build.rustdocflags=["--cfg", "docsrs" --generate-link-to-definition"]' --open`
+   (was problematic on more recent nightly versions).
 2. Bump version specifier in `Cargo.toml`.
 3. Update `CHANGELOG.md`: Convert `unreleased` section into version section with date and add new
    `unreleased` section.
-4. Run `cargo test --all-features` or `cargo nextest r --all-features`.
+4. Run `cargo test --all-features` or `cargo nextest r --all-features` together with
+   `cargo test --doc`.
 5. Run `cargo fmt` and `cargo clippy`. Check `cargo msrv` against MSRV in `Cargo.toml`.
 6. Wait for CI/CD results for EGit and Github. These also check cross-compilation for bare-metal
    targets.

View File

@@ -1,5 +1,4 @@
 //! Generic CFDP length-value (LV) abstraction as specified in CFDP 5.1.8.
-use crate::cfdp::TlvLvError;
 use crate::ByteConversionError;
 use core::str::Utf8Error;
 #[cfg(feature = "serde")]
@@ -7,6 +6,8 @@ use serde::{Deserialize, Serialize};
 #[cfg(feature = "std")]
 use std::string::String;
 
+use super::TlvLvDataTooLargeError;
+
 pub const MIN_LV_LEN: usize = 1;
 
 /// Generic CFDP length-value (LV) abstraction as specified in CFDP 5.1.8.
@@ -62,9 +63,10 @@ pub(crate) fn generic_len_check_deserialization(
 }
 
 impl<'data> Lv<'data> {
-    pub fn new(data: &[u8]) -> Result<Lv, TlvLvError> {
+    #[inline]
+    pub fn new(data: &[u8]) -> Result<Lv, TlvLvDataTooLargeError> {
         if data.len() > u8::MAX as usize {
-            return Err(TlvLvError::DataTooLarge(data.len()));
+            return Err(TlvLvDataTooLargeError(data.len()));
         }
         Ok(Lv {
             data,
@@ -73,6 +75,7 @@ impl<'data> Lv<'data> {
     }
 
     /// Creates a LV with an empty value field.
+    #[inline]
     pub fn new_empty() -> Lv<'data> {
         Lv {
             data: &[],
@@ -82,44 +85,52 @@
     /// Helper function to build a string LV. This is especially useful for the file or directory
     /// path LVs
-    pub fn new_from_str(str_slice: &str) -> Result<Lv, TlvLvError> {
+    #[inline]
+    pub fn new_from_str(str_slice: &str) -> Result<Lv, TlvLvDataTooLargeError> {
         Self::new(str_slice.as_bytes())
     }
 
     /// Helper function to build a string LV. This is especially useful for the file or directory
     /// path LVs
     #[cfg(feature = "std")]
-    pub fn new_from_string(string: &'data String) -> Result<Lv<'data>, TlvLvError> {
+    #[inline]
+    pub fn new_from_string(string: &'data String) -> Result<Lv<'data>, TlvLvDataTooLargeError> {
         Self::new(string.as_bytes())
     }
 
     /// Returns the length of the value part, not including the length byte.
+    #[inline]
     pub fn len_value(&self) -> usize {
         self.data.len()
     }
 
     /// Returns the full raw length, including the length byte.
+    #[inline]
     pub fn len_full(&self) -> usize {
         self.len_value() + 1
     }
 
     /// Checks whether the value field is empty.
+    #[inline]
     pub fn is_empty(&self) -> bool {
         self.data.len() == 0
     }
 
+    #[inline]
     pub fn value(&self) -> &[u8] {
         self.data
     }
 
     /// If the LV was generated from a raw bytestream using [Self::from_bytes], the raw start
     /// of the LV can be retrieved with this method.
+    #[inline]
     pub fn raw_data(&self) -> Option<&[u8]> {
         self.raw_data
     }
 
     /// Convenience function to extract the value as a [str]. This is useful if the LV is
     /// known to contain a [str], for example being a file name.
+    #[inline]
     pub fn value_as_str(&self) -> Option<Result<&'data str, Utf8Error>> {
         if self.is_empty() {
             return None;
@@ -135,6 +146,7 @@ impl<'data> Lv<'data> {
     }
 
     /// Reads a LV from a raw buffer.
+    #[inline]
     pub fn from_bytes(buf: &'data [u8]) -> Result<Lv<'data>, ByteConversionError> {
         generic_len_check_deserialization(buf, MIN_LV_LEN)?;
         Self::from_be_bytes_no_len_check(buf)
@@ -151,6 +163,7 @@ impl<'data> Lv<'data> {
         MIN_LV_LEN + self.data.len()
     }
 
+    #[inline]
     pub(crate) fn from_be_bytes_no_len_check(
         buf: &'data [u8],
     ) -> Result<Lv<'data>, ByteConversionError> {
@@ -165,10 +178,10 @@ impl<'data> Lv<'data> {
 #[cfg(test)]
 pub mod tests {
-    use super::*;
     use alloc::string::ToString;
 
-    use crate::cfdp::TlvLvError;
+    use super::*;
     use crate::ByteConversionError;
     use std::string::String;
@@ -259,15 +272,11 @@ pub mod tests {
         let lv = Lv::new(&data_big);
         assert!(lv.is_err());
         let error = lv.unwrap_err();
-        if let TlvLvError::DataTooLarge(size) = error {
-            assert_eq!(size, u8::MAX as usize + 1);
-            assert_eq!(
-                error.to_string(),
-                "data with size 256 larger than allowed 255 bytes"
-            );
-        } else {
-            panic!("invalid exception {:?}", error)
-        }
+        assert_eq!(error.0, u8::MAX as usize + 1);
+        assert_eq!(
+            error.to_string(),
+            "data with size 256 larger than allowed 255 bytes"
+        );
     }
 
     #[test]

View File

@@ -1,11 +1,8 @@
 //! Low-level CCSDS File Delivery Protocol (CFDP) support according to [CCSDS 727.0-B-5](https://public.ccsds.org/Pubs/727x0b5.pdf).
 use crate::ByteConversionError;
-use core::fmt::{Display, Formatter};
 use num_enum::{IntoPrimitive, TryFromPrimitive};
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
-#[cfg(feature = "std")]
-use std::error::Error;
 
 pub mod lv;
 pub mod pdu;
@@ -116,7 +113,7 @@ pub enum ConditionCode {
     FileSizeError = 0b0110,
     NakLimitReached = 0b0111,
     InactivityDetected = 0b1000,
-    CheckLimitReached = 0b1001,
+    CheckLimitReached = 0b1010,
     UnsupportedChecksumType = 0b1011,
     /// Not an actual fault condition for which fault handler overrides can be specified
     SuspendRequestReceived = 0b1110,
@@ -176,76 +173,43 @@ impl Default for ChecksumType {
 pub const NULL_CHECKSUM_U32: [u8; 4] = [0; 4];
 
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "defmt", derive(defmt::Format))]
+#[error("data with size {0} larger than allowed {max} bytes", max = u8::MAX)]
+pub struct TlvLvDataTooLargeError(pub usize);
+
+/// First value: Found value. Second value: Expected value if there is one.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "defmt", derive(defmt::Format))]
+#[error("invalid TLV type field, found {found}, expected {expected:?}")]
+pub struct InvalidTlvTypeFieldError {
+    found: u8,
+    expected: Option<u8>,
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "defmt", derive(defmt::Format))]
 pub enum TlvLvError {
-    DataTooLarge(usize),
-    ByteConversion(ByteConversionError),
-    /// First value: Found value. Second value: Expected value if there is one.
-    InvalidTlvTypeField {
-        found: u8,
-        expected: Option<u8>,
-    },
-    /// Logically invalid value length detected. The value length may not exceed 255 bytes.
-    /// Depending on the concrete TLV type, the value length may also be logically invalid.
+    #[error("{0}")]
+    DataTooLarge(#[from] TlvLvDataTooLargeError),
+    #[error("byte conversion error: {0}")]
+    ByteConversion(#[from] ByteConversionError),
+    #[error("{0}")]
+    InvalidTlvTypeField(#[from] InvalidTlvTypeFieldError),
+    #[error("invalid value length {0}")]
     InvalidValueLength(usize),
     /// Only applies to filestore requests and responses. Second name was missing where one is
     /// expected.
+    #[error("second name missing for filestore request or response")]
     SecondNameMissing,
     /// Invalid action code for filestore requests or responses.
+    #[error("invalid action code {0}")]
     InvalidFilestoreActionCode(u8),
 }
 
-impl From<ByteConversionError> for TlvLvError {
-    fn from(value: ByteConversionError) -> Self {
-        Self::ByteConversion(value)
-    }
-}
-
-impl Display for TlvLvError {
-    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
-        match self {
-            TlvLvError::DataTooLarge(data_len) => {
-                write!(
-                    f,
-                    "data with size {} larger than allowed {} bytes",
-                    data_len,
-                    u8::MAX
-                )
-            }
-            TlvLvError::ByteConversion(e) => {
-                write!(f, "tlv or lv byte conversion: {}", e)
-            }
-            TlvLvError::InvalidTlvTypeField { found, expected } => {
-                write!(
-                    f,
-                    "invalid TLV type field, found {found}, expected {expected:?}"
-                )
-            }
-            TlvLvError::InvalidValueLength(len) => {
-                write!(f, "invalid value length {len}")
-            }
-            TlvLvError::SecondNameMissing => {
-                write!(f, "second name missing for filestore request or response")
-            }
-            TlvLvError::InvalidFilestoreActionCode(raw) => {
-                write!(f, "invalid filestore action code with raw value {raw}")
-            }
-        }
-    }
-}
-
-#[cfg(feature = "std")]
-impl Error for TlvLvError {
-    fn source(&self) -> Option<&(dyn Error + 'static)> {
-        match self {
-            TlvLvError::ByteConversion(e) => Some(e),
-            _ => None,
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
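
A small editorial sketch of how the reworked, `thiserror`-based error types above compose. The
crate paths in the `use` statement are an assumption, and the function is illustrative only:

```rust
use spacepackets::cfdp::{lv::Lv, TlvLvError};

// `Lv::new` now returns the narrower `TlvLvDataTooLargeError`; the `#[from]` attribute on
// `TlvLvError::DataTooLarge` lets `?` widen it automatically.
fn lv_from_user_data(data: &[u8]) -> Result<Lv<'_>, TlvLvError> {
    Ok(Lv::new(data)?)
}

fn main() {
    let too_large = [0u8; 256];
    let error = lv_from_user_data(&too_large).unwrap_err();
    // thiserror derives `Display`, so the message matches the former hand-written impl.
    assert_eq!(
        error.to_string(),
        "data with size 256 larger than allowed 255 bytes"
    );
}
```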

View File

@@ -25,20 +25,36 @@ pub struct EofPdu {
 }
 
 impl EofPdu {
-    pub fn new_no_error(mut pdu_header: PduHeader, file_checksum: u32, file_size: u64) -> Self {
+    pub fn new(
+        mut pdu_header: PduHeader,
+        condition_code: ConditionCode,
+        file_checksum: u32,
+        file_size: u64,
+        fault_location: Option<EntityIdTlv>,
+    ) -> Self {
         // Force correct direction flag.
         pdu_header.pdu_conf.direction = Direction::TowardsReceiver;
         let mut eof_pdu = Self {
             pdu_header,
-            condition_code: ConditionCode::NoError,
+            condition_code,
             file_checksum,
             file_size,
-            fault_location: None,
+            fault_location,
         };
         eof_pdu.pdu_header.pdu_datafield_len = eof_pdu.calc_pdu_datafield_len() as u16;
         eof_pdu
     }
 
+    pub fn new_no_error(pdu_header: PduHeader, file_checksum: u32, file_size: u64) -> Self {
+        Self::new(
+            pdu_header,
+            ConditionCode::NoError,
+            file_checksum,
+            file_size,
+            None,
+        )
+    }
+
     pub fn pdu_header(&self) -> &PduHeader {
         &self.pdu_header
     }
@@ -148,7 +164,7 @@ impl WritablePduPacket for EofPdu {
             &mut buf[current_idx..],
         )?;
         if let Some(fault_location) = self.fault_location {
-            current_idx += fault_location.write_to_bytes(buf)?;
+            current_idx += fault_location.write_to_bytes(&mut buf[current_idx..])?;
         }
         if self.crc_flag() == CrcFlag::WithCrc {
             current_idx = add_pdu_crc(buf, current_idx);
@@ -171,13 +187,23 @@ mod tests {
     use crate::cfdp::{ConditionCode, CrcFlag, LargeFileFlag, PduType, TransmissionMode};
     #[cfg(feature = "serde")]
     use crate::tests::generic_serde_test;
+    use crate::util::{UnsignedByteFieldU16, UnsignedEnum};
 
-    fn verify_state(&eof_pdu: &EofPdu, file_flag: LargeFileFlag) {
+    fn verify_state_no_error_no_crc(eof_pdu: &EofPdu, file_flag: LargeFileFlag) {
+        verify_state(eof_pdu, CrcFlag::NoCrc, file_flag, ConditionCode::NoError);
+    }
+
+    fn verify_state(
+        eof_pdu: &EofPdu,
+        crc_flag: CrcFlag,
+        file_flag: LargeFileFlag,
+        cond_code: ConditionCode,
+    ) {
         assert_eq!(eof_pdu.file_checksum(), 0x01020304);
         assert_eq!(eof_pdu.file_size(), 12);
-        assert_eq!(eof_pdu.condition_code(), ConditionCode::NoError);
-        assert_eq!(eof_pdu.crc_flag(), CrcFlag::NoCrc);
+        assert_eq!(eof_pdu.condition_code(), cond_code);
+        assert_eq!(eof_pdu.crc_flag(), crc_flag);
         assert_eq!(eof_pdu.file_flag(), file_flag);
         assert_eq!(eof_pdu.pdu_type(), PduType::FileDirective);
         assert_eq!(
@@ -197,7 +223,7 @@ mod tests {
         let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0);
         let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12);
         assert_eq!(eof_pdu.len_written(), pdu_header.header_len() + 2 + 4 + 4);
-        verify_state(&eof_pdu, LargeFileFlag::Normal);
+        verify_state_no_error_no_crc(&eof_pdu, LargeFileFlag::Normal);
     }
 
     #[test]
@@ -271,7 +297,7 @@ mod tests {
         buf[written - 1] -= 1;
         let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16;
         let error = EofPdu::from_bytes(&buf).unwrap_err();
-        if let PduError::ChecksumError(e) = error {
+        if let PduError::Checksum(e) = error {
             assert_eq!(e, crc);
         } else {
             panic!("expected crc error");
@@ -283,7 +309,7 @@ mod tests {
         let pdu_conf = common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Large);
         let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0);
         let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12);
-        verify_state(&eof_pdu, LargeFileFlag::Large);
+        verify_state_no_error_no_crc(&eof_pdu, LargeFileFlag::Large);
         assert_eq!(eof_pdu.len_written(), pdu_header.header_len() + 2 + 8 + 4);
     }
 
@@ -295,4 +321,48 @@ mod tests {
         let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12);
         generic_serde_test(eof_pdu);
     }
fn generic_test_with_fault_location_and_error(crc: CrcFlag) {
let pdu_conf = common_pdu_conf(crc, LargeFileFlag::Normal);
let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0);
let eof_pdu = EofPdu::new(
pdu_header,
ConditionCode::FileChecksumFailure,
0x01020304,
12,
Some(EntityIdTlv::new(UnsignedByteFieldU16::new(5).into())),
);
let mut expected_len = pdu_header.header_len() + 2 + 4 + 4 + 4;
if crc == CrcFlag::WithCrc {
expected_len += 2;
}
// Entity ID TLV increases the length by 4.
assert_eq!(eof_pdu.len_written(), expected_len);
verify_state(
&eof_pdu,
crc,
LargeFileFlag::Normal,
ConditionCode::FileChecksumFailure,
);
let eof_vec = eof_pdu.to_vec().unwrap();
let eof_read_back = EofPdu::from_bytes(&eof_vec);
if let Err(e) = eof_read_back {
panic!("deserialization failed with: {e}")
}
let eof_read_back = eof_read_back.unwrap();
assert_eq!(eof_read_back, eof_pdu);
assert!(eof_read_back.fault_location.is_some());
assert_eq!(eof_read_back.fault_location.unwrap().entity_id().value(), 5);
assert_eq!(eof_read_back.fault_location.unwrap().entity_id().size(), 2);
}
#[test]
fn test_with_fault_location_and_error() {
generic_test_with_fault_location_and_error(CrcFlag::NoCrc);
}
#[test]
fn test_with_fault_location_and_error_and_crc() {
generic_test_with_fault_location_and_error(CrcFlag::WithCrc);
}
}
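
A short editorial usage sketch for the extended EOF PDU API above. It follows the new test code
closely; `common_pdu_conf` is the test helper used in that module and stands in for whatever
`CommonPduConfig` a real transfer would use:

```rust
// Sketch based on the tests above.
let pdu_conf = common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Normal);
let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0);

// An EOF PDU with an explicit condition code and a fault location TLV.
let eof_pdu = EofPdu::new(
    pdu_header,
    ConditionCode::FileChecksumFailure,
    0x01020304, // file checksum
    12,         // file size
    Some(EntityIdTlv::new(UnsignedByteFieldU16::new(5).into())),
);
let raw = eof_pdu.to_vec().unwrap();
assert_eq!(EofPdu::from_bytes(&raw).unwrap(), eof_pdu);

// The previous `new_no_error` constructor is now a thin wrapper around `new`.
let _no_error = EofPdu::new_no_error(pdu_header, 0x01020304, 12);
```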

View File

@@ -92,16 +92,67 @@ impl<'seg_meta> SegmentMetadata<'seg_meta> {
     }
 }
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
struct FdPduBase<'seg_meta> {
pdu_header: PduHeader,
#[cfg_attr(feature = "serde", serde(borrow))]
segment_metadata: Option<SegmentMetadata<'seg_meta>>,
offset: u64,
}
impl CfdpPdu for FdPduBase<'_> {
fn pdu_header(&self) -> &PduHeader {
&self.pdu_header
}
fn file_directive_type(&self) -> Option<FileDirectiveType> {
None
}
}
impl FdPduBase<'_> {
fn calc_pdu_datafield_len(&self, file_data_len: u64) -> usize {
let mut len = core::mem::size_of::<u32>();
if self.pdu_header.pdu_conf.file_flag == LargeFileFlag::Large {
len += 4;
}
if self.segment_metadata.is_some() {
len += self.segment_metadata.as_ref().unwrap().written_len()
}
len += file_data_len as usize;
if self.crc_flag() == CrcFlag::WithCrc {
len += 2;
}
len
}
fn write_common_fields_to_bytes(&self, buf: &mut [u8]) -> Result<usize, PduError> {
let mut current_idx = self.pdu_header.write_to_bytes(buf)?;
if self.segment_metadata.is_some() {
current_idx += self
.segment_metadata
.as_ref()
.unwrap()
.write_to_bytes(&mut buf[current_idx..])?;
}
current_idx += write_fss_field(
self.pdu_header.common_pdu_conf().file_flag,
self.offset,
&mut buf[current_idx..],
)?;
Ok(current_idx)
}
}
 /// File Data PDU abstraction.
 ///
 /// For more information, refer to CFDP chapter 5.3.
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 pub struct FileDataPdu<'seg_meta, 'file_data> {
-    pdu_header: PduHeader,
     #[cfg_attr(feature = "serde", serde(borrow))]
-    segment_metadata: Option<SegmentMetadata<'seg_meta>>,
-    offset: u64,
+    common: FdPduBase<'seg_meta>,
     file_data: &'file_data [u8],
 }
@@ -134,42 +185,34 @@ impl<'seg_meta, 'file_data> FileDataPdu<'seg_meta, 'file_data> {
             pdu_header.seg_metadata_flag = SegmentMetadataFlag::Present;
         }
         let mut pdu = Self {
-            pdu_header,
-            segment_metadata,
-            offset,
+            common: FdPduBase {
+                pdu_header,
+                segment_metadata,
+                offset,
+            },
             file_data,
         };
-        pdu.pdu_header.pdu_datafield_len = pdu.calc_pdu_datafield_len() as u16;
+        pdu.common.pdu_header.pdu_datafield_len = pdu.calc_pdu_datafield_len() as u16;
         pdu
     }
 
     fn calc_pdu_datafield_len(&self) -> usize {
-        let mut len = core::mem::size_of::<u32>();
-        if self.pdu_header.pdu_conf.file_flag == LargeFileFlag::Large {
-            len += 4;
-        }
-        if self.segment_metadata.is_some() {
-            len += self.segment_metadata.as_ref().unwrap().written_len()
-        }
-        len += self.file_data.len();
-        if self.crc_flag() == CrcFlag::WithCrc {
-            len += 2;
-        }
-        len
+        self.common
+            .calc_pdu_datafield_len(self.file_data.len() as u64)
+    }
+
+    pub fn segment_metadata(&self) -> Option<&SegmentMetadata> {
+        self.common.segment_metadata.as_ref()
     }
 
     pub fn offset(&self) -> u64 {
-        self.offset
+        self.common.offset
     }
 
     pub fn file_data(&self) -> &'file_data [u8] {
         self.file_data
     }
 
-    pub fn segment_metadata(&self) -> Option<&SegmentMetadata> {
-        self.segment_metadata.as_ref()
-    }
-
     pub fn from_bytes<'buf: 'seg_meta + 'file_data>(buf: &'buf [u8]) -> Result<Self, PduError> {
         let (pdu_header, mut current_idx) = PduHeader::from_bytes(buf)?;
         let full_len_without_crc = pdu_header.verify_length_and_checksum(buf)?;
@@ -190,16 +233,18 @@ impl<'seg_meta, 'file_data> FileDataPdu<'seg_meta, 'file_data> {
             .into());
         }
         Ok(Self {
-            pdu_header,
-            segment_metadata,
-            offset,
+            common: FdPduBase {
+                pdu_header,
+                segment_metadata,
+                offset,
+            },
             file_data: &buf[current_idx..full_len_without_crc],
         })
     }
 }
 
 impl CfdpPdu for FileDataPdu<'_, '_> {
     fn pdu_header(&self) -> &PduHeader {
-        &self.pdu_header
+        &self.common.pdu_header
     }
 
     fn file_directive_type(&self) -> Option<FileDirectiveType> {
@@ -216,19 +261,8 @@ impl WritablePduPacket for FileDataPdu<'_, '_> {
             }
             .into());
         }
-        let mut current_idx = self.pdu_header.write_to_bytes(buf)?;
-        if self.segment_metadata.is_some() {
-            current_idx += self
-                .segment_metadata
-                .as_ref()
-                .unwrap()
-                .write_to_bytes(&mut buf[current_idx..])?;
-        }
-        current_idx += write_fss_field(
-            self.pdu_header.common_pdu_conf().file_flag,
-            self.offset,
-            &mut buf[current_idx..],
-        )?;
+        let mut current_idx = self.common.write_common_fields_to_bytes(buf)?;
         buf[current_idx..current_idx + self.file_data.len()].copy_from_slice(self.file_data);
         current_idx += self.file_data.len();
         if self.crc_flag() == CrcFlag::WithCrc {
@@ -238,10 +272,167 @@ impl WritablePduPacket for FileDataPdu<'_, '_> {
     }
 
     fn len_written(&self) -> usize {
-        self.pdu_header.header_len() + self.calc_pdu_datafield_len()
+        self.common.pdu_header.header_len() + self.calc_pdu_datafield_len()
     }
 }
/// File Data PDU creator abstraction.
///
/// This special creator object allows reading into the file data buffer directly. This avoids
/// the need for an additional buffer to create a file data PDU. This structure therefore
/// does not implement the regular [WritablePduPacket] trait.
///
/// For more information, refer to CFDP chapter 5.3.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct FileDataPduCreatorWithReservedDatafield<'seg_meta> {
#[cfg_attr(feature = "serde", serde(borrow))]
common: FdPduBase<'seg_meta>,
file_data_len: u64,
}
impl<'seg_meta> FileDataPduCreatorWithReservedDatafield<'seg_meta> {
pub fn new_with_seg_metadata(
pdu_header: PduHeader,
segment_metadata: SegmentMetadata<'seg_meta>,
offset: u64,
file_data_len: u64,
) -> Self {
Self::new_generic(pdu_header, Some(segment_metadata), offset, file_data_len)
}
pub fn new_no_seg_metadata(pdu_header: PduHeader, offset: u64, file_data_len: u64) -> Self {
Self::new_generic(pdu_header, None, offset, file_data_len)
}
pub fn new_generic(
mut pdu_header: PduHeader,
segment_metadata: Option<SegmentMetadata<'seg_meta>>,
offset: u64,
file_data_len: u64,
) -> Self {
pdu_header.pdu_type = PduType::FileData;
if segment_metadata.is_some() {
pdu_header.seg_metadata_flag = SegmentMetadataFlag::Present;
}
let mut pdu = Self {
common: FdPduBase {
pdu_header,
segment_metadata,
offset,
},
file_data_len,
};
pdu.common.pdu_header.pdu_datafield_len = pdu.calc_pdu_datafield_len() as u16;
pdu
}
fn calc_pdu_datafield_len(&self) -> usize {
self.common.calc_pdu_datafield_len(self.file_data_len)
}
pub fn len_written(&self) -> usize {
self.common.pdu_header.header_len() + self.calc_pdu_datafield_len()
}
/// This function performs a partial write by writing all data except the file data
/// and the CRC.
///
/// It returns a [FileDataPduCreatorWithUnwrittenData] which provides a mutable slice to
/// the reserved file data field. The user can read file data into this field directly and
/// then finish the PDU creation using the [FileDataPduCreatorWithUnwrittenData::finish] call.
pub fn write_to_bytes_partially<'buf>(
&self,
buf: &'buf mut [u8],
) -> Result<FileDataPduCreatorWithUnwrittenData<'buf>, PduError> {
if buf.len() < self.len_written() {
return Err(ByteConversionError::ToSliceTooSmall {
found: buf.len(),
expected: self.len_written(),
}
.into());
}
let mut current_idx = self.common.write_common_fields_to_bytes(buf)?;
let file_data_offset = current_idx as u64;
current_idx += self.file_data_len as usize;
if self.crc_flag() == CrcFlag::WithCrc {
current_idx += 2;
}
Ok(FileDataPduCreatorWithUnwrittenData {
write_buf: &mut buf[0..current_idx],
file_data_offset,
file_data_len: self.file_data_len,
needs_crc: self.crc_flag() == CrcFlag::WithCrc,
})
}
}
impl CfdpPdu for FileDataPduCreatorWithReservedDatafield<'_> {
fn pdu_header(&self) -> &PduHeader {
&self.common.pdu_header
}
fn file_directive_type(&self) -> Option<FileDirectiveType> {
None
}
}
/// This structure is created with [FileDataPduCreatorWithReservedDatafield::write_to_bytes_partially]
/// and provides an API to read file data from the virtual filesystem into the file data PDU buffer
/// directly.
///
/// This structure provides a mutable slice to the reserved file data field. The user can read
/// file data into this field directly and then finish the PDU creation using the
/// [FileDataPduCreatorWithUnwrittenData::finish] call.
pub struct FileDataPduCreatorWithUnwrittenData<'buf> {
write_buf: &'buf mut [u8],
file_data_offset: u64,
file_data_len: u64,
needs_crc: bool,
}
impl FileDataPduCreatorWithUnwrittenData<'_> {
pub fn file_data_field_mut(&mut self) -> &mut [u8] {
&mut self.write_buf[self.file_data_offset as usize
..self.file_data_offset as usize + self.file_data_len as usize]
}
/// This function needs to be called to add a CRC to the file data PDU where applicable.
///
/// It returns the full written size of the PDU.
pub fn finish(self) -> usize {
if self.needs_crc {
add_pdu_crc(
self.write_buf,
self.file_data_offset as usize + self.file_data_len as usize,
);
}
self.write_buf.len()
}
}
/// This function can be used to calculate the maximum allowed file segment size for
/// a given maximum packet length and the segment metadata if there is any.
pub fn calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(
pdu_header: &PduHeader,
max_packet_len: usize,
segment_metadata: Option<&SegmentMetadata>,
) -> usize {
let mut subtract = pdu_header.header_len();
if segment_metadata.is_some() {
subtract += 1 + segment_metadata.as_ref().unwrap().metadata().unwrap().len();
}
if pdu_header.common_pdu_conf().file_flag == LargeFileFlag::Large {
subtract += 8;
} else {
subtract += 4;
}
if pdu_header.common_pdu_conf().crc_flag == CrcFlag::WithCrc {
subtract += 2;
}
max_packet_len.saturating_sub(subtract)
}
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -263,7 +454,7 @@ mod tests {
         assert!(fd_pdu.segment_metadata().is_none());
         assert_eq!(
             fd_pdu.len_written(),
-            fd_pdu.pdu_header.header_len() + core::mem::size_of::<u32>() + 4
+            fd_pdu.pdu_header().header_len() + core::mem::size_of::<u32>() + 4
         );
 
         assert_eq!(fd_pdu.crc_flag(), CrcFlag::NoCrc);
@@ -290,11 +481,11 @@
         let written = res.unwrap();
         assert_eq!(
             written,
-            fd_pdu.pdu_header.header_len() + core::mem::size_of::<u32>() + 4
+            fd_pdu.pdu_header().header_len() + core::mem::size_of::<u32>() + 4
         );
-        let mut current_idx = fd_pdu.pdu_header.header_len();
+        let mut current_idx = fd_pdu.pdu_header().header_len();
         let file_size = u32::from_be_bytes(
-            buf[fd_pdu.pdu_header.header_len()..fd_pdu.pdu_header.header_len() + 4]
+            buf[fd_pdu.pdu_header().header_len()..fd_pdu.pdu_header().header_len() + 4]
                 .try_into()
                 .unwrap(),
         );
@@ -353,7 +544,7 @@ mod tests {
         buf[written - 1] -= 1;
         let crc: u16 = ((buf[written - 2] as u16) << 8) | buf[written - 1] as u16;
         let error = FileDataPdu::from_bytes(&buf).unwrap_err();
-        if let PduError::ChecksumError(e) = error {
+        if let PduError::Checksum(e) = error {
             assert_eq!(e, crc);
         } else {
             panic!("expected crc error");
@@ -380,7 +571,7 @@ mod tests {
         assert_eq!(*fd_pdu.segment_metadata().unwrap(), segment_meta);
         assert_eq!(
             fd_pdu.len_written(),
-            fd_pdu.pdu_header.header_len()
+            fd_pdu.pdu_header().header_len()
                 + 1
                 + seg_metadata.len()
                 + core::mem::size_of::<u32>()
@@ -390,7 +581,7 @@
         fd_pdu
             .write_to_bytes(&mut buf)
             .expect("writing FD PDU failed");
-        let mut current_idx = fd_pdu.pdu_header.header_len();
+        let mut current_idx = fd_pdu.pdu_header().header_len();
         assert_eq!(
             RecordContinuationState::try_from((buf[current_idx] >> 6) & 0b11).unwrap(),
             RecordContinuationState::StartAndEnd
@@ -482,4 +673,142 @@
         let output_converted_back: FileDataPdu = from_bytes(&output).unwrap();
         assert_eq!(output_converted_back, fd_pdu);
     }
#[test]
fn test_fd_pdu_creator_with_reserved_field_no_crc() {
let common_conf =
CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap();
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
let test_str = "hello world!";
let fd_pdu = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
pdu_header,
10,
test_str.len() as u64,
);
let mut write_buf: [u8; 64] = [0; 64];
let mut pdu_unwritten = fd_pdu
.write_to_bytes_partially(&mut write_buf)
.expect("partial write failed");
pdu_unwritten
.file_data_field_mut()
.copy_from_slice(test_str.as_bytes());
pdu_unwritten.finish();
let pdu_reader = FileDataPdu::from_bytes(&write_buf).expect("reading file data PDU failed");
assert_eq!(
core::str::from_utf8(pdu_reader.file_data()).expect("reading utf8 string failed"),
"hello world!"
);
}
#[test]
fn test_fd_pdu_creator_with_reserved_field_with_crc() {
let mut common_conf =
CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap();
common_conf.crc_flag = true.into();
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
let test_str = "hello world!";
let fd_pdu = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
pdu_header,
10,
test_str.len() as u64,
);
let mut write_buf: [u8; 64] = [0; 64];
let mut pdu_unwritten = fd_pdu
.write_to_bytes_partially(&mut write_buf)
.expect("partial write failed");
pdu_unwritten
.file_data_field_mut()
.copy_from_slice(test_str.as_bytes());
pdu_unwritten.finish();
let pdu_reader = FileDataPdu::from_bytes(&write_buf).expect("reading file data PDU failed");
assert_eq!(
core::str::from_utf8(pdu_reader.file_data()).expect("reading utf8 string failed"),
"hello world!"
);
}
#[test]
fn test_fd_pdu_creator_with_reserved_field_with_crc_without_finish_fails() {
let mut common_conf =
CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap();
common_conf.crc_flag = true.into();
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
let test_str = "hello world!";
let fd_pdu = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
pdu_header,
10,
test_str.len() as u64,
);
let mut write_buf: [u8; 64] = [0; 64];
let mut pdu_unwritten = fd_pdu
.write_to_bytes_partially(&mut write_buf)
.expect("partial write failed");
pdu_unwritten
.file_data_field_mut()
.copy_from_slice(test_str.as_bytes());
let pdu_reader_error = FileDataPdu::from_bytes(&write_buf);
assert!(pdu_reader_error.is_err());
let error = pdu_reader_error.unwrap_err();
match error {
PduError::Checksum(_) => (),
_ => {
panic!("unexpected PDU error {}", error)
}
}
}
#[test]
fn test_max_file_seg_calculator_0() {
let pdu_header = PduHeader::new_for_file_data_default(CommonPduConfig::default(), 0);
assert_eq!(
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 64, None),
53
);
}
#[test]
fn test_max_file_seg_calculator_1() {
let common_conf = CommonPduConfig {
crc_flag: CrcFlag::WithCrc,
..Default::default()
};
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
assert_eq!(
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 64, None),
51
);
}
#[test]
fn test_max_file_seg_calculator_2() {
let common_conf = CommonPduConfig {
file_flag: LargeFileFlag::Large,
..Default::default()
};
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
assert_eq!(
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 64, None),
49
);
}
#[test]
fn test_max_file_seg_calculator_saturating_sub() {
let common_conf = CommonPduConfig {
file_flag: LargeFileFlag::Large,
..Default::default()
};
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
assert_eq!(
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 15, None),
0
);
assert_eq!(
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 14, None),
0
);
}
} }

View File

@ -4,13 +4,14 @@ use crate::cfdp::pdu::{
use crate::cfdp::tlv::{ use crate::cfdp::tlv::{
EntityIdTlv, FilestoreResponseTlv, GenericTlv, Tlv, TlvType, TlvTypeField, WritableTlv, EntityIdTlv, FilestoreResponseTlv, GenericTlv, Tlv, TlvType, TlvTypeField, WritableTlv,
}; };
use crate::cfdp::{ConditionCode, CrcFlag, Direction, PduType, TlvLvError}; use crate::cfdp::{ConditionCode, CrcFlag, Direction, PduType};
use crate::ByteConversionError; use crate::ByteConversionError;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::{CfdpPdu, WritablePduPacket}; use super::tlv::ReadableTlv;
use super::{CfdpPdu, InvalidTlvTypeFieldError, WritablePduPacket};
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
@ -331,22 +332,26 @@ impl<'buf> FinishedPduReader<'buf> {
// last TLV, everything else would break the whole handling of the packet // last TLV, everything else would break the whole handling of the packet
// TLVs. // TLVs.
if current_idx != full_len_without_crc { if current_idx != full_len_without_crc {
return Err(PduError::FormatError); return Err(PduError::Format);
} }
} else { } else {
return Err(TlvLvError::InvalidTlvTypeField { return Err(PduError::TlvLv(
found: tlv_type.into(), InvalidTlvTypeFieldError {
expected: Some(TlvType::FilestoreResponse.into()), found: tlv_type.into(),
} expected: Some(TlvType::FilestoreResponse.into()),
.into()); }
.into(),
));
} }
} }
TlvTypeField::Custom(raw) => { TlvTypeField::Custom(raw) => {
return Err(TlvLvError::InvalidTlvTypeField { return Err(PduError::TlvLv(
found: raw, InvalidTlvTypeFieldError {
expected: None, found: raw,
} expected: None,
.into()); }
.into(),
));
} }
} }
} }
@ -563,7 +568,7 @@ mod tests {
buf[written - 1] -= 1; buf[written - 1] -= 1;
let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16; let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16;
let error = FinishedPduReader::new(&buf).unwrap_err(); let error = FinishedPduReader::new(&buf).unwrap_err();
if let PduError::ChecksumError(e) = error { if let PduError::Checksum(e) = error {
assert_eq!(e, crc); assert_eq!(e, crc);
} else { } else {
panic!("expected crc error"); panic!("expected crc error");

View File

@ -1,3 +1,5 @@
#[cfg(feature = "alloc")]
use super::tlv::TlvOwned;
use crate::cfdp::lv::Lv; use crate::cfdp::lv::Lv;
use crate::cfdp::pdu::{ use crate::cfdp::pdu::{
add_pdu_crc, generic_length_checks_pdu_deserialization, read_fss_field, write_fss_field, add_pdu_crc, generic_length_checks_pdu_deserialization, read_fss_field, write_fss_field,
@ -11,6 +13,7 @@ use alloc::vec::Vec;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::tlv::ReadableTlv;
use super::{CfdpPdu, WritablePduPacket}; use super::{CfdpPdu, WritablePduPacket};
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq)] #[derive(Default, Debug, Copy, Clone, PartialEq, Eq)]
@ -51,18 +54,26 @@ pub fn build_metadata_opts_from_vec(
build_metadata_opts_from_slice(buf, tlvs.as_slice()) build_metadata_opts_from_slice(buf, tlvs.as_slice())
} }
#[cfg(feature = "alloc")]
pub fn build_metadata_opts_from_owned_slice(tlvs: &[TlvOwned]) -> Vec<u8> {
let mut sum_vec = Vec::new();
for tlv in tlvs {
sum_vec.extend(tlv.to_vec());
}
sum_vec
}
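// Illustrative usage sketch (not part of the original diff): building the contiguous options
// byte block for a metadata PDU from owned TLVs. The TLV types chosen here are arbitrary and
// TlvType is assumed to be reachable through the same tlv module imported above.
#[cfg(feature = "alloc")]
fn example_owned_metadata_options() -> Vec<u8> {
    use super::tlv::TlvType;
    let flow_label = TlvOwned::new_empty(TlvType::FlowLabel);
    let msg_to_user = TlvOwned::new(TlvType::MsgToUser, &[1, 2, 3, 4]);
    // The returned byte block can be passed as the options slice of the metadata PDU creator.
    build_metadata_opts_from_owned_slice(&[flow_label, msg_to_user])
}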
/// Metadata PDU creator abstraction. /// Metadata PDU creator abstraction.
/// ///
/// This abstraction exposes a specialized API for creating metadata PDUs as specified in /// This abstraction exposes a specialized API for creating metadata PDUs as specified in
/// CFDP chapter 5.2.5. /// CFDP chapter 5.2.5.
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct MetadataPduCreator<'src_name, 'dest_name, 'opts> { pub struct MetadataPduCreator<'src_name, 'dest_name, 'opts> {
pdu_header: PduHeader, pdu_header: PduHeader,
metadata_params: MetadataGenericParams, metadata_params: MetadataGenericParams,
src_file_name: Lv<'src_name>, src_file_name: Lv<'src_name>,
dest_file_name: Lv<'dest_name>, dest_file_name: Lv<'dest_name>,
options: &'opts [Tlv<'opts>], options: &'opts [u8],
} }
impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'opts> { impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'opts> {
@ -86,7 +97,7 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
metadata_params: MetadataGenericParams, metadata_params: MetadataGenericParams,
src_file_name: Lv<'src_name>, src_file_name: Lv<'src_name>,
dest_file_name: Lv<'dest_name>, dest_file_name: Lv<'dest_name>,
options: &'opts [Tlv<'opts>], options: &'opts [u8],
) -> Self { ) -> Self {
Self::new( Self::new(
pdu_header, pdu_header,
@ -102,7 +113,7 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
metadata_params: MetadataGenericParams, metadata_params: MetadataGenericParams,
src_file_name: Lv<'src_name>, src_file_name: Lv<'src_name>,
dest_file_name: Lv<'dest_name>, dest_file_name: Lv<'dest_name>,
options: &'opts [Tlv<'opts>], options: &'opts [u8],
) -> Self { ) -> Self {
pdu_header.pdu_type = PduType::FileDirective; pdu_header.pdu_type = PduType::FileDirective;
pdu_header.pdu_conf.direction = Direction::TowardsReceiver; pdu_header.pdu_conf.direction = Direction::TowardsReceiver;
@ -129,10 +140,19 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
self.dest_file_name self.dest_file_name
} }
pub fn options(&self) -> &'opts [Tlv<'opts>] { pub fn options(&self) -> &'opts [u8] {
self.options self.options
} }
/// Yield an iterator which can be used to loop through all options. The iterator yields
/// nothing if the options field is empty.
pub fn options_iter(&self) -> OptionsIter<'_> {
OptionsIter {
opt_buf: self.options,
current_idx: 0,
}
}
fn calc_pdu_datafield_len(&self) -> usize { fn calc_pdu_datafield_len(&self) -> usize {
// One directive type octet and one byte of the directive parameter field. // One directive type octet and one byte of the directive parameter field.
let mut len = 2; let mut len = 2;
@ -143,9 +163,7 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
} }
len += self.src_file_name.len_full(); len += self.src_file_name.len_full();
len += self.dest_file_name.len_full(); len += self.dest_file_name.len_full();
for tlv in self.options() { len += self.options().len();
len += tlv.len_full()
}
if self.crc_flag() == CrcFlag::WithCrc { if self.crc_flag() == CrcFlag::WithCrc {
len += 2; len += 2;
} }
@ -191,10 +209,8 @@ impl WritablePduPacket for MetadataPduCreator<'_, '_, '_> {
current_idx += self current_idx += self
.dest_file_name .dest_file_name
.write_to_be_bytes(&mut buf[current_idx..])?; .write_to_be_bytes(&mut buf[current_idx..])?;
for opt in self.options() { buf[current_idx..current_idx + self.options.len()].copy_from_slice(self.options);
opt.write_to_bytes(&mut buf[current_idx..current_idx + opt.len_full()])?; current_idx += self.options.len();
current_idx += opt.len_full();
}
if self.crc_flag() == CrcFlag::WithCrc { if self.crc_flag() == CrcFlag::WithCrc {
current_idx = add_pdu_crc(buf, current_idx); current_idx = add_pdu_crc(buf, current_idx);
} }
@ -355,7 +371,7 @@ pub mod tests {
}; };
use crate::cfdp::pdu::{CfdpPdu, PduError, WritablePduPacket}; use crate::cfdp::pdu::{CfdpPdu, PduError, WritablePduPacket};
use crate::cfdp::pdu::{FileDirectiveType, PduHeader}; use crate::cfdp::pdu::{FileDirectiveType, PduHeader};
use crate::cfdp::tlv::{Tlv, TlvType}; use crate::cfdp::tlv::{ReadableTlv, Tlv, TlvOwned, TlvType, WritableTlv};
use crate::cfdp::{ use crate::cfdp::{
ChecksumType, CrcFlag, Direction, LargeFileFlag, PduType, SegmentMetadataFlag, ChecksumType, CrcFlag, Direction, LargeFileFlag, PduType, SegmentMetadataFlag,
SegmentationControl, TransmissionMode, SegmentationControl, TransmissionMode,
@ -365,16 +381,16 @@ pub mod tests {
const SRC_FILENAME: &str = "hello-world.txt"; const SRC_FILENAME: &str = "hello-world.txt";
const DEST_FILENAME: &str = "hello-world2.txt"; const DEST_FILENAME: &str = "hello-world2.txt";
fn generic_metadata_pdu<'opts>( fn generic_metadata_pdu(
crc_flag: CrcFlag, crc_flag: CrcFlag,
checksum_type: ChecksumType, checksum_type: ChecksumType,
closure_requested: bool, closure_requested: bool,
fss: LargeFileFlag, fss: LargeFileFlag,
opts: &'opts [Tlv], opts: &[u8],
) -> ( ) -> (
Lv<'static>, Lv<'static>,
Lv<'static>, Lv<'static>,
MetadataPduCreator<'static, 'static, 'opts>, MetadataPduCreator<'static, 'static, '_>,
) { ) {
let pdu_header = PduHeader::new_no_file_data(common_pdu_conf(crc_flag, fss), 0); let pdu_header = PduHeader::new_no_file_data(common_pdu_conf(crc_flag, fss), 0);
let metadata_params = MetadataGenericParams::new(closure_requested, checksum_type, 0x1010); let metadata_params = MetadataGenericParams::new(closure_requested, checksum_type, 0x1010);
@ -544,9 +560,9 @@ pub mod tests {
assert_eq!(written.metadata_params(), read.metadata_params()); assert_eq!(written.metadata_params(), read.metadata_params());
assert_eq!(written.src_file_name(), read.src_file_name()); assert_eq!(written.src_file_name(), read.src_file_name());
assert_eq!(written.dest_file_name(), read.dest_file_name()); assert_eq!(written.dest_file_name(), read.dest_file_name());
let opts = written.options(); let opts = written.options_iter();
for (tlv_written, tlv_read) in opts.iter().zip(read.options_iter().unwrap()) { for (tlv_written, tlv_read) in opts.zip(read.options_iter().unwrap()) {
assert_eq!(tlv_written, &tlv_read); assert_eq!(&tlv_written, &tlv_read);
} }
} }
@ -661,14 +677,14 @@ pub mod tests {
let tlv1 = Tlv::new_empty(TlvType::FlowLabel); let tlv1 = Tlv::new_empty(TlvType::FlowLabel);
let msg_to_user: [u8; 4] = [1, 2, 3, 4]; let msg_to_user: [u8; 4] = [1, 2, 3, 4];
let tlv2 = Tlv::new(TlvType::MsgToUser, &msg_to_user).unwrap(); let tlv2 = Tlv::new(TlvType::MsgToUser, &msg_to_user).unwrap();
let tlv_vec = vec![tlv1, tlv2]; let mut tlv_buf: [u8; 64] = [0; 64];
let opts_len = tlv1.len_full() + tlv2.len_full(); let opts_len = build_metadata_opts_from_slice(&mut tlv_buf, &[tlv1, tlv2]).unwrap();
let (src_filename, dest_filename, metadata_pdu) = generic_metadata_pdu( let (src_filename, dest_filename, metadata_pdu) = generic_metadata_pdu(
CrcFlag::NoCrc, CrcFlag::NoCrc,
ChecksumType::Crc32, ChecksumType::Crc32,
false, false,
LargeFileFlag::Normal, LargeFileFlag::Normal,
&tlv_vec, &tlv_buf[0..opts_len],
); );
let mut buf: [u8; 128] = [0; 128]; let mut buf: [u8; 128] = [0; 128];
let write_res = metadata_pdu.write_to_bytes(&mut buf); let write_res = metadata_pdu.write_to_bytes(&mut buf);
@ -691,7 +707,55 @@ pub mod tests {
let opts_iter = opts_iter.unwrap(); let opts_iter = opts_iter.unwrap();
let mut accumulated_len = 0; let mut accumulated_len = 0;
for (idx, opt) in opts_iter.enumerate() { for (idx, opt) in opts_iter.enumerate() {
assert_eq!(tlv_vec[idx], opt); if idx == 0 {
assert_eq!(tlv1, opt);
} else if idx == 1 {
assert_eq!(tlv2, opt);
}
accumulated_len += opt.len_full();
}
assert_eq!(accumulated_len, pdu_read_back.options().len());
}
#[test]
fn test_with_owned_opts() {
let tlv1 = TlvOwned::new_empty(TlvType::FlowLabel);
let msg_to_user: [u8; 4] = [1, 2, 3, 4];
let tlv2 = TlvOwned::new(TlvType::MsgToUser, &msg_to_user);
let mut all_tlvs = tlv1.to_vec();
all_tlvs.extend(tlv2.to_vec());
let (src_filename, dest_filename, metadata_pdu) = generic_metadata_pdu(
CrcFlag::NoCrc,
ChecksumType::Crc32,
false,
LargeFileFlag::Normal,
&all_tlvs,
);
let mut buf: [u8; 128] = [0; 128];
let write_res = metadata_pdu.write_to_bytes(&mut buf);
assert!(write_res.is_ok());
let written = write_res.unwrap();
assert_eq!(
written,
metadata_pdu.pdu_header.header_len()
+ 1
+ 1
+ 4
+ src_filename.len_full()
+ dest_filename.len_full()
+ all_tlvs.len()
);
let pdu_read_back = MetadataPduReader::from_bytes(&buf).unwrap();
compare_read_pdu_to_written_pdu(&metadata_pdu, &pdu_read_back);
let opts_iter = pdu_read_back.options_iter();
assert!(opts_iter.is_some());
let opts_iter = opts_iter.unwrap();
let mut accumulated_len = 0;
for (idx, opt) in opts_iter.enumerate() {
if idx == 0 {
assert_eq!(tlv1, opt);
} else if idx == 1 {
assert_eq!(tlv2, opt);
}
accumulated_len += opt.len_full(); accumulated_len += opt.len_full();
} }
assert_eq!(accumulated_len, pdu_read_back.options().len()); assert_eq!(accumulated_len, pdu_read_back.options().len());
@ -716,7 +780,7 @@ pub mod tests {
assert_eq!(expected, Some(FileDirectiveType::MetadataPdu)); assert_eq!(expected, Some(FileDirectiveType::MetadataPdu));
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"invalid directive type value 255, expected Some(MetadataPdu)" "invalid directive type, found 255, expected Some(MetadataPdu)"
); );
} else { } else {
panic!("Expected InvalidDirectiveType error, got {:?}", error); panic!("Expected InvalidDirectiveType error, got {:?}", error);
@ -742,7 +806,7 @@ pub mod tests {
assert_eq!(expected, FileDirectiveType::MetadataPdu); assert_eq!(expected, FileDirectiveType::MetadataPdu);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"found directive type EofPdu, expected MetadataPdu" "wrong directive type, found EofPdu, expected MetadataPdu"
); );
} else { } else {
panic!("Expected InvalidDirectiveType error, got {:?}", error); panic!("Expected InvalidDirectiveType error, got {:?}", error);

View File

@ -5,9 +5,6 @@ use crate::ByteConversionError;
use crate::CRC_CCITT_FALSE; use crate::CRC_CCITT_FALSE;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
use core::fmt::{Display, Formatter};
#[cfg(feature = "std")]
use std::error::Error;
pub mod ack; pub mod ack;
pub mod eof; pub mod eof;
@ -30,135 +27,60 @@ pub enum FileDirectiveType {
KeepAlivePdu = 0x0c, KeepAlivePdu = 0x0c,
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum PduError { pub enum PduError {
ByteConversion(ByteConversionError), #[error("byte conversion error: {0}")]
/// Found version ID invalid, not equal to [CFDP_VERSION_2]. ByteConversion(#[from] ByteConversionError),
/// Found version ID invalid, not equal to [super::CFDP_VERSION_2].
#[error("CFDP version missmatch, found {0}, expected {ver}", ver = super::CFDP_VERSION_2)]
CfdpVersionMissmatch(u8), CfdpVersionMissmatch(u8),
/// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported. /// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported.
#[error("invalid PDU entity ID length {0}, only [1, 2, 4, 8] are allowed")]
InvalidEntityLen(u8), InvalidEntityLen(u8),
/// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported. /// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported.
#[error("invalid transaction ID length {0}")]
InvalidTransactionSeqNumLen(u8), InvalidTransactionSeqNumLen(u8),
#[error("missmatch of PDU source ID length {src_id_len} and destination ID length {dest_id_len}")]
SourceDestIdLenMissmatch { SourceDestIdLenMissmatch {
src_id_len: usize, src_id_len: usize,
dest_id_len: usize, dest_id_len: usize,
}, },
/// Wrong directive type, for example when parsing the directive field for a file directive /// Wrong directive type, for example when parsing the directive field for a file directive
/// PDU. /// PDU.
#[error("wrong directive type, found {found:?}, expected {expected:?}")]
WrongDirectiveType { WrongDirectiveType {
found: FileDirectiveType, found: FileDirectiveType,
expected: FileDirectiveType, expected: FileDirectiveType,
}, },
/// The directive type field contained a value not in the range of permitted values. This can /// The directive type field contained a value not in the range of permitted values. This can
/// also happen if an invalid value is passed to the ACK PDU constructor. /// also happen if an invalid value is passed to the ACK PDU constructor.
#[error("invalid directive type, found {found:?}, expected {expected:?}")]
InvalidDirectiveType { InvalidDirectiveType {
found: u8, found: u8,
expected: Option<FileDirectiveType>, expected: Option<FileDirectiveType>,
}, },
#[error("invalid start or end of scope value for NAK PDU")]
InvalidStartOrEndOfScopeValue, InvalidStartOrEndOfScopeValue,
/// Invalid condition code. Contains the raw detected value. /// Invalid condition code. Contains the raw detected value.
#[error("invalid condition code {0}")]
InvalidConditionCode(u8), InvalidConditionCode(u8),
/// Invalid checksum type which is not part of the checksums listed in the /// Invalid checksum type which is not part of the checksums listed in the
/// [SANA Checksum Types registry](https://sanaregistry.org/r/checksum_identifiers/). /// [SANA Checksum Types registry](https://sanaregistry.org/r/checksum_identifiers/).
#[error("invalid checksum type {0}")]
InvalidChecksumType(u8), InvalidChecksumType(u8),
#[error("file size {0} too large")]
FileSizeTooLarge(u64), FileSizeTooLarge(u64),
/// If the CRC flag for a PDU is enabled and the checksum check fails. Contains raw 16-bit CRC. /// If the CRC flag for a PDU is enabled and the checksum check fails. Contains raw 16-bit CRC.
ChecksumError(u16), #[error("checksum error for checksum {0}")]
Checksum(u16),
/// Generic error for invalid PDU formats. /// Generic error for invalid PDU formats.
FormatError, #[error("generic PDU format error")]
Format,
/// Error handling a TLV field. /// Error handling a TLV field.
TlvLvError(TlvLvError), #[error("PDU error: {0}")]
} TlvLv(#[from] TlvLvError),
impl Display for PduError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
PduError::InvalidEntityLen(raw_id) => {
write!(
f,
"invalid PDU entity ID length {raw_id}, only [1, 2, 4, 8] are allowed"
)
}
PduError::InvalidStartOrEndOfScopeValue => {
write!(f, "invalid start or end of scope for NAK PDU")
}
PduError::InvalidTransactionSeqNumLen(raw_id) => {
write!(
f,
"invalid PDUtransaction seq num length {raw_id}, only [1, 2, 4, 8] are allowed"
)
}
PduError::CfdpVersionMissmatch(raw) => {
write!(
f,
"cfdp version missmatch, found {raw}, expected {CFDP_VERSION_2}"
)
}
PduError::SourceDestIdLenMissmatch {
src_id_len,
dest_id_len,
} => {
write!(
f,
"missmatch of PDU source length {src_id_len} and destination length {dest_id_len}"
)
}
PduError::ByteConversion(e) => {
write!(f, "{}", e)
}
PduError::FileSizeTooLarge(value) => {
write!(f, "file size value {value} exceeds allowed 32 bit width")
}
PduError::WrongDirectiveType { found, expected } => {
write!(f, "found directive type {found:?}, expected {expected:?}")
}
PduError::InvalidConditionCode(raw_code) => {
write!(f, "found invalid condition code with raw value {raw_code}")
}
PduError::InvalidDirectiveType { found, expected } => {
write!(
f,
"invalid directive type value {found}, expected {expected:?}"
)
}
PduError::InvalidChecksumType(checksum_type) => {
write!(f, "invalid checksum type {checksum_type}")
}
PduError::ChecksumError(checksum) => {
write!(f, "checksum error for CRC {checksum:#04x}")
}
PduError::TlvLvError(error) => {
write!(f, "pdu tlv error: {error}")
}
PduError::FormatError => {
write!(f, "generic PDU format error")
}
}
}
}
#[cfg(feature = "std")]
impl Error for PduError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
PduError::ByteConversion(e) => Some(e),
PduError::TlvLvError(e) => Some(e),
_ => None,
}
}
}
impl From<ByteConversionError> for PduError {
fn from(value: ByteConversionError) -> Self {
Self::ByteConversion(value)
}
}
impl From<TlvLvError> for PduError {
fn from(e: TlvLvError) -> Self {
Self::TlvLvError(e)
}
} }
pub trait WritablePduPacket { pub trait WritablePduPacket {
@ -179,33 +101,42 @@ pub trait WritablePduPacket {
pub trait CfdpPdu { pub trait CfdpPdu {
fn pdu_header(&self) -> &PduHeader; fn pdu_header(&self) -> &PduHeader;
#[inline]
fn source_id(&self) -> UnsignedByteField { fn source_id(&self) -> UnsignedByteField {
self.pdu_header().common_pdu_conf().source_entity_id self.pdu_header().common_pdu_conf().source_entity_id
} }
#[inline]
fn dest_id(&self) -> UnsignedByteField { fn dest_id(&self) -> UnsignedByteField {
self.pdu_header().common_pdu_conf().dest_entity_id self.pdu_header().common_pdu_conf().dest_entity_id
} }
#[inline]
fn transaction_seq_num(&self) -> UnsignedByteField { fn transaction_seq_num(&self) -> UnsignedByteField {
self.pdu_header().common_pdu_conf().transaction_seq_num self.pdu_header().common_pdu_conf().transaction_seq_num
} }
#[inline]
fn transmission_mode(&self) -> TransmissionMode { fn transmission_mode(&self) -> TransmissionMode {
self.pdu_header().common_pdu_conf().trans_mode self.pdu_header().common_pdu_conf().trans_mode
} }
#[inline]
fn direction(&self) -> Direction { fn direction(&self) -> Direction {
self.pdu_header().common_pdu_conf().direction self.pdu_header().common_pdu_conf().direction
} }
#[inline]
fn crc_flag(&self) -> CrcFlag { fn crc_flag(&self) -> CrcFlag {
self.pdu_header().common_pdu_conf().crc_flag self.pdu_header().common_pdu_conf().crc_flag
} }
#[inline]
fn file_flag(&self) -> LargeFileFlag { fn file_flag(&self) -> LargeFileFlag {
self.pdu_header().common_pdu_conf().file_flag self.pdu_header().common_pdu_conf().file_flag
} }
#[inline]
fn pdu_type(&self) -> PduType { fn pdu_type(&self) -> PduType {
self.pdu_header().pdu_type() self.pdu_header().pdu_type()
} }
@ -234,6 +165,7 @@ pub struct CommonPduConfig {
// TODO: Builder pattern might be applicable here.. // TODO: Builder pattern might be applicable here..
impl CommonPduConfig { impl CommonPduConfig {
#[inline]
pub fn new( pub fn new(
source_id: impl Into<UnsignedByteField>, source_id: impl Into<UnsignedByteField>,
dest_id: impl Into<UnsignedByteField>, dest_id: impl Into<UnsignedByteField>,
@ -265,6 +197,7 @@ impl CommonPduConfig {
}) })
} }
#[inline]
pub fn new_with_byte_fields( pub fn new_with_byte_fields(
source_id: impl Into<UnsignedByteField>, source_id: impl Into<UnsignedByteField>,
dest_id: impl Into<UnsignedByteField>, dest_id: impl Into<UnsignedByteField>,
@ -281,10 +214,12 @@ impl CommonPduConfig {
) )
} }
#[inline]
pub fn source_id(&self) -> UnsignedByteField { pub fn source_id(&self) -> UnsignedByteField {
self.source_entity_id self.source_entity_id
} }
#[inline]
fn source_dest_id_check( fn source_dest_id_check(
source_id: impl Into<UnsignedByteField>, source_id: impl Into<UnsignedByteField>,
dest_id: impl Into<UnsignedByteField>, dest_id: impl Into<UnsignedByteField>,
@ -307,6 +242,7 @@ impl CommonPduConfig {
Ok((source_id, dest_id)) Ok((source_id, dest_id))
} }
#[inline]
pub fn set_source_and_dest_id( pub fn set_source_and_dest_id(
&mut self, &mut self,
source_id: impl Into<UnsignedByteField>, source_id: impl Into<UnsignedByteField>,
@ -318,6 +254,7 @@ impl CommonPduConfig {
Ok(()) Ok(())
} }
#[inline]
pub fn dest_id(&self) -> UnsignedByteField { pub fn dest_id(&self) -> UnsignedByteField {
self.dest_entity_id self.dest_entity_id
} }
@ -326,6 +263,7 @@ impl CommonPduConfig {
impl Default for CommonPduConfig { impl Default for CommonPduConfig {
/// The defaults for the source ID, destination ID and the transaction sequence number are the /// The defaults for the source ID, destination ID and the transaction sequence number are the
/// [UnsignedByteFieldU8] with an initial value of 0 /// [UnsignedByteFieldU8] with an initial value of 0
#[inline]
fn default() -> Self { fn default() -> Self {
// The new function can not fail for these input parameters. // The new function can not fail for these input parameters.
Self::new( Self::new(
@ -342,6 +280,7 @@ impl Default for CommonPduConfig {
} }
impl PartialEq for CommonPduConfig { impl PartialEq for CommonPduConfig {
#[inline]
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.source_entity_id.value() == other.source_entity_id.value() self.source_entity_id.value() == other.source_entity_id.value()
&& self.dest_entity_id.value() == other.dest_entity_id.value() && self.dest_entity_id.value() == other.dest_entity_id.value()
@ -370,6 +309,7 @@ pub struct PduHeader {
} }
impl PduHeader { impl PduHeader {
#[inline]
pub fn new_for_file_data( pub fn new_for_file_data(
pdu_conf: CommonPduConfig, pdu_conf: CommonPduConfig,
pdu_datafield_len: u16, pdu_datafield_len: u16,
@ -385,6 +325,7 @@ impl PduHeader {
) )
} }
#[inline]
pub fn new_for_file_data_default(pdu_conf: CommonPduConfig, pdu_datafield_len: u16) -> Self { pub fn new_for_file_data_default(pdu_conf: CommonPduConfig, pdu_datafield_len: u16) -> Self {
Self::new_generic( Self::new_generic(
PduType::FileData, PduType::FileData,
@ -394,6 +335,7 @@ impl PduHeader {
SegmentationControl::NoRecordBoundaryPreservation, SegmentationControl::NoRecordBoundaryPreservation,
) )
} }
#[inline]
pub fn new_no_file_data(pdu_conf: CommonPduConfig, pdu_datafield_len: u16) -> Self { pub fn new_no_file_data(pdu_conf: CommonPduConfig, pdu_datafield_len: u16) -> Self {
Self::new_generic( Self::new_generic(
PduType::FileDirective, PduType::FileDirective,
@ -404,6 +346,7 @@ impl PduHeader {
) )
} }
#[inline]
pub fn new_generic( pub fn new_generic(
pdu_type: PduType, pdu_type: PduType,
pdu_conf: CommonPduConfig, pdu_conf: CommonPduConfig,
@ -421,6 +364,7 @@ impl PduHeader {
} }
/// Returns only the length of the PDU header when written to a raw buffer. /// Returns only the length of the PDU header when written to a raw buffer.
#[inline]
pub fn header_len(&self) -> usize { pub fn header_len(&self) -> usize {
FIXED_HEADER_LEN FIXED_HEADER_LEN
+ self.pdu_conf.source_entity_id.size() + self.pdu_conf.source_entity_id.size()
@ -428,12 +372,14 @@ impl PduHeader {
+ self.pdu_conf.dest_entity_id.size() + self.pdu_conf.dest_entity_id.size()
} }
#[inline]
pub fn pdu_datafield_len(&self) -> usize { pub fn pdu_datafield_len(&self) -> usize {
self.pdu_datafield_len.into() self.pdu_datafield_len.into()
} }
/// Returns the full length of the PDU when written to a raw buffer, which is the header length /// Returns the full length of the PDU when written to a raw buffer, which is the header length
/// plus the PDU datafield length. /// plus the PDU datafield length.
#[inline]
pub fn pdu_len(&self) -> usize { pub fn pdu_len(&self) -> usize {
self.header_len() + self.pdu_datafield_len as usize self.header_len() + self.pdu_datafield_len as usize
} }
@ -506,7 +452,7 @@ impl PduHeader {
let mut digest = CRC_CCITT_FALSE.digest(); let mut digest = CRC_CCITT_FALSE.digest();
digest.update(&buf[..self.pdu_len()]); digest.update(&buf[..self.pdu_len()]);
if digest.finalize() != 0 { if digest.finalize() != 0 {
return Err(PduError::ChecksumError(u16::from_be_bytes( return Err(PduError::Checksum(u16::from_be_bytes(
buf[self.pdu_len() - 2..self.pdu_len()].try_into().unwrap(), buf[self.pdu_len() - 2..self.pdu_len()].try_into().unwrap(),
))); )));
} }
@ -606,10 +552,13 @@ impl PduHeader {
current_idx, current_idx,
)) ))
} }
#[inline]
pub fn pdu_type(&self) -> PduType { pub fn pdu_type(&self) -> PduType {
self.pdu_type self.pdu_type
} }
#[inline]
pub fn common_pdu_conf(&self) -> &CommonPduConfig { pub fn common_pdu_conf(&self) -> &CommonPduConfig {
&self.pdu_conf &self.pdu_conf
} }
@ -617,6 +566,8 @@ impl PduHeader {
pub fn seg_metadata_flag(&self) -> SegmentMetadataFlag { pub fn seg_metadata_flag(&self) -> SegmentMetadataFlag {
self.seg_metadata_flag self.seg_metadata_flag
} }
#[inline]
pub fn seg_ctrl(&self) -> SegmentationControl { pub fn seg_ctrl(&self) -> SegmentationControl {
self.seg_ctrl self.seg_ctrl
} }
@ -950,7 +901,7 @@ mod tests {
assert_eq!(raw_version, CFDP_VERSION_2 + 1); assert_eq!(raw_version, CFDP_VERSION_2 + 1);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"cfdp version missmatch, found 2, expected 1" "CFDP version missmatch, found 2, expected 1"
); );
} else { } else {
panic!("invalid exception: {}", error); panic!("invalid exception: {}", error);
@ -998,7 +949,7 @@ mod tests {
assert_eq!(expected, 7); assert_eq!(expected, 7);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"source slice with size 6 too small, expected at least 7 bytes" "byte conversion error: source slice with size 6 too small, expected at least 7 bytes"
); );
} }
} }
@ -1053,7 +1004,7 @@ mod tests {
assert_eq!(dest_id_len, 2); assert_eq!(dest_id_len, 2);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"missmatch of PDU source length 1 and destination length 2" "missmatch of PDU source ID length 1 and destination ID length 2"
); );
} }
} }

View File

@ -751,7 +751,7 @@ mod tests {
if let PduError::InvalidStartOrEndOfScopeValue = error { if let PduError::InvalidStartOrEndOfScopeValue = error {
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"invalid start or end of scope for NAK PDU" "invalid start or end of scope value for NAK PDU"
); );
} else { } else {
panic!("unexpected error {error}"); panic!("unexpected error {error}");
@ -796,7 +796,7 @@ mod tests {
nak_vec[nak_pdu.len_written() - 1] -= 1; nak_vec[nak_pdu.len_written() - 1] -= 1;
let nak_pdu_deser = NakPduReader::new(&nak_vec); let nak_pdu_deser = NakPduReader::new(&nak_vec);
assert!(nak_pdu_deser.is_err()); assert!(nak_pdu_deser.is_err());
if let Err(PduError::ChecksumError(raw)) = nak_pdu_deser { if let Err(PduError::Checksum(raw)) = nak_pdu_deser {
assert_eq!( assert_eq!(
raw, raw,
u16::from_be_bytes(nak_vec[nak_pdu.len_written() - 2..].try_into().unwrap()) u16::from_be_bytes(nak_vec[nak_pdu.len_written() - 2..].try_into().unwrap())

View File

@ -9,10 +9,14 @@ use crate::ByteConversionError;
use alloc::vec; use alloc::vec;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
#[cfg(feature = "alloc")]
pub use alloc_mod::*;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::{InvalidTlvTypeFieldError, TlvLvDataTooLargeError};
pub mod msg_to_user; pub mod msg_to_user;
pub const MIN_TLV_LEN: usize = 2; pub const MIN_TLV_LEN: usize = 2;
@ -39,6 +43,26 @@ pub trait GenericTlv {
} }
} }
pub trait ReadableTlv {
fn value(&self) -> &[u8];
/// Checks whether the value field is empty.
fn is_empty(&self) -> bool {
self.value().is_empty()
}
/// Helper method to retrieve the length of the value. Simply calls the [slice::len] method of
/// [Self::value]
fn len_value(&self) -> usize {
self.value().len()
}
/// Returns the full raw length, including the type and length bytes.
fn len_full(&self) -> usize {
self.len_value() + 2
}
}
pub trait WritableTlv { pub trait WritableTlv {
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError>; fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError>;
fn len_written(&self) -> usize; fn len_written(&self) -> usize;
@ -129,14 +153,14 @@ pub struct Tlv<'data> {
} }
impl<'data> Tlv<'data> { impl<'data> Tlv<'data> {
pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Tlv, TlvLvError> { pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Tlv, TlvLvDataTooLargeError> {
Ok(Tlv { Ok(Tlv {
tlv_type_field: TlvTypeField::Standard(tlv_type), tlv_type_field: TlvTypeField::Standard(tlv_type),
lv: Lv::new(data)?, lv: Lv::new(data)?,
}) })
} }
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Tlv, TlvLvError> { pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Tlv, TlvLvDataTooLargeError> {
Ok(Tlv { Ok(Tlv {
tlv_type_field: TlvTypeField::Custom(tlv_type), tlv_type_field: TlvTypeField::Custom(tlv_type),
lv: Lv::new(data)?, lv: Lv::new(data)?,
@ -151,31 +175,11 @@ impl<'data> Tlv<'data> {
} }
} }
pub fn value(&self) -> &[u8] {
self.lv.value()
}
/// Checks whether the value field is empty.
pub fn is_empty(&self) -> bool {
self.value().is_empty()
}
/// Helper method to retrieve the length of the value. Simply calls the [slice::len] method of
/// [Self::value]
pub fn len_value(&self) -> usize {
self.value().len()
}
/// Returns the full raw length, including the length byte.
pub fn len_full(&self) -> usize {
self.len_value() + 2
}
/// Creates a TLV given a raw bytestream. Please note that it is not necessary to pass the /// Creates a TLV given a raw bytestream. Please note that it is not necessary to pass the
/// bytestream with the exact size of the expected TLV. This function will take care /// bytestream with the exact size of the expected TLV. This function will take care
/// of parsing the length byte, and the length of the parsed TLV can be retrieved using /// of parsing the length byte, and the length of the parsed TLV can be retrieved using
/// [Self::len_full]. /// [Self::len_full].
pub fn from_bytes(buf: &'data [u8]) -> Result<Tlv<'data>, TlvLvError> { pub fn from_bytes(buf: &'data [u8]) -> Result<Tlv<'data>, ByteConversionError> {
generic_len_check_deserialization(buf, MIN_TLV_LEN)?; generic_len_check_deserialization(buf, MIN_TLV_LEN)?;
let mut tlv = Self { let mut tlv = Self {
tlv_type_field: TlvTypeField::from(buf[0]), tlv_type_field: TlvTypeField::from(buf[0]),
@ -192,6 +196,27 @@ impl<'data> Tlv<'data> {
pub fn raw_data(&self) -> Option<&[u8]> { pub fn raw_data(&self) -> Option<&[u8]> {
self.lv.raw_data() self.lv.raw_data()
} }
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
TlvOwned {
tlv_type_field: self.tlv_type_field,
data: self.value().to_vec(),
}
}
}
#[cfg(feature = "alloc")]
impl PartialEq<TlvOwned> for Tlv<'_> {
fn eq(&self, other: &TlvOwned) -> bool {
self.tlv_type_field == other.tlv_type_field && self.value() == other.value()
}
}
impl ReadableTlv for Tlv<'_> {
fn value(&self) -> &[u8] {
self.lv.value()
}
} }
impl WritableTlv for Tlv<'_> { impl WritableTlv for Tlv<'_> {
@ -212,18 +237,81 @@ impl GenericTlv for Tlv<'_> {
} }
} }
pub(crate) fn verify_tlv_type(raw_type: u8, expected_tlv_type: TlvType) -> Result<(), TlvLvError> { #[cfg(feature = "alloc")]
let tlv_type = TlvType::try_from(raw_type).map_err(|_| TlvLvError::InvalidTlvTypeField { pub mod alloc_mod {
found: raw_type, use super::*;
expected: Some(expected_tlv_type.into()),
})?; /// Owned variant of [Tlv] which is consequently [Clone]able and does not have a lifetime
if tlv_type != expected_tlv_type { /// associated to a data slice.
return Err(TlvLvError::InvalidTlvTypeField { #[derive(Debug, Clone, PartialEq, Eq)]
found: tlv_type as u8, #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
expected: Some(expected_tlv_type as u8), #[cfg_attr(feature = "defmt", derive(defmt::Format))]
}); pub struct TlvOwned {
pub(crate) tlv_type_field: TlvTypeField,
pub(crate) data: Vec<u8>,
}
impl TlvOwned {
pub fn new(tlv_type: TlvType, data: &[u8]) -> Self {
Self {
tlv_type_field: TlvTypeField::Standard(tlv_type),
data: data.to_vec(),
}
}
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Self {
Self {
tlv_type_field: TlvTypeField::Custom(tlv_type),
data: data.to_vec(),
}
}
/// Creates a TLV with an empty value field.
pub fn new_empty(tlv_type: TlvType) -> Self {
Self {
tlv_type_field: TlvTypeField::Standard(tlv_type),
data: Vec::new(),
}
}
}
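    // Illustrative usage sketch (not part of the original diff): an owned TLV can be created
    // without borrowing a buffer and then serialized through the WritableTlv helpers used
    // elsewhere in this diff. The TLV type and payload are arbitrary assumptions.
    fn example_owned_tlv_usage() -> Vec<u8> {
        let tlv = TlvOwned::new(TlvType::MsgToUser, &[1, 2, 3, 4]);
        // len_full is the value length plus the type and length octets.
        debug_assert_eq!(tlv.len_full(), 6);
        tlv.to_vec()
    }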
impl ReadableTlv for TlvOwned {
fn value(&self) -> &[u8] {
&self.data
}
}
impl WritableTlv for TlvOwned {
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
generic_len_check_data_serialization(buf, self.data.len(), MIN_TLV_LEN)?;
buf[0] = self.tlv_type_field.into();
buf[1] = self.data.len() as u8;
buf[2..2 + self.data.len()].copy_from_slice(&self.data);
Ok(self.len_written())
}
fn len_written(&self) -> usize {
self.data.len() + 2
}
}
impl GenericTlv for TlvOwned {
fn tlv_type_field(&self) -> TlvTypeField {
self.tlv_type_field
}
}
impl From<Tlv<'_>> for TlvOwned {
fn from(value: Tlv<'_>) -> Self {
value.to_owned()
}
}
impl PartialEq<Tlv<'_>> for TlvOwned {
fn eq(&self, other: &Tlv) -> bool {
self.tlv_type_field == other.tlv_type_field && self.data == other.value()
}
} }
Ok(())
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
@ -277,11 +365,8 @@ impl EntityIdTlv {
Self::len_check(buf)?; Self::len_check(buf)?;
self.entity_id self.entity_id
.write_to_be_bytes(&mut buf[2..2 + self.entity_id.size()])?; .write_to_be_bytes(&mut buf[2..2 + self.entity_id.size()])?;
Tlv::new(TlvType::EntityId, &buf[2..2 + self.entity_id.size()]).map_err(|e| match e { // Can't fail.
TlvLvError::ByteConversion(e) => e, Ok(Tlv::new(TlvType::EntityId, &buf[2..2 + self.entity_id.size()]).unwrap())
// All other errors are impossible.
_ => panic!("unexpected TLV error"),
})
} }
} }
@ -307,21 +392,23 @@ impl GenericTlv for EntityIdTlv {
impl<'data> TryFrom<Tlv<'data>> for EntityIdTlv { impl<'data> TryFrom<Tlv<'data>> for EntityIdTlv {
type Error = TlvLvError; type Error = TlvLvError;
fn try_from(value: Tlv) -> Result<Self, Self::Error> { fn try_from(value: Tlv) -> Result<Self, TlvLvError> {
match value.tlv_type_field { match value.tlv_type_field {
TlvTypeField::Standard(tlv_type) => { TlvTypeField::Standard(tlv_type) => {
if tlv_type != TlvType::EntityId { if tlv_type != TlvType::EntityId {
return Err(TlvLvError::InvalidTlvTypeField { return Err(InvalidTlvTypeFieldError {
found: tlv_type as u8, found: tlv_type as u8,
expected: Some(TlvType::EntityId as u8), expected: Some(TlvType::EntityId as u8),
}); }
.into());
} }
} }
TlvTypeField::Custom(val) => { TlvTypeField::Custom(val) => {
return Err(TlvLvError::InvalidTlvTypeField { return Err(InvalidTlvTypeFieldError {
found: val, found: val,
expected: Some(TlvType::EntityId as u8), expected: Some(TlvType::EntityId as u8),
}); }
.into());
} }
} }
let len_value = value.value().len(); let len_value = value.value().len();
@ -752,6 +839,23 @@ impl GenericTlv for FilestoreResponseTlv<'_, '_, '_> {
} }
} }
pub(crate) fn verify_tlv_type(
raw_type: u8,
expected_tlv_type: TlvType,
) -> Result<(), InvalidTlvTypeFieldError> {
let tlv_type = TlvType::try_from(raw_type).map_err(|_| InvalidTlvTypeFieldError {
found: raw_type,
expected: Some(expected_tlv_type.into()),
})?;
if tlv_type != expected_tlv_type {
return Err(InvalidTlvTypeFieldError {
found: tlv_type as u8,
expected: Some(expected_tlv_type as u8),
});
}
Ok(())
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@ -939,14 +1043,14 @@ mod tests {
let tlv_res = Tlv::new(TlvType::MsgToUser, &buf_too_large); let tlv_res = Tlv::new(TlvType::MsgToUser, &buf_too_large);
assert!(tlv_res.is_err()); assert!(tlv_res.is_err());
let error = tlv_res.unwrap_err(); let error = tlv_res.unwrap_err();
if let TlvLvError::DataTooLarge(size) = error { match error {
assert_eq!(size, u8::MAX as usize + 1); TlvLvDataTooLargeError(size) => {
assert_eq!( assert_eq!(size, u8::MAX as usize + 1);
error.to_string(), assert_eq!(
"data with size 256 larger than allowed 255 bytes" error.to_string(),
); "data with size 256 larger than allowed 255 bytes"
} else { );
panic!("unexpected error {:?}", error); }
} }
} }
@ -1256,7 +1360,8 @@ mod tests {
let error = EntityIdTlv::try_from(msg_to_user_tlv); let error = EntityIdTlv::try_from(msg_to_user_tlv);
assert!(error.is_err()); assert!(error.is_err());
let error = error.unwrap_err(); let error = error.unwrap_err();
if let TlvLvError::InvalidTlvTypeField { found, expected } = error { if let TlvLvError::InvalidTlvTypeField(InvalidTlvTypeFieldError { found, expected }) = error
{
assert_eq!(found, TlvType::MsgToUser as u8); assert_eq!(found, TlvType::MsgToUser as u8);
assert_eq!(expected, Some(TlvType::EntityId as u8)); assert_eq!(expected, Some(TlvType::EntityId as u8));
assert_eq!( assert_eq!(
@ -1300,4 +1405,71 @@ mod tests {
assert_eq!(tlv_as_vec[0], 20); assert_eq!(tlv_as_vec[0], 20);
assert_eq!(tlv_as_vec[1], 0); assert_eq!(tlv_as_vec[1], 0);
} }
#[test]
fn test_tlv_to_owned() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = Tlv::new(TlvType::EntityId, &buf[0..1]);
assert!(tlv_res.is_ok());
let tlv_res = tlv_res.unwrap();
let tlv_owned = tlv_res.to_owned();
assert_eq!(tlv_res, tlv_owned);
let tlv_owned_from_conversion: TlvOwned = tlv_res.into();
assert_eq!(tlv_owned_from_conversion, tlv_owned);
assert_eq!(tlv_owned_from_conversion, tlv_res);
}
#[test]
fn test_owned_tlv() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = TlvOwned::new(TlvType::EntityId, &buf[0..1]);
assert_eq!(
tlv_res.tlv_type_field(),
TlvTypeField::Standard(TlvType::EntityId)
);
assert_eq!(tlv_res.len_full(), 3);
assert_eq!(tlv_res.value().len(), 1);
assert_eq!(tlv_res.len_value(), 1);
assert!(!tlv_res.is_empty());
assert_eq!(tlv_res.value()[0], 5);
}
#[test]
fn test_owned_tlv_empty() {
let tlv_res = TlvOwned::new_empty(TlvType::FlowLabel);
assert_eq!(
tlv_res.tlv_type_field(),
TlvTypeField::Standard(TlvType::FlowLabel)
);
assert_eq!(tlv_res.len_full(), 2);
assert_eq!(tlv_res.value().len(), 0);
assert_eq!(tlv_res.len_value(), 0);
assert!(tlv_res.is_empty());
}
#[test]
fn test_owned_tlv_custom_type() {
let tlv_res = TlvOwned::new_with_custom_type(32, &[]);
assert_eq!(tlv_res.tlv_type_field(), TlvTypeField::Custom(32));
assert_eq!(tlv_res.len_full(), 2);
assert_eq!(tlv_res.value().len(), 0);
assert_eq!(tlv_res.len_value(), 0);
assert!(tlv_res.is_empty());
}
#[test]
fn test_owned_tlv_conversion_to_bytes() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = Tlv::new(TlvType::EntityId, &buf[0..1]);
assert!(tlv_res.is_ok());
let tlv_res = tlv_res.unwrap();
let tlv_owned_from_conversion: TlvOwned = tlv_res.into();
assert_eq!(tlv_res.to_vec(), tlv_owned_from_conversion.to_vec());
}
} }

View File

@ -1,6 +1,8 @@
//! Abstractions for the Message to User CFDP TLV subtype. //! Abstractions for the Message to User CFDP TLV subtype.
use super::{GenericTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv}; #[cfg(feature = "alloc")]
use crate::ByteConversionError; use super::TlvOwned;
use super::{GenericTlv, ReadableTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv};
use crate::{cfdp::{InvalidTlvTypeFieldError, TlvLvDataTooLargeError}, ByteConversionError};
use delegate::delegate; use delegate::delegate;
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
@ -10,7 +12,7 @@ pub struct MsgToUserTlv<'data> {
impl<'data> MsgToUserTlv<'data> { impl<'data> MsgToUserTlv<'data> {
/// Create a new message to user TLV where the type field is set correctly. /// Create a new message to user TLV where the type field is set correctly.
pub fn new(value: &'data [u8]) -> Result<MsgToUserTlv<'data>, TlvLvError> { pub fn new(value: &'data [u8]) -> Result<MsgToUserTlv<'data>, TlvLvDataTooLargeError> {
Ok(Self { Ok(Self {
tlv: Tlv::new(TlvType::MsgToUser, value)?, tlv: Tlv::new(TlvType::MsgToUser, value)?,
}) })
@ -60,21 +62,36 @@ impl<'data> MsgToUserTlv<'data> {
match msg_to_user.tlv.tlv_type_field() { match msg_to_user.tlv.tlv_type_field() {
TlvTypeField::Standard(tlv_type) => { TlvTypeField::Standard(tlv_type) => {
if tlv_type != TlvType::MsgToUser { if tlv_type != TlvType::MsgToUser {
return Err(TlvLvError::InvalidTlvTypeField { return Err(InvalidTlvTypeFieldError {
found: tlv_type as u8, found: tlv_type as u8,
expected: Some(TlvType::MsgToUser as u8), expected: Some(TlvType::MsgToUser as u8),
}); }.into());
} }
} }
TlvTypeField::Custom(raw) => { TlvTypeField::Custom(raw) => {
return Err(TlvLvError::InvalidTlvTypeField { return Err(InvalidTlvTypeFieldError {
found: raw, found: raw,
expected: Some(TlvType::MsgToUser as u8), expected: Some(TlvType::MsgToUser as u8),
}); }.into());
} }
} }
Ok(msg_to_user) Ok(msg_to_user)
} }
pub fn to_tlv(&self) -> Tlv<'data> {
self.tlv
}
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
self.tlv.to_owned()
}
}
impl<'a> From<MsgToUserTlv<'a>> for Tlv<'a> {
fn from(value: MsgToUserTlv<'a>) -> Tlv<'a> {
value.to_tlv()
}
} }
impl WritableTlv for MsgToUserTlv<'_> { impl WritableTlv for MsgToUserTlv<'_> {
@ -139,6 +156,40 @@ mod tests {
); );
} }
#[test]
fn test_msg_to_user_type_reduction() {
let custom_value: [u8; 4] = [1, 2, 3, 4];
let msg_to_user = MsgToUserTlv::new(&custom_value).unwrap();
let tlv = msg_to_user.to_tlv();
assert_eq!(
tlv.tlv_type_field(),
TlvTypeField::Standard(TlvType::MsgToUser)
);
assert_eq!(tlv.value(), custom_value);
}
#[test]
fn test_msg_to_user_to_tlv() {
let custom_value: [u8; 4] = [1, 2, 3, 4];
let msg_to_user = MsgToUserTlv::new(&custom_value).unwrap();
let tlv: Tlv = msg_to_user.into();
assert_eq!(msg_to_user.to_tlv(), tlv);
}
#[test]
fn test_msg_to_user_owner_converter() {
let custom_value: [u8; 4] = [1, 2, 3, 4];
let msg_to_user = MsgToUserTlv::new(&custom_value).unwrap();
let tlv = msg_to_user.to_owned();
assert_eq!(
tlv.tlv_type_field(),
TlvTypeField::Standard(TlvType::MsgToUser)
);
assert_eq!(tlv.value(), custom_value);
}
#[test] #[test]
fn test_reserved_msg_deserialization() { fn test_reserved_msg_deserialization() {
let custom_value: [u8; 3] = [1, 2, 3]; let custom_value: [u8; 3] = [1, 2, 3];
@ -154,9 +205,9 @@ mod tests {
fn test_reserved_msg_deserialization_invalid_type() { fn test_reserved_msg_deserialization_invalid_type() {
let trash: [u8; 5] = [TlvType::FlowLabel as u8, 3, 1, 2, 3]; let trash: [u8; 5] = [TlvType::FlowLabel as u8, 3, 1, 2, 3];
let error = MsgToUserTlv::from_bytes(&trash).unwrap_err(); let error = MsgToUserTlv::from_bytes(&trash).unwrap_err();
if let TlvLvError::InvalidTlvTypeField { found, expected } = error { if let TlvLvError::InvalidTlvTypeField(inner) = error {
assert_eq!(found, TlvType::FlowLabel as u8); assert_eq!(inner.found, TlvType::FlowLabel as u8);
assert_eq!(expected, Some(TlvType::MsgToUser as u8)); assert_eq!(inner.expected, Some(TlvType::MsgToUser as u8));
} else { } else {
panic!("Wrong error type returned: {:?}", error); panic!("Wrong error type returned: {:?}", error);
} }

View File

@ -1,18 +1,16 @@
//! Common definitions and helpers required to create PUS TMTC packets according to //! Common definitions and helpers required to create PUS TMTC packets according to
//! [ECSS-E-ST-70-41C](https://ecss.nl/standard/ecss-e-st-70-41c-space-engineering-telemetry-and-telecommand-packet-utilization-15-april-2016/) //! [ECSS-E-ST-70-41C](https://ecss.nl/standard/ecss-e-st-70-41c-space-engineering-telemetry-and-telecommand-packet-utilization-15-april-2016/)
//! //!
//! You can find the PUS telecommand definitions in the [tc] module and ithe PUS telemetry definitions //! You can find the PUS telecommand types in the [tc] module and the PUS telemetry
//! inside the [tm] module. //! types inside the [tm] module.
use crate::{ByteConversionError, CcsdsPacket, CRC_CCITT_FALSE}; use crate::{ByteConversionError, CcsdsPacket, CRC_CCITT_FALSE};
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
use core::fmt::{Debug, Display, Formatter}; use core::fmt::Debug;
use core::mem::size_of; use core::mem::size_of;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::error::Error;
pub mod event; pub mod event;
pub mod hk; pub mod hk;
@ -22,7 +20,6 @@ pub mod tm;
pub mod verification; pub mod verification;
pub type CrcType = u16; pub type CrcType = u16;
pub const CCSDS_HEADER_LEN: usize = size_of::<crate::zc::SpHeader>();
#[derive(Debug, Copy, Clone, Eq, PartialEq, IntoPrimitive, TryFromPrimitive)] #[derive(Debug, Copy, Clone, Eq, PartialEq, IntoPrimitive, TryFromPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
@ -149,50 +146,19 @@ pub enum PfcReal {
DoubleMilStd = 4, DoubleMilStd = 4,
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))] #[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum PusError { pub enum PusError {
#[error("PUS version {0:?} not supported")]
VersionNotSupported(PusVersion), VersionNotSupported(PusVersion),
#[error("checksum verification for crc16 {0:#06x} failed")]
ChecksumFailure(u16), ChecksumFailure(u16),
/// CRC16 needs to be calculated first /// CRC16 needs to be calculated first
CrcCalculationMissing, //#[error("crc16 was not calculated")]
ByteConversion(ByteConversionError), //CrcCalculationMissing,
} #[error("pus error: {0}")]
ByteConversion(#[from] ByteConversionError),
impl Display for PusError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
PusError::VersionNotSupported(v) => {
write!(f, "PUS version {v:?} not supported")
}
PusError::ChecksumFailure(crc) => {
write!(f, "checksum verification for crc16 {crc:#06x} failed")
}
PusError::CrcCalculationMissing => {
write!(f, "crc16 was not calculated")
}
PusError::ByteConversion(e) => {
write!(f, "pus error: {e}")
}
}
}
}
#[cfg(feature = "std")]
impl Error for PusError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
if let PusError::ByteConversion(e) = self {
return Some(e);
}
None
}
}
impl From<ByteConversionError> for PusError {
fn from(e: ByteConversionError) -> Self {
PusError::ByteConversion(e)
}
} }
/// Generic trait to describe common attributes for both PUS Telecommands (TC) and PUS Telemetry /// Generic trait to describe common attributes for both PUS Telecommands (TC) and PUS Telemetry
@ -246,7 +212,7 @@ pub(crate) fn verify_crc16_ccitt_false_from_raw_to_pus_error(
crc16: u16, crc16: u16,
) -> Result<(), PusError> { ) -> Result<(), PusError> {
verify_crc16_ccitt_false_from_raw(raw_data) verify_crc16_ccitt_false_from_raw(raw_data)
.then(|| ()) .then_some(())
.ok_or(PusError::ChecksumFailure(crc16)) .ok_or(PusError::ChecksumFailure(crc16))
} }
@ -262,9 +228,13 @@ pub(crate) fn verify_crc16_ccitt_false_from_raw(raw_data: &[u8]) -> bool {
macro_rules! ccsds_impl { macro_rules! ccsds_impl {
() => { () => {
delegate!(to self.sp_header { delegate!(to self.sp_header {
#[inline]
fn ccsds_version(&self) -> u8; fn ccsds_version(&self) -> u8;
#[inline]
fn packet_id(&self) -> crate::PacketId; fn packet_id(&self) -> crate::PacketId;
#[inline]
fn psc(&self) -> crate::PacketSequenceCtrl; fn psc(&self) -> crate::PacketSequenceCtrl;
#[inline]
fn data_len(&self) -> u16; fn data_len(&self) -> u16;
}); });
} }
@ -273,8 +243,11 @@ macro_rules! ccsds_impl {
macro_rules! sp_header_impls { macro_rules! sp_header_impls {
() => { () => {
delegate!(to self.sp_header { delegate!(to self.sp_header {
#[inline]
pub fn set_apid(&mut self, apid: u16) -> bool; pub fn set_apid(&mut self, apid: u16) -> bool;
#[inline]
pub fn set_seq_count(&mut self, seq_count: u16) -> bool; pub fn set_seq_count(&mut self, seq_count: u16) -> bool;
#[inline]
pub fn set_seq_flags(&mut self, seq_flag: SequenceFlags); pub fn set_seq_flags(&mut self, seq_flag: SequenceFlags);
}); });
} }
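Both macros forward the listed accessors to the wrapped SpHeader via the delegate crate, and the added #[inline] attributes are simply passed through to the generated methods. A reduced sketch of the same delegation pattern; Inner and Wrapper are made up for illustration:

use delegate::delegate;

struct Inner {
    data_len: u16,
}

impl Inner {
    #[inline]
    fn data_len(&self) -> u16 {
        self.data_len
    }
}

struct Wrapper {
    inner: Inner,
}

impl Wrapper {
    delegate! {
        to self.inner {
            // Attributes like #[inline] are applied to the generated forwarding method.
            #[inline]
            fn data_len(&self) -> u16;
        }
    }
}

fn main() {
    let wrapper = Wrapper {
        inner: Inner { data_len: 7 },
    };
    assert_eq!(wrapper.data_len(), 7);
}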


@ -54,6 +54,7 @@ pub enum SchedStatus {
} }
impl From<bool> for SchedStatus { impl From<bool> for SchedStatus {
#[inline]
fn from(value: bool) -> Self { fn from(value: bool) -> Self {
if value { if value {
SchedStatus::Enabled SchedStatus::Enabled


@ -9,9 +9,11 @@
//! use spacepackets::ecss::tc::{PusTcCreator, PusTcReader, PusTcSecondaryHeader}; //! use spacepackets::ecss::tc::{PusTcCreator, PusTcReader, PusTcSecondaryHeader};
//! //!
//! // Create a ping telecommand with no user application data //! // Create a ping telecommand with no user application data
//! let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).unwrap(); //! let pus_tc = PusTcCreator::new_no_app_data(
//! let tc_header = PusTcSecondaryHeader::new_simple(17, 1); //! SpHeader::new_from_apid(0x02),
//! let pus_tc = PusTcCreator::new_no_app_data(&mut sph, tc_header, true); //! PusTcSecondaryHeader::new_simple(17, 1),
//! true
//! );
//! println!("{:?}", pus_tc); //! println!("{:?}", pus_tc);
//! assert_eq!(pus_tc.service(), 17); //! assert_eq!(pus_tc.service(), 17);
//! assert_eq!(pus_tc.subservice(), 1); //! assert_eq!(pus_tc.subservice(), 1);
@ -43,7 +45,7 @@ use delegate::delegate;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy::AsBytes; use zerocopy::{FromBytes, IntoBytes};
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
@ -84,9 +86,9 @@ pub trait GenericPusTcSecondaryHeader {
pub mod zc { pub mod zc {
use crate::ecss::tc::GenericPusTcSecondaryHeader; use crate::ecss::tc::GenericPusTcSecondaryHeader;
use crate::ecss::{PusError, PusVersion}; use crate::ecss::{PusError, PusVersion};
use zerocopy::{AsBytes, FromBytes, FromZeroes, NetworkEndian, Unaligned, U16}; use zerocopy::{FromBytes, Immutable, IntoBytes, NetworkEndian, Unaligned, U16};
#[derive(FromZeroes, FromBytes, AsBytes, Unaligned)] #[derive(FromBytes, IntoBytes, Immutable, Unaligned)]
#[repr(C)] #[repr(C)]
pub struct PusTcSecondaryHeader { pub struct PusTcSecondaryHeader {
version_ack: u8, version_ack: u8,
@ -111,36 +113,31 @@ pub mod zc {
} }
impl GenericPusTcSecondaryHeader for PusTcSecondaryHeader { impl GenericPusTcSecondaryHeader for PusTcSecondaryHeader {
#[inline]
fn pus_version(&self) -> PusVersion { fn pus_version(&self) -> PusVersion {
PusVersion::try_from(self.version_ack >> 4 & 0b1111).unwrap_or(PusVersion::Invalid) PusVersion::try_from(self.version_ack >> 4 & 0b1111).unwrap_or(PusVersion::Invalid)
} }
#[inline]
fn ack_flags(&self) -> u8 { fn ack_flags(&self) -> u8 {
self.version_ack & 0b1111 self.version_ack & 0b1111
} }
#[inline]
fn service(&self) -> u8 { fn service(&self) -> u8 {
self.service self.service
} }
#[inline]
fn subservice(&self) -> u8 { fn subservice(&self) -> u8 {
self.subservice self.subservice
} }
#[inline]
fn source_id(&self) -> u16 { fn source_id(&self) -> u16 {
self.source_id.get() self.source_id.get()
} }
} }
impl PusTcSecondaryHeader {
pub fn write_to_bytes(&self, slice: &mut [u8]) -> Option<()> {
self.write_to(slice)
}
pub fn from_bytes(slice: &[u8]) -> Option<Self> {
Self::read_from(slice)
}
}
} }
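The removed write_to_bytes and from_bytes helpers are covered directly by zerocopy 0.8: IntoBytes::write_to and FromBytes::read_from_bytes now return Results instead of Options. A small sketch using the same derive set as the zero-copy headers; DemoHeader is a stand-in type, and the exact zerocopy error types are glossed over:

use zerocopy::{FromBytes, Immutable, IntoBytes, NetworkEndian, Unaligned, U16};

#[derive(FromBytes, IntoBytes, Immutable, Unaligned)]
#[repr(C)]
struct DemoHeader {
    version_ack: u8,
    service: u8,
    subservice: u8,
    source_id: U16<NetworkEndian>,
}

fn main() {
    let header = DemoHeader {
        version_ack: 0x2F,
        service: 17,
        subservice: 1,
        source_id: U16::new(0x1234),
    };
    let mut buf = [0u8; 5];
    // Replaces the removed write_to_bytes helper.
    header
        .write_to(&mut buf)
        .expect("buffer has the exact header size");
    // Replaces the removed from_bytes helper.
    let read_back =
        DemoHeader::read_from_bytes(&buf).expect("buffer has the exact header size");
    assert_eq!(read_back.source_id.get(), 0x1234);
}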
#[derive(PartialEq, Eq, Copy, Clone, Debug)] #[derive(PartialEq, Eq, Copy, Clone, Debug)]
@ -155,22 +152,27 @@ pub struct PusTcSecondaryHeader {
} }
impl GenericPusTcSecondaryHeader for PusTcSecondaryHeader { impl GenericPusTcSecondaryHeader for PusTcSecondaryHeader {
#[inline]
fn pus_version(&self) -> PusVersion { fn pus_version(&self) -> PusVersion {
self.version self.version
} }
#[inline]
fn ack_flags(&self) -> u8 { fn ack_flags(&self) -> u8 {
self.ack self.ack
} }
#[inline]
fn service(&self) -> u8 { fn service(&self) -> u8 {
self.service self.service
} }
#[inline]
fn subservice(&self) -> u8 { fn subservice(&self) -> u8 {
self.subservice self.subservice
} }
#[inline]
fn source_id(&self) -> u16 { fn source_id(&self) -> u16 {
self.source_id self.source_id
} }
@ -191,6 +193,7 @@ impl TryFrom<zc::PusTcSecondaryHeader> for PusTcSecondaryHeader {
} }
impl PusTcSecondaryHeader { impl PusTcSecondaryHeader {
#[inline]
pub fn new_simple(service: u8, subservice: u8) -> Self { pub fn new_simple(service: u8, subservice: u8) -> Self {
PusTcSecondaryHeader { PusTcSecondaryHeader {
service, service,
@ -201,6 +204,7 @@ impl PusTcSecondaryHeader {
} }
} }
#[inline]
pub fn new(service: u8, subservice: u8, ack: u8, source_id: u16) -> Self { pub fn new(service: u8, subservice: u8, ack: u8, source_id: u16) -> Self {
PusTcSecondaryHeader { PusTcSecondaryHeader {
service, service,
@ -223,35 +227,36 @@ impl PusTcSecondaryHeader {
#[derive(Copy, Clone, Debug, PartialEq, Eq)] #[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))] #[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct PusTcCreator<'raw_data> { pub struct PusTcCreator<'app_data> {
sp_header: SpHeader, sp_header: SpHeader,
pub sec_header: PusTcSecondaryHeader, pub sec_header: PusTcSecondaryHeader,
app_data: &'raw_data [u8], app_data: &'app_data [u8],
} }
impl<'raw_data> PusTcCreator<'raw_data> { impl<'app_data> PusTcCreator<'app_data> {
/// Generates a new struct instance. /// Generates a new struct instance.
/// ///
/// # Arguments /// # Arguments
/// ///
/// * `sp_header` - Space packet header information. The correct packet type will be set /// * `sp_header` - Space packet header information. The correct packet type and the secondary
/// automatically /// header flag are set correctly by the constructor.
/// * `sec_header` - Information contained in the data field header, including the service /// * `sec_header` - Information contained in the data field header, including the service
/// and subservice type /// and subservice type
/// * `app_data` - Custom application data /// * `app_data` - Custom application data
/// * `set_ccsds_len` - Can be used to automatically update the CCSDS space packet data length /// * `set_ccsds_len` - Can be used to automatically update the CCSDS space packet data length
/// field. If this is not set to true, [Self::update_ccsds_data_len] can be called to set /// field. If this is not set to true, [Self::update_ccsds_data_len] can be called to set
/// the correct value to this field manually /// the correct value to this field manually
#[inline]
pub fn new( pub fn new(
sp_header: &mut SpHeader, mut sp_header: SpHeader,
sec_header: PusTcSecondaryHeader, sec_header: PusTcSecondaryHeader,
app_data: &'raw_data [u8], app_data: &'app_data [u8],
set_ccsds_len: bool, set_ccsds_len: bool,
) -> Self { ) -> Self {
sp_header.set_packet_type(PacketType::Tc); sp_header.set_packet_type(PacketType::Tc);
sp_header.set_sec_header_flag(); sp_header.set_sec_header_flag();
let mut pus_tc = Self { let mut pus_tc = Self {
sp_header: *sp_header, sp_header,
app_data, app_data,
sec_header, sec_header,
}; };
@ -263,11 +268,12 @@ impl<'raw_data> PusTcCreator<'raw_data> {
/// Simplified version of the [Self::new] function which only requires specifying the service /// Simplified version of the [Self::new] function which only requires specifying the service
/// and subservice instead of the full PUS TC secondary header. /// and subservice instead of the full PUS TC secondary header.
#[inline]
pub fn new_simple( pub fn new_simple(
sph: &mut SpHeader, sph: SpHeader,
service: u8, service: u8,
subservice: u8, subservice: u8,
app_data: &'raw_data [u8], app_data: &'app_data [u8],
set_ccsds_len: bool, set_ccsds_len: bool,
) -> Self { ) -> Self {
Self::new( Self::new(
@ -278,18 +284,26 @@ impl<'raw_data> PusTcCreator<'raw_data> {
) )
} }
#[inline]
pub fn new_no_app_data( pub fn new_no_app_data(
sp_header: &mut SpHeader, sp_header: SpHeader,
sec_header: PusTcSecondaryHeader, sec_header: PusTcSecondaryHeader,
set_ccsds_len: bool, set_ccsds_len: bool,
) -> Self { ) -> Self {
Self::new(sp_header, sec_header, &[], set_ccsds_len) Self::new(sp_header, sec_header, &[], set_ccsds_len)
} }
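Because the constructors now take the SpHeader by value, the mutable header binding required by the old API disappears from calling code. A compact sketch of creating and serializing a ping TC with the updated signatures:

use spacepackets::SpHeader;
use spacepackets::ecss::WritablePusPacket;
use spacepackets::ecss::tc::{PusTcCreator, PusTcSecondaryHeader};

fn main() {
    let pus_tc = PusTcCreator::new_no_app_data(
        SpHeader::new_from_apid(0x02),
        PusTcSecondaryHeader::new_simple(17, 1),
        true,
    );
    let mut buf = [0u8; 32];
    let written = pus_tc
        .write_to_bytes(&mut buf)
        .expect("writing ping TC failed");
    // 6 byte CCSDS header + 5 byte PUS TC secondary header + 2 byte CRC16.
    assert_eq!(written, 13);
}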
#[inline]
pub fn sp_header(&self) -> &SpHeader { pub fn sp_header(&self) -> &SpHeader {
&self.sp_header &self.sp_header
} }
#[inline]
pub fn sp_header_mut(&mut self) -> &mut SpHeader {
&mut self.sp_header
}
#[inline]
pub fn set_ack_field(&mut self, ack: u8) -> bool { pub fn set_ack_field(&mut self, ack: u8) -> bool {
if ack > 0b1111 { if ack > 0b1111 {
return false; return false;
@ -298,6 +312,7 @@ impl<'raw_data> PusTcCreator<'raw_data> {
true true
} }
#[inline]
pub fn set_source_id(&mut self, source_id: u16) { pub fn set_source_id(&mut self, source_id: u16) {
self.sec_header.source_id = source_id; self.sec_header.source_id = source_id;
} }
@ -310,6 +325,7 @@ impl<'raw_data> PusTcCreator<'raw_data> {
/// If this was not done or the application data is set or changed after construction, /// If this was not done or the application data is set or changed after construction,
/// this function needs to be called to ensure that the data length field of the CCSDS header /// this function needs to be called to ensure that the data length field of the CCSDS header
/// is set correctly. /// is set correctly.
#[inline]
pub fn update_ccsds_data_len(&mut self) { pub fn update_ccsds_data_len(&mut self) {
self.sp_header.data_len = self.sp_header.data_len =
self.len_written() as u16 - size_of::<crate::zc::SpHeader>() as u16 - 1; self.len_written() as u16 - size_of::<crate::zc::SpHeader>() as u16 - 1;
@ -345,6 +361,7 @@ impl<'raw_data> PusTcCreator<'raw_data> {
} }
impl WritablePusPacket for PusTcCreator<'_> { impl WritablePusPacket for PusTcCreator<'_> {
#[inline]
fn len_written(&self) -> usize { fn len_written(&self) -> usize {
PUS_TC_MIN_LEN_WITHOUT_APP_DATA + self.app_data.len() PUS_TC_MIN_LEN_WITHOUT_APP_DATA + self.app_data.len()
} }
@ -365,8 +382,8 @@ impl WritablePusPacket for PusTcCreator<'_> {
curr_idx += CCSDS_HEADER_LEN; curr_idx += CCSDS_HEADER_LEN;
let sec_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap(); let sec_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap();
sec_header sec_header
.write_to_bytes(&mut slice[curr_idx..curr_idx + tc_header_len]) .write_to(&mut slice[curr_idx..curr_idx + tc_header_len])
.ok_or(ByteConversionError::ZeroCopyToError)?; .map_err(|_| ByteConversionError::ZeroCopyToError)?;
curr_idx += tc_header_len; curr_idx += tc_header_len;
slice[curr_idx..curr_idx + self.app_data.len()].copy_from_slice(self.app_data); slice[curr_idx..curr_idx + self.app_data.len()].copy_from_slice(self.app_data);
@ -385,15 +402,20 @@ impl CcsdsPacket for PusTcCreator<'_> {
impl PusPacket for PusTcCreator<'_> { impl PusPacket for PusTcCreator<'_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
}); });
#[inline]
fn user_data(&self) -> &[u8] { fn user_data(&self) -> &[u8] {
self.app_data self.app_data
} }
#[inline]
fn crc16(&self) -> Option<u16> { fn crc16(&self) -> Option<u16> {
Some(self.calc_own_crc16()) Some(self.calc_own_crc16())
} }
@ -401,10 +423,15 @@ impl PusPacket for PusTcCreator<'_> {
impl GenericPusTcSecondaryHeader for PusTcCreator<'_> { impl GenericPusTcSecondaryHeader for PusTcCreator<'_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
#[inline]
fn source_id(&self) -> u16; fn source_id(&self) -> u16;
#[inline]
fn ack_flags(&self) -> u8; fn ack_flags(&self) -> u8;
}); });
} }
@ -465,10 +492,10 @@ impl<'raw_data> PusTcReader<'raw_data> {
} }
.into()); .into());
} }
let sec_header = zc::PusTcSecondaryHeader::from_bytes( let sec_header = zc::PusTcSecondaryHeader::read_from_bytes(
&slice[current_idx..current_idx + PUC_TC_SECONDARY_HEADER_LEN], &slice[current_idx..current_idx + PUC_TC_SECONDARY_HEADER_LEN],
) )
.ok_or(ByteConversionError::ZeroCopyFromError)?; .map_err(|_| ByteConversionError::ZeroCopyFromError)?;
current_idx += PUC_TC_SECONDARY_HEADER_LEN; current_idx += PUC_TC_SECONDARY_HEADER_LEN;
let raw_data = &slice[0..total_len]; let raw_data = &slice[0..total_len];
let pus_tc = Self { let pus_tc = Self {
@ -482,24 +509,29 @@ impl<'raw_data> PusTcReader<'raw_data> {
Ok((pus_tc, total_len)) Ok((pus_tc, total_len))
} }
#[inline]
pub fn app_data(&self) -> &[u8] { pub fn app_data(&self) -> &[u8] {
self.user_data() self.user_data()
} }
#[inline]
pub fn raw_data(&self) -> &[u8] { pub fn raw_data(&self) -> &[u8] {
self.raw_data self.raw_data
} }
#[inline]
pub fn len_packed(&self) -> usize { pub fn len_packed(&self) -> usize {
self.sp_header.total_len() self.sp_header.total_len()
} }
#[inline]
pub fn sp_header(&self) -> &SpHeader { pub fn sp_header(&self) -> &SpHeader {
&self.sp_header &self.sp_header
} }
} }
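On the reading side, assuming PusTcReader::new takes the raw slice and, as the Ok((pus_tc, total_len)) return above suggests, yields the parsed packet together with its total length, a round-trip sketch looks like this:

use spacepackets::SpHeader;
use spacepackets::ecss::{PusPacket, WritablePusPacket};
use spacepackets::ecss::tc::{PusTcCreator, PusTcReader, PusTcSecondaryHeader};

fn main() {
    let creator = PusTcCreator::new_no_app_data(
        SpHeader::new_from_apid(0x02),
        PusTcSecondaryHeader::new_simple(17, 1),
        true,
    );
    let mut buf = [0u8; 32];
    let written = creator.write_to_bytes(&mut buf).expect("writing TC failed");

    let (tc, read_len) = PusTcReader::new(&buf[..written]).expect("parsing TC failed");
    assert_eq!(read_len, written);
    assert_eq!(tc.service(), 17);
    assert_eq!(tc.subservice(), 1);
    assert!(tc.app_data().is_empty());
}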
impl PartialEq for PusTcReader<'_> { impl PartialEq for PusTcReader<'_> {
#[inline]
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.raw_data == other.raw_data self.raw_data == other.raw_data
} }
@ -511,15 +543,20 @@ impl CcsdsPacket for PusTcReader<'_> {
impl PusPacket for PusTcReader<'_> { impl PusPacket for PusTcReader<'_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
}); });
#[inline]
fn user_data(&self) -> &[u8] { fn user_data(&self) -> &[u8] {
self.app_data self.app_data
} }
#[inline]
fn crc16(&self) -> Option<u16> { fn crc16(&self) -> Option<u16> {
Some(self.crc16) Some(self.crc16)
} }
@ -527,10 +564,15 @@ impl PusPacket for PusTcReader<'_> {
impl GenericPusTcSecondaryHeader for PusTcReader<'_> { impl GenericPusTcSecondaryHeader for PusTcReader<'_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
#[inline]
fn source_id(&self) -> u16; fn source_id(&self) -> u16;
#[inline]
fn ack_flags(&self) -> u8; fn ack_flags(&self) -> u8;
}); });
} }
@ -568,19 +610,19 @@ mod tests {
use postcard::{from_bytes, to_allocvec}; use postcard::{from_bytes, to_allocvec};
fn base_ping_tc_full_ctor() -> PusTcCreator<'static> { fn base_ping_tc_full_ctor() -> PusTcCreator<'static> {
let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).unwrap(); let sph = SpHeader::new_for_unseg_tc_checked(0x02, 0x34, 0).unwrap();
let tc_header = PusTcSecondaryHeader::new_simple(17, 1); let tc_header = PusTcSecondaryHeader::new_simple(17, 1);
PusTcCreator::new_no_app_data(&mut sph, tc_header, true) PusTcCreator::new_no_app_data(sph, tc_header, true)
} }
fn base_ping_tc_simple_ctor() -> PusTcCreator<'static> { fn base_ping_tc_simple_ctor() -> PusTcCreator<'static> {
let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).unwrap(); let sph = SpHeader::new_for_unseg_tc_checked(0x02, 0x34, 0).unwrap();
PusTcCreator::new_simple(&mut sph, 17, 1, &[], true) PusTcCreator::new_simple(sph, 17, 1, &[], true)
} }
fn base_ping_tc_simple_ctor_with_app_data(app_data: &'static [u8]) -> PusTcCreator<'static> { fn base_ping_tc_simple_ctor_with_app_data(app_data: &'static [u8]) -> PusTcCreator<'static> {
let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).unwrap(); let sph = SpHeader::new_for_unseg_tc_checked(0x02, 0x34, 0).unwrap();
PusTcCreator::new_simple(&mut sph, 17, 1, app_data, true) PusTcCreator::new_simple(sph, 17, 1, app_data, true)
} }
#[test] #[test]
@ -636,8 +678,8 @@ mod tests {
#[test] #[test]
fn test_update_func() { fn test_update_func() {
let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).unwrap(); let sph = SpHeader::new_for_unseg_tc_checked(0x02, 0x34, 0).unwrap();
let mut tc = PusTcCreator::new_simple(&mut sph, 17, 1, &[], false); let mut tc = PusTcCreator::new_simple(sph, 17, 1, &[], false);
assert_eq!(tc.data_len(), 0); assert_eq!(tc.data_len(), 0);
tc.update_ccsds_data_len(); tc.update_ccsds_data_len();
assert_eq!(tc.data_len(), 6); assert_eq!(tc.data_len(), 6);
@ -809,7 +851,8 @@ mod tests {
if !has_user_data { if !has_user_data {
assert!(tc.user_data().is_empty()); assert!(tc.user_data().is_empty());
} }
let mut comp_header = SpHeader::tc_unseg(0x02, 0x34, exp_full_len as u16 - 7).unwrap(); let mut comp_header =
SpHeader::new_for_unseg_tc_checked(0x02, 0x34, exp_full_len as u16 - 7).unwrap();
comp_header.set_sec_header_flag(); comp_header.set_sec_header_flag();
assert_eq!(*tc.sp_header(), comp_header); assert_eq!(*tc.sp_header(), comp_header);
} }
@ -820,7 +863,8 @@ mod tests {
assert!(tc.user_data().is_empty()); assert!(tc.user_data().is_empty());
} }
assert_eq!(tc.len_packed(), exp_full_len); assert_eq!(tc.len_packed(), exp_full_len);
let mut comp_header = SpHeader::tc_unseg(0x02, 0x34, exp_full_len as u16 - 7).unwrap(); let mut comp_header =
SpHeader::new_for_unseg_tc_checked(0x02, 0x34, exp_full_len as u16 - 7).unwrap();
comp_header.set_sec_header_flag(); comp_header.set_sec_header_flag();
assert_eq!(*tc.sp_header(), comp_header); assert_eq!(*tc.sp_header(), comp_header);
} }


@ -1,5 +1,47 @@
//! This module contains all components required to create ECSS PUS C telemetry packets according //! This module contains all components required to create ECSS PUS C telemetry packets according
//! to [ECSS-E-ST-70-41C](https://ecss.nl/standard/ecss-e-st-70-41c-space-engineering-telemetry-and-telecommand-packet-utilization-15-april-2016/). //! to [ECSS-E-ST-70-41C](https://ecss.nl/standard/ecss-e-st-70-41c-space-engineering-telemetry-and-telecommand-packet-utilization-15-april-2016/).
//!
//! # Examples
//!
//! ```rust
//! use spacepackets::time::TimeWriter;
//! use spacepackets::time::cds::CdsTime;
//! use spacepackets::{CcsdsPacket, SpHeader};
//! use spacepackets::ecss::{PusPacket, WritablePusPacket};
//! use spacepackets::ecss::tm::{PusTmCreator, PusTmReader, PusTmSecondaryHeader};
//!
//! let mut time_buf: [u8; 7] = [0; 7];
//! let time_now = CdsTime::now_with_u16_days().expect("creating CDS timestamp failed");
//! // This can definitely hold the timestamp, so it is okay to unwrap.
//! time_now.write_to_bytes(&mut time_buf).unwrap();
//!
//! // Create a ping telemetry with no user source data
//! let ping_tm = PusTmCreator::new_no_source_data(
//! SpHeader::new_from_apid(0x02),
//! PusTmSecondaryHeader::new_simple(17, 2, &time_buf),
//! true
//! );
//! println!("{:?}", ping_tm);
//! assert_eq!(ping_tm.service(), 17);
//! assert_eq!(ping_tm.subservice(), 2);
//! assert_eq!(ping_tm.apid(), 0x02);
//!
//! // Serialize TM into a raw buffer
//! let mut test_buf: [u8; 32] = [0; 32];
//! let written_size = ping_tm
//! .write_to_bytes(test_buf.as_mut_slice())
//! .expect("Error writing TC to buffer");
//! assert_eq!(written_size, 22);
//! println!("{:?}", &test_buf[0..written_size]);
//!
//! // Deserialize from the raw byte representation
//! let (ping_tm_reader, read_size) = PusTmReader::new(&test_buf, 7).expect("Deserialization failed");
//! assert_eq!(written_size, read_size);
//! assert_eq!(ping_tm_reader.service(), 17);
//! assert_eq!(ping_tm_reader.subservice(), 2);
//! assert_eq!(ping_tm_reader.apid(), 0x02);
//! assert_eq!(ping_tm_reader.timestamp(), &time_buf);
//! ```
use crate::ecss::{ use crate::ecss::{
calc_pus_crc16, ccsds_impl, crc_from_raw_data, sp_header_impls, user_data_from_raw, calc_pus_crc16, ccsds_impl, crc_from_raw_data, sp_header_impls, user_data_from_raw,
verify_crc16_ccitt_false_from_raw_to_pus_error, CrcType, PusError, PusPacket, PusVersion, verify_crc16_ccitt_false_from_raw_to_pus_error, CrcType, PusError, PusPacket, PusVersion,
@ -12,7 +54,7 @@ use crate::{
use core::mem::size_of; use core::mem::size_of;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy::AsBytes; use zerocopy::{FromBytes, IntoBytes};
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
@ -41,9 +83,9 @@ pub trait GenericPusTmSecondaryHeader {
pub mod zc { pub mod zc {
use super::GenericPusTmSecondaryHeader; use super::GenericPusTmSecondaryHeader;
use crate::ecss::{PusError, PusVersion}; use crate::ecss::{PusError, PusVersion};
use zerocopy::{AsBytes, FromBytes, FromZeroes, NetworkEndian, Unaligned, U16}; use zerocopy::{FromBytes, Immutable, IntoBytes, NetworkEndian, Unaligned, U16};
#[derive(FromBytes, FromZeroes, AsBytes, Unaligned)] #[derive(FromBytes, IntoBytes, Immutable, Unaligned)]
#[repr(C)] #[repr(C)]
pub struct PusTmSecHeaderWithoutTimestamp { pub struct PusTmSecHeaderWithoutTimestamp {
pus_version_and_sc_time_ref_status: u8, pus_version_and_sc_time_ref_status: u8,
@ -75,38 +117,34 @@ pub mod zc {
} }
} }
impl PusTmSecHeaderWithoutTimestamp {
pub fn write_to_bytes(&self, slice: &mut [u8]) -> Option<()> {
self.write_to(slice)
}
pub fn from_bytes(slice: &[u8]) -> Option<Self> {
Self::read_from(slice)
}
}
impl GenericPusTmSecondaryHeader for PusTmSecHeaderWithoutTimestamp { impl GenericPusTmSecondaryHeader for PusTmSecHeaderWithoutTimestamp {
#[inline]
fn pus_version(&self) -> PusVersion { fn pus_version(&self) -> PusVersion {
PusVersion::try_from(self.pus_version_and_sc_time_ref_status >> 4 & 0b1111) PusVersion::try_from(self.pus_version_and_sc_time_ref_status >> 4 & 0b1111)
.unwrap_or(PusVersion::Invalid) .unwrap_or(PusVersion::Invalid)
} }
#[inline]
fn sc_time_ref_status(&self) -> u8 { fn sc_time_ref_status(&self) -> u8 {
self.pus_version_and_sc_time_ref_status & 0b1111 self.pus_version_and_sc_time_ref_status & 0b1111
} }
#[inline]
fn service(&self) -> u8 { fn service(&self) -> u8 {
self.service self.service
} }
#[inline]
fn subservice(&self) -> u8 { fn subservice(&self) -> u8 {
self.subservice self.subservice
} }
#[inline]
fn msg_counter(&self) -> u16 { fn msg_counter(&self) -> u16 {
self.msg_counter.get() self.msg_counter.get()
} }
#[inline]
fn dest_id(&self) -> u16 { fn dest_id(&self) -> u16 {
self.dest_id.get() self.dest_id.get()
} }
@ -123,25 +161,28 @@ pub struct PusTmSecondaryHeader<'stamp> {
pub subservice: u8, pub subservice: u8,
pub msg_counter: u16, pub msg_counter: u16,
pub dest_id: u16, pub dest_id: u16,
pub time_stamp: &'stamp [u8], pub timestamp: &'stamp [u8],
} }
impl<'stamp> PusTmSecondaryHeader<'stamp> { impl<'stamp> PusTmSecondaryHeader<'stamp> {
pub fn new_simple(service: u8, subservice: u8, time_stamp: &'stamp [u8]) -> Self { #[inline]
Self::new(service, subservice, 0, 0, time_stamp) pub fn new_simple(service: u8, subservice: u8, timestamp: &'stamp [u8]) -> Self {
Self::new(service, subservice, 0, 0, timestamp)
} }
/// Like [Self::new_simple] but without a timestamp. /// Like [Self::new_simple] but without a timestamp.
#[inline]
pub fn new_simple_no_timestamp(service: u8, subservice: u8) -> Self { pub fn new_simple_no_timestamp(service: u8, subservice: u8) -> Self {
Self::new(service, subservice, 0, 0, &[]) Self::new(service, subservice, 0, 0, &[])
} }
#[inline]
pub fn new( pub fn new(
service: u8, service: u8,
subservice: u8, subservice: u8,
msg_counter: u16, msg_counter: u16,
dest_id: u16, dest_id: u16,
time_stamp: &'stamp [u8], timestamp: &'stamp [u8],
) -> Self { ) -> Self {
PusTmSecondaryHeader { PusTmSecondaryHeader {
pus_version: PusVersion::PusC, pus_version: PusVersion::PusC,
@ -150,32 +191,38 @@ impl<'stamp> PusTmSecondaryHeader<'stamp> {
subservice, subservice,
msg_counter, msg_counter,
dest_id, dest_id,
time_stamp, timestamp,
} }
} }
} }
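With the field renamed from time_stamp to timestamp, a secondary header is typically built over a caller-owned stamp buffer. A brief sketch using a CDS timestamp, mirroring the module-level example:

use spacepackets::ecss::tm::PusTmSecondaryHeader;
use spacepackets::time::TimeWriter;
use spacepackets::time::cds::CdsTime;

fn main() {
    let mut stamp_buf = [0u8; 7];
    let time = CdsTime::new_with_u16_days(0, 0);
    let written = time
        .write_to_bytes(&mut stamp_buf)
        .expect("writing CDS timestamp failed");

    let sec_header = PusTmSecondaryHeader::new_simple(3, 25, &stamp_buf[0..written]);
    assert_eq!(sec_header.timestamp, &stamp_buf[0..written]);
}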
impl GenericPusTmSecondaryHeader for PusTmSecondaryHeader<'_> { impl GenericPusTmSecondaryHeader for PusTmSecondaryHeader<'_> {
#[inline]
fn pus_version(&self) -> PusVersion { fn pus_version(&self) -> PusVersion {
self.pus_version self.pus_version
} }
#[inline]
fn sc_time_ref_status(&self) -> u8 { fn sc_time_ref_status(&self) -> u8 {
self.sc_time_ref_status self.sc_time_ref_status
} }
#[inline]
fn service(&self) -> u8 { fn service(&self) -> u8 {
self.service self.service
} }
#[inline]
fn subservice(&self) -> u8 { fn subservice(&self) -> u8 {
self.subservice self.subservice
} }
#[inline]
fn msg_counter(&self) -> u16 { fn msg_counter(&self) -> u16 {
self.msg_counter self.msg_counter
} }
#[inline]
fn dest_id(&self) -> u16 { fn dest_id(&self) -> u16 {
self.dest_id self.dest_id
} }
@ -184,6 +231,7 @@ impl GenericPusTmSecondaryHeader for PusTmSecondaryHeader<'_> {
impl<'slice> TryFrom<zc::PusTmSecHeader<'slice>> for PusTmSecondaryHeader<'slice> { impl<'slice> TryFrom<zc::PusTmSecHeader<'slice>> for PusTmSecondaryHeader<'slice> {
type Error = (); type Error = ();
#[inline]
fn try_from(sec_header: zc::PusTmSecHeader<'slice>) -> Result<Self, Self::Error> { fn try_from(sec_header: zc::PusTmSecHeader<'slice>) -> Result<Self, Self::Error> {
Ok(PusTmSecondaryHeader { Ok(PusTmSecondaryHeader {
pus_version: sec_header.zc_header.pus_version(), pus_version: sec_header.zc_header.pus_version(),
@ -192,7 +240,7 @@ impl<'slice> TryFrom<zc::PusTmSecHeader<'slice>> for PusTmSecondaryHeader<'slice
subservice: sec_header.zc_header.subservice(), subservice: sec_header.zc_header.subservice(),
msg_counter: sec_header.zc_header.msg_counter(), msg_counter: sec_header.zc_header.msg_counter(),
dest_id: sec_header.zc_header.dest_id(), dest_id: sec_header.zc_header.dest_id(),
time_stamp: sec_header.timestamp, timestamp: sec_header.timestamp,
}) })
} }
} }
@ -208,43 +256,45 @@ impl<'slice> TryFrom<zc::PusTmSecHeader<'slice>> for PusTmSecondaryHeader<'slice
/// ///
/// # Lifetimes /// # Lifetimes
/// ///
/// * `'raw_data` - This is the lifetime of the user provided time stamp and source data. /// * `'time` - This is the lifetime of the user provided timestamp.
/// * `'src_data` - This is the lifetime of the user provided source data.
#[derive(Eq, Debug, Copy, Clone)] #[derive(Eq, Debug, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))] #[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct PusTmCreator<'time, 'raw_data> { pub struct PusTmCreator<'time, 'src_data> {
pub sp_header: SpHeader, pub sp_header: SpHeader,
#[cfg_attr(feature = "serde", serde(borrow))] #[cfg_attr(feature = "serde", serde(borrow))]
pub sec_header: PusTmSecondaryHeader<'time>, pub sec_header: PusTmSecondaryHeader<'time>,
source_data: &'raw_data [u8], source_data: &'src_data [u8],
/// If this is set to false, a manual call to [Self::calc_own_crc16] or /// If this is set to false, a manual call to [Self::calc_own_crc16] or
/// [Self::update_packet_fields] is necessary for the serialized or cached CRC16 to be valid. /// [Self::update_packet_fields] is necessary for the serialized or cached CRC16 to be valid.
pub calc_crc_on_serialization: bool, pub calc_crc_on_serialization: bool,
} }
impl<'time, 'raw_data> PusTmCreator<'time, 'raw_data> { impl<'time, 'src_data> PusTmCreator<'time, 'src_data> {
/// Generates a new struct instance. /// Generates a new struct instance.
/// ///
/// # Arguments /// # Arguments
/// ///
/// * `sp_header` - Space packet header information. The correct packet type will be set /// * `sp_header` - Space packet header information. The correct packet type and the secondary
/// automatically /// header flag are set correctly by the constructor.
/// * `sec_header` - Information contained in the secondary header, including the service /// * `sec_header` - Information contained in the secondary header, including the service
/// and subservice type /// and subservice type
/// * `source_data` - Custom application data /// * `source_data` - Custom application data
/// * `set_ccsds_len` - Can be used to automatically update the CCSDS space packet data length /// * `set_ccsds_len` - Can be used to automatically update the CCSDS space packet data length
/// field. If this is not set to true, [Self::update_ccsds_data_len] can be called to set /// field. If this is not set to true, [Self::update_ccsds_data_len] can be called to set
/// the correct value to this field manually /// the correct value to this field manually
#[inline]
pub fn new( pub fn new(
sp_header: &mut SpHeader, mut sp_header: SpHeader,
sec_header: PusTmSecondaryHeader<'time>, sec_header: PusTmSecondaryHeader<'time>,
source_data: &'raw_data [u8], source_data: &'src_data [u8],
set_ccsds_len: bool, set_ccsds_len: bool,
) -> Self { ) -> Self {
sp_header.set_packet_type(PacketType::Tm); sp_header.set_packet_type(PacketType::Tm);
sp_header.set_sec_header_flag(); sp_header.set_sec_header_flag();
let mut pus_tm = Self { let mut pus_tm = Self {
sp_header: *sp_header, sp_header,
source_data, source_data,
sec_header, sec_header,
calc_crc_on_serialization: true, calc_crc_on_serialization: true,
@ -255,50 +305,52 @@ impl<'time, 'raw_data> PusTmCreator<'time, 'raw_data> {
pus_tm pus_tm
} }
#[inline]
pub fn new_simple( pub fn new_simple(
sp_header: &mut SpHeader, sp_header: SpHeader,
service: u8, service: u8,
subservice: u8, subservice: u8,
time_provider: &impl TimeWriter, time_provider: &impl TimeWriter,
stamp_buf: &'time mut [u8], stamp_buf: &'time mut [u8],
source_data: Option<&'raw_data [u8]>, source_data: &'src_data [u8],
set_ccsds_len: bool, set_ccsds_len: bool,
) -> Result<Self, TimestampError> { ) -> Result<Self, TimestampError> {
let stamp_size = time_provider.write_to_bytes(stamp_buf)?; let stamp_size = time_provider.write_to_bytes(stamp_buf)?;
let sec_header = let sec_header =
PusTmSecondaryHeader::new_simple(service, subservice, &stamp_buf[0..stamp_size]); PusTmSecondaryHeader::new_simple(service, subservice, &stamp_buf[0..stamp_size]);
Ok(Self::new( Ok(Self::new(sp_header, sec_header, source_data, set_ccsds_len))
sp_header,
sec_header,
source_data.unwrap_or(&[]),
set_ccsds_len,
))
} }
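new_simple now takes the source data as a plain slice instead of an Option, with an empty slice replacing the former None. A condensed sketch of the updated call, following the adjusted tests further below:

use spacepackets::SpHeader;
use spacepackets::ecss::PusPacket;
use spacepackets::ecss::tm::PusTmCreator;
use spacepackets::time::cds::CdsTime;

fn main() {
    let mut stamp_buf = [0u8; 8];
    let time_provider = CdsTime::new_with_u16_days(0, 0);
    let pus_tm = PusTmCreator::new_simple(
        SpHeader::new_for_unseg_tm_checked(0x123, 0x234, 0).unwrap(),
        17,
        2,
        &time_provider,
        &mut stamp_buf,
        // An empty slice takes the place of the former `None` source data argument.
        &[],
        true,
    )
    .expect("writing timestamp failed");
    assert_eq!(pus_tm.service(), 17);
    assert_eq!(pus_tm.subservice(), 2);
}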
#[inline]
pub fn new_no_source_data( pub fn new_no_source_data(
sp_header: &mut SpHeader, sp_header: SpHeader,
sec_header: PusTmSecondaryHeader<'time>, sec_header: PusTmSecondaryHeader<'time>,
set_ccsds_len: bool, set_ccsds_len: bool,
) -> Self { ) -> Self {
Self::new(sp_header, sec_header, &[], set_ccsds_len) Self::new(sp_header, sec_header, &[], set_ccsds_len)
} }
#[inline]
pub fn timestamp(&self) -> &[u8] { pub fn timestamp(&self) -> &[u8] {
self.sec_header.time_stamp self.sec_header.timestamp
} }
#[inline]
pub fn source_data(&self) -> &[u8] { pub fn source_data(&self) -> &[u8] {
self.source_data self.source_data
} }
#[inline]
pub fn set_dest_id(&mut self, dest_id: u16) { pub fn set_dest_id(&mut self, dest_id: u16) {
self.sec_header.dest_id = dest_id; self.sec_header.dest_id = dest_id;
} }
#[inline]
pub fn set_msg_counter(&mut self, msg_counter: u16) { pub fn set_msg_counter(&mut self, msg_counter: u16) {
self.sec_header.msg_counter = msg_counter self.sec_header.msg_counter = msg_counter
} }
#[inline]
pub fn set_sc_time_ref_status(&mut self, sc_time_ref_status: u8) { pub fn set_sc_time_ref_status(&mut self, sc_time_ref_status: u8) {
self.sec_header.sc_time_ref_status = sc_time_ref_status & 0b1111; self.sec_header.sc_time_ref_status = sc_time_ref_status & 0b1111;
} }
@ -310,6 +362,7 @@ impl<'time, 'raw_data> PusTmCreator<'time, 'raw_data> {
/// If this was not done or the time stamp or source data is set or changed after construction, /// If this was not done or the time stamp or source data is set or changed after construction,
/// this function needs to be called to ensure that the data length field of the CCSDS header /// this function needs to be called to ensure that the data length field of the CCSDS header
/// is set correctly /// is set correctly
#[inline]
pub fn update_ccsds_data_len(&mut self) { pub fn update_ccsds_data_len(&mut self) {
self.sp_header.data_len = self.sp_header.data_len =
self.len_written() as u16 - size_of::<crate::zc::SpHeader>() as u16 - 1; self.len_written() as u16 - size_of::<crate::zc::SpHeader>() as u16 - 1;
@ -323,12 +376,13 @@ impl<'time, 'raw_data> PusTmCreator<'time, 'raw_data> {
digest.update(sph_zc.as_bytes()); digest.update(sph_zc.as_bytes());
let pus_tc_header = zc::PusTmSecHeaderWithoutTimestamp::try_from(self.sec_header).unwrap(); let pus_tc_header = zc::PusTmSecHeaderWithoutTimestamp::try_from(self.sec_header).unwrap();
digest.update(pus_tc_header.as_bytes()); digest.update(pus_tc_header.as_bytes());
digest.update(self.sec_header.time_stamp); digest.update(self.sec_header.timestamp);
digest.update(self.source_data); digest.update(self.source_data);
digest.finalize() digest.finalize()
} }
/// This helper function calls both [Self::update_ccsds_data_len] and [Self::calc_own_crc16] /// This helper function calls both [Self::update_ccsds_data_len] and [Self::calc_own_crc16]
#[inline]
pub fn update_packet_fields(&mut self) { pub fn update_packet_fields(&mut self) {
self.update_ccsds_data_len(); self.update_ccsds_data_len();
} }
@ -349,12 +403,12 @@ impl<'time, 'raw_data> PusTmCreator<'time, 'raw_data> {
let sec_header_len = size_of::<zc::PusTmSecHeaderWithoutTimestamp>(); let sec_header_len = size_of::<zc::PusTmSecHeaderWithoutTimestamp>();
let sec_header = zc::PusTmSecHeaderWithoutTimestamp::try_from(self.sec_header).unwrap(); let sec_header = zc::PusTmSecHeaderWithoutTimestamp::try_from(self.sec_header).unwrap();
sec_header sec_header
.write_to_bytes(&mut slice[curr_idx..curr_idx + sec_header_len]) .write_to(&mut slice[curr_idx..curr_idx + sec_header_len])
.ok_or(ByteConversionError::ZeroCopyToError)?; .map_err(|_| ByteConversionError::ZeroCopyToError)?;
curr_idx += sec_header_len; curr_idx += sec_header_len;
slice[curr_idx..curr_idx + self.sec_header.time_stamp.len()] slice[curr_idx..curr_idx + self.sec_header.timestamp.len()]
.copy_from_slice(self.sec_header.time_stamp); .copy_from_slice(self.sec_header.timestamp);
curr_idx += self.sec_header.time_stamp.len(); curr_idx += self.sec_header.timestamp.len();
slice[curr_idx..curr_idx + self.source_data.len()].copy_from_slice(self.source_data); slice[curr_idx..curr_idx + self.source_data.len()].copy_from_slice(self.source_data);
curr_idx += self.source_data.len(); curr_idx += self.source_data.len();
let mut digest = CRC_CCITT_FALSE.digest(); let mut digest = CRC_CCITT_FALSE.digest();
@ -368,15 +422,14 @@ impl<'time, 'raw_data> PusTmCreator<'time, 'raw_data> {
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
pub fn append_to_vec(&self, vec: &mut Vec<u8>) -> Result<usize, PusError> { pub fn append_to_vec(&self, vec: &mut Vec<u8>) -> Result<usize, PusError> {
let sph_zc = crate::zc::SpHeader::from(self.sp_header); let sph_zc = crate::zc::SpHeader::from(self.sp_header);
let mut appended_len = let mut appended_len = PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA + self.sec_header.timestamp.len();
PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA + self.sec_header.time_stamp.len();
appended_len += self.source_data.len(); appended_len += self.source_data.len();
let start_idx = vec.len(); let start_idx = vec.len();
vec.extend_from_slice(sph_zc.as_bytes()); vec.extend_from_slice(sph_zc.as_bytes());
// The PUS version is hardcoded to PUS C // The PUS version is hardcoded to PUS C
let sec_header = zc::PusTmSecHeaderWithoutTimestamp::try_from(self.sec_header).unwrap(); let sec_header = zc::PusTmSecHeaderWithoutTimestamp::try_from(self.sec_header).unwrap();
vec.extend_from_slice(sec_header.as_bytes()); vec.extend_from_slice(sec_header.as_bytes());
vec.extend_from_slice(self.sec_header.time_stamp); vec.extend_from_slice(self.sec_header.timestamp);
vec.extend_from_slice(self.source_data); vec.extend_from_slice(self.source_data);
let mut digest = CRC_CCITT_FALSE.digest(); let mut digest = CRC_CCITT_FALSE.digest();
digest.update(&vec[start_idx..start_idx + appended_len - 2]); digest.update(&vec[start_idx..start_idx + appended_len - 2]);
@ -386,9 +439,10 @@ impl<'time, 'raw_data> PusTmCreator<'time, 'raw_data> {
} }
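With the alloc feature enabled, append_to_vec extends an existing byte vector with the serialized TM and appears to return the number of appended bytes. A short usage sketch with a 7 byte dummy timestamp:

use spacepackets::SpHeader;
use spacepackets::ecss::tm::{PusTmCreator, PusTmSecondaryHeader};

fn main() {
    let timestamp = [0u8; 7];
    let tm = PusTmCreator::new_no_source_data(
        SpHeader::new_from_apid(0x02),
        PusTmSecondaryHeader::new_simple(17, 2, &timestamp),
        true,
    );
    let mut tm_buf: Vec<u8> = Vec::new();
    let appended = tm.append_to_vec(&mut tm_buf).expect("appending TM failed");
    assert_eq!(appended, tm_buf.len());
    // 6 byte CCSDS header + 7 byte secondary header + 7 byte timestamp + 2 byte CRC16.
    assert_eq!(appended, 22);
}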
impl WritablePusPacket for PusTmCreator<'_, '_> { impl WritablePusPacket for PusTmCreator<'_, '_> {
#[inline]
fn len_written(&self) -> usize { fn len_written(&self) -> usize {
PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA
+ self.sec_header.time_stamp.len() + self.sec_header.timestamp.len()
+ self.source_data.len() + self.source_data.len()
} }
/// Write the raw PUS byte representation to a provided buffer. /// Write the raw PUS byte representation to a provided buffer.
@ -411,15 +465,20 @@ impl CcsdsPacket for PusTmCreator<'_, '_> {
impl PusPacket for PusTmCreator<'_, '_> { impl PusPacket for PusTmCreator<'_, '_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
}); });
#[inline]
fn user_data(&self) -> &[u8] { fn user_data(&self) -> &[u8] {
self.source_data self.source_data
} }
#[inline]
fn crc16(&self) -> Option<u16> { fn crc16(&self) -> Option<u16> {
Some(self.calc_own_crc16()) Some(self.calc_own_crc16())
} }
@ -427,11 +486,17 @@ impl PusPacket for PusTmCreator<'_, '_> {
impl GenericPusTmSecondaryHeader for PusTmCreator<'_, '_> { impl GenericPusTmSecondaryHeader for PusTmCreator<'_, '_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
#[inline]
fn dest_id(&self) -> u16; fn dest_id(&self) -> u16;
#[inline]
fn msg_counter(&self) -> u16; fn msg_counter(&self) -> u16;
#[inline]
fn sc_time_ref_status(&self) -> u8; fn sc_time_ref_status(&self) -> u8;
}); });
} }
@ -496,10 +561,10 @@ impl<'raw_data> PusTmReader<'raw_data> {
} }
.into()); .into());
} }
let sec_header_zc = zc::PusTmSecHeaderWithoutTimestamp::from_bytes( let sec_header_zc = zc::PusTmSecHeaderWithoutTimestamp::read_from_bytes(
&slice[current_idx..current_idx + PUS_TM_MIN_SEC_HEADER_LEN], &slice[current_idx..current_idx + PUS_TM_MIN_SEC_HEADER_LEN],
) )
.ok_or(ByteConversionError::ZeroCopyFromError)?; .map_err(|_| ByteConversionError::ZeroCopyFromError)?;
current_idx += PUS_TM_MIN_SEC_HEADER_LEN; current_idx += PUS_TM_MIN_SEC_HEADER_LEN;
let zc_sec_header_wrapper = zc::PusTmSecHeader { let zc_sec_header_wrapper = zc::PusTmSecHeader {
zc_header: sec_header_zc, zc_header: sec_header_zc,
@ -518,19 +583,23 @@ impl<'raw_data> PusTmReader<'raw_data> {
Ok((pus_tm, total_len)) Ok((pus_tm, total_len))
} }
#[inline]
pub fn len_packed(&self) -> usize { pub fn len_packed(&self) -> usize {
self.sp_header.total_len() self.sp_header.total_len()
} }
#[inline]
pub fn source_data(&self) -> &[u8] { pub fn source_data(&self) -> &[u8] {
self.user_data() self.user_data()
} }
#[inline]
pub fn timestamp(&self) -> &[u8] { pub fn timestamp(&self) -> &[u8] {
self.sec_header.time_stamp self.sec_header.timestamp
} }
/// This function will return the slice [Self] was constructed from. /// This function will return the slice [Self] was constructed from.
#[inline]
pub fn raw_data(&self) -> &[u8] { pub fn raw_data(&self) -> &[u8] {
self.raw_data self.raw_data
} }
@ -551,15 +620,20 @@ impl CcsdsPacket for PusTmReader<'_> {
impl PusPacket for PusTmReader<'_> { impl PusPacket for PusTmReader<'_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
}); });
#[inline]
fn user_data(&self) -> &[u8] { fn user_data(&self) -> &[u8] {
self.source_data self.source_data
} }
#[inline]
fn crc16(&self) -> Option<u16> { fn crc16(&self) -> Option<u16> {
Some(self.crc16) Some(self.crc16)
} }
@ -567,11 +641,17 @@ impl PusPacket for PusTmReader<'_> {
impl GenericPusTmSecondaryHeader for PusTmReader<'_> { impl GenericPusTmSecondaryHeader for PusTmReader<'_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
#[inline]
fn dest_id(&self) -> u16; fn dest_id(&self) -> u16;
#[inline]
fn msg_counter(&self) -> u16; fn msg_counter(&self) -> u16;
#[inline]
fn sc_time_ref_status(&self) -> u8; fn sc_time_ref_status(&self) -> u8;
}); });
} }
@ -620,7 +700,7 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
if raw_tm_len < CCSDS_HEADER_LEN + PUS_TM_MIN_SEC_HEADER_LEN + timestamp_len { if raw_tm_len < CCSDS_HEADER_LEN + PUS_TM_MIN_SEC_HEADER_LEN + timestamp_len {
return None; return None;
} }
let sp_header = crate::zc::SpHeader::from_bytes(&raw_tm[0..CCSDS_HEADER_LEN]).unwrap(); let sp_header = crate::zc::SpHeader::read_from_bytes(&raw_tm[0..CCSDS_HEADER_LEN]).unwrap();
if raw_tm_len < sp_header.total_len() { if raw_tm_len < sp_header.total_len() {
return None; return None;
} }
@ -633,6 +713,7 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
/// Set the APID. Returns false and does not update the value if the passed value /// Set the APID. Returns false and does not update the value if the passed value
/// exceeds [MAX_APID]. /// exceeds [MAX_APID].
#[inline]
pub fn set_apid(&mut self, apid: u16) -> bool { pub fn set_apid(&mut self, apid: u16) -> bool {
if apid > MAX_APID { if apid > MAX_APID {
return false; return false;
@ -645,11 +726,13 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
} }
/// This function sets the message counter in the PUS TM secondary header. /// This function sets the message counter in the PUS TM secondary header.
#[inline]
pub fn set_msg_count(&mut self, msg_count: u16) { pub fn set_msg_count(&mut self, msg_count: u16) {
self.raw_tm[9..11].copy_from_slice(&msg_count.to_be_bytes()); self.raw_tm[9..11].copy_from_slice(&msg_count.to_be_bytes());
} }
/// This function sets the destination ID in the PUS TM secondary header. /// This function sets the destination ID in the PUS TM secondary header.
#[inline]
pub fn set_destination_id(&mut self, dest_id: u16) { pub fn set_destination_id(&mut self, dest_id: u16) {
self.raw_tm[11..13].copy_from_slice(&dest_id.to_be_bytes()) self.raw_tm[11..13].copy_from_slice(&dest_id.to_be_bytes())
} }
@ -658,7 +741,7 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
#[inline] #[inline]
pub fn sp_header(&self) -> crate::zc::SpHeader { pub fn sp_header(&self) -> crate::zc::SpHeader {
// Valid minimum length of packet was checked before. // Valid minimum length of packet was checked before.
crate::zc::SpHeader::from_bytes(&self.raw_tm[0..CCSDS_HEADER_LEN]).unwrap() crate::zc::SpHeader::read_from_bytes(&self.raw_tm[0..CCSDS_HEADER_LEN]).unwrap()
} }
/// Helper API to generate the portion of the secondary header without a timestamp from the /// Helper API to generate the portion of the secondary header without a timestamp from the
@ -666,7 +749,7 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
#[inline] #[inline]
pub fn sec_header_without_timestamp(&self) -> PusTmSecHeaderWithoutTimestamp { pub fn sec_header_without_timestamp(&self) -> PusTmSecHeaderWithoutTimestamp {
// Valid minimum length of packet was checked before. // Valid minimum length of packet was checked before.
PusTmSecHeaderWithoutTimestamp::from_bytes( PusTmSecHeaderWithoutTimestamp::read_from_bytes(
&self.raw_tm[CCSDS_HEADER_LEN..CCSDS_HEADER_LEN + PUS_TM_MIN_SEC_HEADER_LEN], &self.raw_tm[CCSDS_HEADER_LEN..CCSDS_HEADER_LEN + PUS_TM_MIN_SEC_HEADER_LEN],
) )
.unwrap() .unwrap()
@ -674,6 +757,7 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
/// Set the sequence count. Returns false and does not update the value if the passed value /// Set the sequence count. Returns false and does not update the value if the passed value
/// exceeds [MAX_SEQ_COUNT]. /// exceeds [MAX_SEQ_COUNT].
#[inline]
pub fn set_seq_count(&mut self, seq_count: u16) -> bool { pub fn set_seq_count(&mut self, seq_count: u16) -> bool {
if seq_count > MAX_SEQ_COUNT { if seq_count > MAX_SEQ_COUNT {
return false; return false;
@ -788,20 +872,20 @@ mod tests {
const DUMMY_DATA: &[u8] = &[0, 1, 2]; const DUMMY_DATA: &[u8] = &[0, 1, 2];
fn base_ping_reply_full_ctor(timestamp: &[u8]) -> PusTmCreator { fn base_ping_reply_full_ctor(timestamp: &[u8]) -> PusTmCreator {
let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); let sph = SpHeader::new_for_unseg_tm_checked(0x123, 0x234, 0).unwrap();
let tm_header = PusTmSecondaryHeader::new_simple(17, 2, timestamp); let tm_header = PusTmSecondaryHeader::new_simple(17, 2, timestamp);
PusTmCreator::new_no_source_data(&mut sph, tm_header, true) PusTmCreator::new_no_source_data(sph, tm_header, true)
} }
fn ping_reply_with_data(timestamp: &[u8]) -> PusTmCreator { fn ping_reply_with_data(timestamp: &[u8]) -> PusTmCreator {
let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); let sph = SpHeader::new_for_unseg_tm_checked(0x123, 0x234, 0).unwrap();
let tm_header = PusTmSecondaryHeader::new_simple(17, 2, timestamp); let tm_header = PusTmSecondaryHeader::new_simple(17, 2, timestamp);
PusTmCreator::new(&mut sph, tm_header, DUMMY_DATA, true) PusTmCreator::new(sph, tm_header, DUMMY_DATA, true)
} }
fn base_hk_reply<'a, 'b>(timestamp: &'a [u8], src_data: &'b [u8]) -> PusTmCreator<'a, 'b> { fn base_hk_reply<'a, 'b>(timestamp: &'a [u8], src_data: &'b [u8]) -> PusTmCreator<'a, 'b> {
let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); let sph = SpHeader::new_for_unseg_tm_checked(0x123, 0x234, 0).unwrap();
let tc_header = PusTmSecondaryHeader::new_simple(3, 5, timestamp); let tc_header = PusTmSecondaryHeader::new_simple(3, 5, timestamp);
PusTmCreator::new(&mut sph, tc_header, src_data, true) PusTmCreator::new(sph, tc_header, src_data, true)
} }
fn dummy_timestamp() -> &'static [u8] { fn dummy_timestamp() -> &'static [u8] {
@ -816,11 +900,11 @@ mod tests {
} }
#[test] #[test]
fn test_basic_simple_api() { fn test_basic_simple_api() {
let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); let sph = SpHeader::new_for_unseg_tm_checked(0x123, 0x234, 0).unwrap();
let time_provider = CdsTime::new_with_u16_days(0, 0); let time_provider = CdsTime::new_with_u16_days(0, 0);
let mut stamp_buf: [u8; 8] = [0; 8]; let mut stamp_buf: [u8; 8] = [0; 8];
let pus_tm = let pus_tm =
PusTmCreator::new_simple(&mut sph, 17, 2, &time_provider, &mut stamp_buf, None, true) PusTmCreator::new_simple(sph, 17, 2, &time_provider, &mut stamp_buf, &[], true)
.unwrap(); .unwrap();
verify_ping_reply(&pus_tm, false, 22, &[64, 0, 0, 0, 0, 0, 0]); verify_ping_reply(&pus_tm, false, 22, &[64, 0, 0, 0, 0, 0, 0]);
} }
@ -917,9 +1001,9 @@ mod tests {
#[test] #[test]
fn test_manual_field_update() { fn test_manual_field_update() {
let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); let sph = SpHeader::new_for_unseg_tm_checked(0x123, 0x234, 0).unwrap();
let tc_header = PusTmSecondaryHeader::new_simple(17, 2, dummy_timestamp()); let tc_header = PusTmSecondaryHeader::new_simple(17, 2, dummy_timestamp());
let mut tm = PusTmCreator::new_no_source_data(&mut sph, tc_header, false); let mut tm = PusTmCreator::new_no_source_data(sph, tc_header, false);
tm.calc_crc_on_serialization = false; tm.calc_crc_on_serialization = false;
assert_eq!(tm.data_len(), 0x00); assert_eq!(tm.data_len(), 0x00);
let mut buf: [u8; 32] = [0; 32]; let mut buf: [u8; 32] = [0; 32];
@ -1147,7 +1231,7 @@ mod tests {
#[test] #[test]
fn test_sec_header_without_stamp() { fn test_sec_header_without_stamp() {
let sec_header = PusTmSecondaryHeader::new_simple_no_timestamp(17, 1); let sec_header = PusTmSecondaryHeader::new_simple_no_timestamp(17, 1);
assert_eq!(sec_header.time_stamp, &[]); assert_eq!(sec_header.timestamp, &[]);
} }
#[test] #[test]
@ -1207,11 +1291,11 @@ mod tests {
#[test] #[test]
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
fn test_serialization_creator_serde() { fn test_serialization_creator_serde() {
let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); let sph = SpHeader::new_for_unseg_tm_checked(0x123, 0x234, 0).unwrap();
let time_provider = CdsTime::new_with_u16_days(0, 0); let time_provider = CdsTime::new_with_u16_days(0, 0);
let mut stamp_buf: [u8; 8] = [0; 8]; let mut stamp_buf: [u8; 8] = [0; 8];
let pus_tm = let pus_tm =
PusTmCreator::new_simple(&mut sph, 17, 2, &time_provider, &mut stamp_buf, None, true) PusTmCreator::new_simple(sph, 17, 2, &time_provider, &mut stamp_buf, &[], true)
.unwrap(); .unwrap();
let output = to_allocvec(&pus_tm).unwrap(); let output = to_allocvec(&pus_tm).unwrap();
@ -1222,11 +1306,11 @@ mod tests {
#[test] #[test]
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
fn test_serialization_reader_serde() { fn test_serialization_reader_serde() {
let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); let sph = SpHeader::new_for_unseg_tm_checked(0x123, 0x234, 0).unwrap();
let time_provider = CdsTime::new_with_u16_days(0, 0); let time_provider = CdsTime::new_with_u16_days(0, 0);
let mut stamp_buf: [u8; 8] = [0; 8]; let mut stamp_buf: [u8; 8] = [0; 8];
let pus_tm = let pus_tm =
PusTmCreator::new_simple(&mut sph, 17, 2, &time_provider, &mut stamp_buf, None, true) PusTmCreator::new_simple(sph, 17, 2, &time_provider, &mut stamp_buf, &[], true)
.unwrap(); .unwrap();
let pus_tm_vec = pus_tm.to_vec().unwrap(); let pus_tm_vec = pus_tm.to_vec().unwrap();
let (tm_reader, _) = PusTmReader::new(&pus_tm_vec, time_provider.len_as_bytes()).unwrap(); let (tm_reader, _) = PusTmReader::new(&pus_tm_vec, time_provider.len_as_bytes()).unwrap();


@ -22,10 +22,6 @@
//! //!
//! `spacepackets` supports various runtime environments and is also suitable for `no_std` environments. //! `spacepackets` supports various runtime environments and is also suitable for `no_std` environments.
//! //!
//! It also offers optional support for [`serde`](https://serde.rs/). This allows serializing and
//! deserializing them with an appropriate `serde` provider like
//! [`postcard`](https://github.com/jamesmunns/postcard).
//!
//! ### Default features //! ### Default features
//! //!
//! - [`std`](https://doc.rust-lang.org/std/): Enables functionality relying on the standard library. //! - [`std`](https://doc.rust-lang.org/std/): Enables functionality relying on the standard library.
@ -52,35 +48,30 @@
//! //!
//! ```rust //! ```rust
//! use spacepackets::SpHeader; //! use spacepackets::SpHeader;
//! let sp_header = SpHeader::tc_unseg(0x42, 12, 1).expect("Error creating CCSDS TC header"); //! let sp_header = SpHeader::new_for_unseg_tc_checked(0x42, 12, 1).expect("error creating CCSDS TC header");
//! println!("{:?}", sp_header); //! println!("{:?}", sp_header);
//! let mut ccsds_buf: [u8; 32] = [0; 32]; //! let mut ccsds_buf: [u8; 32] = [0; 32];
//! sp_header.write_to_be_bytes(&mut ccsds_buf).expect("Writing CCSDS TC header failed"); //! sp_header.write_to_be_bytes(&mut ccsds_buf).expect("Writing CCSDS TC header failed");
//! println!("{:x?}", &ccsds_buf[0..6]); //! println!("{:x?}", &ccsds_buf[0..6]);
//! ``` //! ```
#![no_std] #![no_std]
#![cfg_attr(docs_rs, feature(doc_auto_cfg))] #![cfg_attr(docsrs, feature(doc_auto_cfg))]
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
extern crate alloc; extern crate alloc;
#[cfg(any(feature = "std", test))] #[cfg(any(feature = "std", test))]
extern crate std; extern crate std;
use crate::ecss::CCSDS_HEADER_LEN; use core::{fmt::Debug, hash::Hash};
use core::{
fmt::{Debug, Display, Formatter},
hash::Hash,
};
use crc::{Crc, CRC_16_IBM_3740}; use crc::{Crc, CRC_16_IBM_3740};
use delegate::delegate; use delegate::delegate;
use zerocopy::{FromBytes, IntoBytes};
#[cfg(feature = "std")]
use std::error::Error;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
pub mod cfdp; pub mod cfdp;
pub mod ecss; pub mod ecss;
pub mod seq_count;
pub mod time; pub mod time;
pub mod util; pub mod util;
@ -88,6 +79,8 @@ mod private {
pub trait Sealed {} pub trait Sealed {}
} }
pub const CCSDS_HEADER_LEN: usize = core::mem::size_of::<crate::zc::SpHeader>();
/// CRC algorithm used by the PUS standard, the CCSDS TC standard and the CFDP standard. /// CRC algorithm used by the PUS standard, the CCSDS TC standard and the CFDP standard.
pub const CRC_CCITT_FALSE: Crc<u16> = Crc::<u16>::new(&CRC_16_IBM_3740); pub const CRC_CCITT_FALSE: Crc<u16> = Crc::<u16>::new(&CRC_16_IBM_3740);
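CRC_CCITT_FALSE is the crc crate's CRC-16/IBM-3740 instance shared by the PUS, CCSDS TC and CFDP code paths; checksums can be computed in one shot or incrementally through a digest, as the packet writers above do. A tiny sketch:

use spacepackets::CRC_CCITT_FALSE;

fn main() {
    let data = [0x01u8, 0x02, 0x03];

    // One-shot checksum.
    let crc_oneshot = CRC_CCITT_FALSE.checksum(&data);

    // Incremental computation, useful when a packet is serialized piece by piece.
    let mut digest = CRC_CCITT_FALSE.digest();
    digest.update(&data[0..1]);
    digest.update(&data[1..]);
    assert_eq!(digest.finalize(), crc_oneshot);
}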
@ -95,55 +88,24 @@ pub const MAX_APID: u16 = 2u16.pow(11) - 1;
pub const MAX_SEQ_COUNT: u16 = 2u16.pow(14) - 1; pub const MAX_SEQ_COUNT: u16 = 2u16.pow(14) - 1;
/// Generic error type when converting to and from raw byte slices. /// Generic error type when converting to and from raw byte slices.
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))] #[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum ByteConversionError { pub enum ByteConversionError {
/// The passed slice is too small. Returns the passed slice length and expected minimum size /// The passed slice is too small. Returns the passed slice length and expected minimum size
ToSliceTooSmall { #[error("target slice with size {found} is too small, expected size of at least {expected}")]
found: usize, ToSliceTooSmall { found: usize, expected: usize },
expected: usize,
},
/// The provided buffer is too small. Returns the passed slice length and expected minimum size /// The provided buffer is too small. Returns the passed slice length and expected minimum size
FromSliceTooSmall { #[error("source slice with size {found} too small, expected at least {expected} bytes")]
found: usize, FromSliceTooSmall { found: usize, expected: usize },
expected: usize,
},
/// The [zerocopy] library failed to write to bytes /// The [zerocopy] library failed to write to bytes
#[error("zerocopy serialization error")]
ZeroCopyToError, ZeroCopyToError,
/// The [zerocopy] library failed to read from bytes
#[error("zerocopy deserialization error")]
ZeroCopyFromError, ZeroCopyFromError,
} }
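The thiserror attributes keep the previous Display strings, so errors are still constructed with the same named size fields and render the same messages. A hedged sketch; copy_checked is a made-up helper, not crate API:

use spacepackets::ByteConversionError;

// Hypothetical helper copying `src` into `dst`, reporting a too-small target buffer.
fn copy_checked(src: &[u8], dst: &mut [u8]) -> Result<usize, ByteConversionError> {
    if dst.len() < src.len() {
        return Err(ByteConversionError::ToSliceTooSmall {
            found: dst.len(),
            expected: src.len(),
        });
    }
    dst[0..src.len()].copy_from_slice(src);
    Ok(src.len())
}

fn main() {
    let mut small_buf = [0u8; 2];
    let error = copy_checked(&[1, 2, 3, 4], &mut small_buf).unwrap_err();
    // Prints: target slice with size 2 is too small, expected size of at least 4
    println!("{}", error);
}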
impl Display for ByteConversionError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
ByteConversionError::ToSliceTooSmall { found, expected } => {
write!(
f,
"target slice with size {} is too small, expected size of at least {}",
found, expected
)
}
ByteConversionError::FromSliceTooSmall { found, expected } => {
write!(
f,
"source slice with size {} too small, expected at least {} bytes",
found, expected
)
}
ByteConversionError::ZeroCopyToError => {
write!(f, "zerocopy serialization error")
}
ByteConversionError::ZeroCopyFromError => {
write!(f, "zerocopy deserialization error")
}
}
}
}
#[cfg(feature = "std")]
impl Error for ByteConversionError {}
/// CCSDS packet type enumeration. /// CCSDS packet type enumeration.
#[derive(Debug, PartialEq, Eq, Copy, Clone)] #[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
@ -207,18 +169,21 @@ pub struct PacketId {
} }
impl PartialEq for PacketId { impl PartialEq for PacketId {
#[inline]
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.raw().eq(&other.raw()) self.raw().eq(&other.raw())
} }
} }
impl PartialOrd for PacketId { impl PartialOrd for PacketId {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> { fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
Some(self.cmp(other)) Some(self.cmp(other))
} }
} }
impl Ord for PacketId { impl Ord for PacketId {
#[inline]
fn cmp(&self, other: &Self) -> core::cmp::Ordering { fn cmp(&self, other: &Self) -> core::cmp::Ordering {
self.raw().cmp(&other.raw()) self.raw().cmp(&other.raw())
} }
@ -232,6 +197,7 @@ impl Hash for PacketId {
} }
impl Default for PacketId { impl Default for PacketId {
#[inline]
fn default() -> Self { fn default() -> Self {
PacketId { PacketId {
ptype: PacketType::Tm, ptype: PacketType::Tm,
@ -242,23 +208,34 @@ impl Default for PacketId {
} }
impl PacketId { impl PacketId {
pub const fn const_tc(sec_header: bool, apid: u16) -> Self { /// This constructor will panic if the passed APID exceeds [MAX_APID].
Self::const_new(PacketType::Tc, sec_header, apid) /// Use the checked constructor variants to avoid panics.
} #[inline]
pub const fn new_for_tc(sec_header: bool, apid: u16) -> Self {
pub const fn const_tm(sec_header: bool, apid: u16) -> Self {
Self::const_new(PacketType::Tm, sec_header, apid)
}
pub fn tc(sec_header: bool, apid: u16) -> Option<Self> {
Self::new(PacketType::Tc, sec_header, apid) Self::new(PacketType::Tc, sec_header, apid)
} }
pub fn tm(sec_header: bool, apid: u16) -> Option<Self> { /// This constructor will panic if the passed APID exceeds [MAX_APID].
/// Use the checked constructor variants to avoid panics.
#[inline]
pub const fn new_for_tm(sec_header: bool, apid: u16) -> Self {
Self::new(PacketType::Tm, sec_header, apid) Self::new(PacketType::Tm, sec_header, apid)
} }
pub const fn const_new(ptype: PacketType, sec_header: bool, apid: u16) -> Self { #[inline]
pub fn new_for_tc_checked(sec_header: bool, apid: u16) -> Option<Self> {
Self::new_checked(PacketType::Tc, sec_header, apid)
}
#[inline]
pub fn new_for_tm_checked(sec_header: bool, apid: u16) -> Option<Self> {
Self::new_checked(PacketType::Tm, sec_header, apid)
}
/// This constructor will panic if the passed APID exceeds [MAX_APID].
/// Use the checked variants to avoid panics.
#[inline]
pub const fn new(ptype: PacketType, sec_header: bool, apid: u16) -> Self {
if apid > MAX_APID { if apid > MAX_APID {
panic!("APID too large"); panic!("APID too large");
} }
@ -269,16 +246,18 @@ impl PacketId {
} }
} }
pub fn new(ptype: PacketType, sec_header_flag: bool, apid: u16) -> Option<PacketId> { #[inline]
pub fn new_checked(ptype: PacketType, sec_header_flag: bool, apid: u16) -> Option<PacketId> {
if apid > MAX_APID { if apid > MAX_APID {
return None; return None;
} }
Some(PacketId::const_new(ptype, sec_header_flag, apid)) Some(PacketId::new(ptype, sec_header_flag, apid))
} }
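A short usage sketch (hypothetical APID values) contrasting the renamed panicking and checked constructors:
// Illustrative only: new_for_tc panics on an invalid APID, new_checked returns None instead.
const TC_ID: PacketId = PacketId::new_for_tc(true, 0x2A);

fn demo_packet_id_ctors() {
    assert_eq!(TC_ID.apid(), 0x2A);
    assert!(PacketId::new_checked(PacketType::Tc, true, 0xFFFF).is_none());
}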
/// Set a new Application Process ID (APID). If the passed number is invalid, the APID will /// Set a new Application Process ID (APID). If the passed number is invalid, the APID will
/// not be set and false will be returned. The maximum allowed value for the 11-bit field is /// not be set and false will be returned. The maximum allowed value for the 11-bit field is
/// 2047 /// 2047
#[inline]
pub fn set_apid(&mut self, apid: u16) -> bool { pub fn set_apid(&mut self, apid: u16) -> bool {
if apid > MAX_APID { if apid > MAX_APID {
return false; return false;
@ -287,6 +266,7 @@ impl PacketId {
true true
} }
#[inline]
pub fn apid(&self) -> u16 { pub fn apid(&self) -> u16 {
self.apid self.apid
} }
@ -318,9 +298,10 @@ pub struct PacketSequenceCtrl {
} }
impl PacketSequenceCtrl { impl PacketSequenceCtrl {
/// const variant of [PacketSequenceCtrl::new], but panics if the sequence count exceeds /// This constructor panics if the sequence count exceeds [MAX_SEQ_COUNT].
/// [MAX_SEQ_COUNT]. /// Use [Self::new_checked] to avoid panics.
pub const fn const_new(seq_flags: SequenceFlags, seq_count: u16) -> PacketSequenceCtrl { #[inline]
pub const fn new(seq_flags: SequenceFlags, seq_count: u16) -> PacketSequenceCtrl {
if seq_count > MAX_SEQ_COUNT { if seq_count > MAX_SEQ_COUNT {
panic!("Sequence count too large"); panic!("Sequence count too large");
} }
@ -331,18 +312,22 @@ impl PacketSequenceCtrl {
} }
/// Returns [None] if the passed sequence count exceeds [MAX_SEQ_COUNT]. /// Returns [None] if the passed sequence count exceeds [MAX_SEQ_COUNT].
pub fn new(seq_flags: SequenceFlags, seq_count: u16) -> Option<PacketSequenceCtrl> { #[inline]
pub fn new_checked(seq_flags: SequenceFlags, seq_count: u16) -> Option<PacketSequenceCtrl> {
if seq_count > MAX_SEQ_COUNT { if seq_count > MAX_SEQ_COUNT {
return None; return None;
} }
Some(PacketSequenceCtrl::const_new(seq_flags, seq_count)) Some(PacketSequenceCtrl::new(seq_flags, seq_count))
} }
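A minimal sketch of the same pattern for the packet sequence control word, mirroring the values used in the tests further below:
// Illustrative only.
fn demo_packet_seq_ctrl() {
    let psc = PacketSequenceCtrl::new(SequenceFlags::ContinuationSegment, 77);
    assert_eq!(psc.raw(), 77);
    assert!(PacketSequenceCtrl::new_checked(SequenceFlags::FirstSegment, 0xFFFF).is_none());
}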
#[inline]
pub fn raw(&self) -> u16 { pub fn raw(&self) -> u16 {
((self.seq_flags as u16) << 14) | self.seq_count ((self.seq_flags as u16) << 14) | self.seq_count
} }
/// Set a new sequence count. If the passed number is invalid, the sequence count will not be /// Set a new sequence count. If the passed number is invalid, the sequence count will not be
/// set and false will be returned. The maximum allowed value for the 14-bit field is 16383. /// set and false will be returned. The maximum allowed value for the 14-bit field is 16383.
#[inline]
pub fn set_seq_count(&mut self, ssc: u16) -> bool { pub fn set_seq_count(&mut self, ssc: u16) -> bool {
if ssc > MAX_SEQ_COUNT { if ssc > MAX_SEQ_COUNT {
return false; return false;
@ -351,6 +336,7 @@ impl PacketSequenceCtrl {
true true
} }
#[inline]
pub fn seq_count(&self) -> u16 { pub fn seq_count(&self) -> u16 {
self.seq_count self.seq_count
} }
@ -392,6 +378,7 @@ pub trait CcsdsPacket {
/// Retrieve data length field /// Retrieve data length field
fn data_len(&self) -> u16; fn data_len(&self) -> u16;
/// Retrieve the total packet size based on the data length field /// Retrieve the total packet size based on the data length field
#[inline]
fn total_len(&self) -> usize { fn total_len(&self) -> usize {
usize::from(self.data_len()) + CCSDS_HEADER_LEN + 1 usize::from(self.data_len()) + CCSDS_HEADER_LEN + 1
} }
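To make the off-by-one convention explicit: the CCSDS data length field stores the number of packet data bytes minus one, so a sketch of the relation (assuming the 6 byte CCSDS_HEADER_LEN used elsewhere in this file) is:
// Illustrative only: total size = 6 byte primary header + (data length field + 1) data bytes.
fn demo_total_len(packet: &impl CcsdsPacket) {
    assert_eq!(packet.total_len(), usize::from(packet.data_len()) + 6 + 1);
}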
@ -480,9 +467,12 @@ pub struct SpHeader {
pub data_len: u16, pub data_len: u16,
} }
pub type SpacePacketHeader = SpHeader;
impl Default for SpHeader { impl Default for SpHeader {
/// The default function sets the sequence flag field to [SequenceFlags::Unsegmented]. The data /// The default function sets the sequence flag field to [SequenceFlags::Unsegmented] and the
/// length field is set to 1, which denotes an empty space packet. /// data length to 0.
#[inline]
fn default() -> Self { fn default() -> Self {
SpHeader { SpHeader {
version: 0, version: 0,
@ -491,12 +481,13 @@ impl Default for SpHeader {
seq_flags: SequenceFlags::Unsegmented, seq_flags: SequenceFlags::Unsegmented,
seq_count: 0, seq_count: 0,
}, },
data_len: 1, data_len: 0,
} }
} }
} }
impl SpHeader { impl SpHeader {
#[inline]
pub const fn new(packet_id: PacketId, psc: PacketSequenceCtrl, data_len: u16) -> Self { pub const fn new(packet_id: PacketId, psc: PacketSequenceCtrl, data_len: u16) -> Self {
Self { Self {
version: 0, version: 0,
@ -506,9 +497,43 @@ impl SpHeader {
} }
} }
/// const variant of the [SpHeader::new_fron_single_fields] function. Panics if the passed /// This constructor sets the sequence flag field to [SequenceFlags::Unsegmented] and the data
/// APID exceeds [MAX_APID] or the passed packet sequence count exceeds [MAX_SEQ_COUNT]. /// length to 0.
const fn const_new_from_single_fields( ///
/// This constructor will panic if the APID exceeds [MAX_APID].
#[inline]
pub const fn new_from_apid(apid: u16) -> Self {
SpHeader {
version: 0,
packet_id: PacketId::new(PacketType::Tm, false, apid),
psc: PacketSequenceCtrl {
seq_flags: SequenceFlags::Unsegmented,
seq_count: 0,
},
data_len: 0,
}
}
/// Checked variant of [Self::new_from_apid].
#[inline]
pub fn new_from_apid_checked(apid: u16) -> Option<Self> {
Some(SpHeader {
version: 0,
packet_id: PacketId::new_checked(PacketType::Tm, false, apid)?,
psc: PacketSequenceCtrl {
seq_flags: SequenceFlags::Unsegmented,
seq_count: 0,
},
data_len: 0,
})
}
/// This constructor panics if the passed APID exceeds [MAX_APID] or the passed packet sequence
/// count exceeds [MAX_SEQ_COUNT].
///
/// The checked constructor variants can be used to avoid panics.
#[inline]
pub const fn new_from_fields(
ptype: PacketType, ptype: PacketType,
sec_header: bool, sec_header: bool,
apid: u16, apid: u16,
@ -523,17 +548,20 @@ impl SpHeader {
panic!("APID is too large"); panic!("APID is too large");
} }
Self { Self {
psc: PacketSequenceCtrl::const_new(seq_flags, seq_count), psc: PacketSequenceCtrl::new(seq_flags, seq_count),
packet_id: PacketId::const_new(ptype, sec_header, apid), packet_id: PacketId::new(ptype, sec_header, apid),
data_len, data_len,
version: 0, version: 0,
} }
} }
/// Create a new Space Packet Header instance which can be used to create generic /// Create a new Space Packet Header instance which can be used to create generic
/// Space Packets. This will return [None] if the APID or sequence count argument /// Space Packets.
///
/// This will return [None] if the APID or the sequence count argument
/// exceeds [MAX_APID] or [MAX_SEQ_COUNT], respectively. The version field is set to 0b000. /// exceeds [MAX_APID] or [MAX_SEQ_COUNT], respectively. The version field is set to 0b000.
pub fn new_from_single_fields( #[inline]
pub fn new_from_fields_checked(
ptype: PacketType, ptype: PacketType,
sec_header: bool, sec_header: bool,
apid: u16, apid: u16,
@ -544,56 +572,117 @@ impl SpHeader {
if seq_count > MAX_SEQ_COUNT || apid > MAX_APID { if seq_count > MAX_SEQ_COUNT || apid > MAX_APID {
return None; return None;
} }
Some(SpHeader::const_new_from_single_fields( Some(SpHeader::new_from_fields(
ptype, sec_header, apid, seq_flags, seq_count, data_len, ptype, sec_header, apid, seq_flags, seq_count, data_len,
)) ))
} }
/// Helper function for telemetry space packet headers. The packet type field will be /// Helper function for telemetry space packet headers. The packet type field will be
/// set accordingly. The secondary header flag field is set to false. /// set accordingly. The secondary header flag field is set to false.
pub fn tm(apid: u16, seq_flags: SequenceFlags, seq_count: u16, data_len: u16) -> Option<Self> { #[inline]
Self::new_from_single_fields(PacketType::Tm, false, apid, seq_flags, seq_count, data_len) pub fn new_for_tm_checked(
apid: u16,
seq_flags: SequenceFlags,
seq_count: u16,
data_len: u16,
) -> Option<Self> {
Self::new_from_fields_checked(PacketType::Tm, false, apid, seq_flags, seq_count, data_len)
} }
/// Helper function for telecommand space packet headers. The packet type field will be /// Helper function for telecommand space packet headers. The packet type field will be
/// set accordingly. The secondary header flag field is set to false. /// set accordingly. The secondary header flag field is set to false.
pub fn tc(apid: u16, seq_flags: SequenceFlags, seq_count: u16, data_len: u16) -> Option<Self> { #[inline]
Self::new_from_single_fields(PacketType::Tc, false, apid, seq_flags, seq_count, data_len) pub fn new_for_tc_checked(
apid: u16,
seq_flags: SequenceFlags,
seq_count: u16,
data_len: u16,
) -> Option<Self> {
Self::new_from_fields_checked(PacketType::Tc, false, apid, seq_flags, seq_count, data_len)
} }
/// Variant of [SpHeader::tm] which sets the sequence flag field to [SequenceFlags::Unsegmented] /// This is an unchecked constructor which can panic on invalid input.
pub fn tm_unseg(apid: u16, seq_count: u16, data_len: u16) -> Option<Self> { #[inline]
Self::tm(apid, SequenceFlags::Unsegmented, seq_count, data_len) pub const fn new_for_tm(
apid: u16,
seq_flags: SequenceFlags,
seq_count: u16,
data_len: u16,
) -> Self {
Self::new_from_fields(PacketType::Tm, false, apid, seq_flags, seq_count, data_len)
} }
/// Variant of [SpHeader::tc] which sets the sequence flag field to [SequenceFlags::Unsegmented] /// This is an unchecked constructor which can panic on invalid input.
pub fn tc_unseg(apid: u16, seq_count: u16, data_len: u16) -> Option<Self> { #[inline]
Self::tc(apid, SequenceFlags::Unsegmented, seq_count, data_len) pub const fn new_for_tc(
apid: u16,
seq_flags: SequenceFlags,
seq_count: u16,
data_len: u16,
) -> Self {
Self::new_from_fields(PacketType::Tc, false, apid, seq_flags, seq_count, data_len)
} }
//noinspection RsTraitImplementation /// Variant of [SpHeader::new_for_tm_checked] which sets the sequence flag field to [SequenceFlags::Unsegmented]
delegate!(to self.packet_id { #[inline]
/// Returns [false] and fails if the APID exceeds [MAX_APID] pub fn new_for_unseg_tm_checked(apid: u16, seq_count: u16, data_len: u16) -> Option<Self> {
pub fn set_apid(&mut self, apid: u16) -> bool; Self::new_for_tm_checked(apid, SequenceFlags::Unsegmented, seq_count, data_len)
}); }
delegate!(to self.psc { /// Variant of [SpHeader::new_for_tc_checked] which sets the sequence flag field to [SequenceFlags::Unsegmented]
/// Returns [false] and fails if the sequence count exceeds [MAX_SEQ_COUNT] #[inline]
pub fn set_seq_count(&mut self, seq_count: u16) -> bool; pub fn new_for_unseg_tc_checked(apid: u16, seq_count: u16, data_len: u16) -> Option<Self> {
}); Self::new_for_tc_checked(apid, SequenceFlags::Unsegmented, seq_count, data_len)
}
/// Variant of [SpHeader::new_for_tc] which sets the sequence flag field to [SequenceFlags::Unsegmented].
///
/// This is an unchecked constructor which can panic on invalid input.
#[inline]
pub const fn new_for_unseg_tc(apid: u16, seq_count: u16, data_len: u16) -> Self {
Self::new_for_tc(apid, SequenceFlags::Unsegmented, seq_count, data_len)
}
/// Variant of [SpHeader::new_for_tm] which sets the sequence flag field to [SequenceFlags::Unsegmented].
///
/// This is an unchecked constructor which can panic on invalid input.
#[inline]
pub const fn new_for_unseg_tm(apid: u16, seq_count: u16, data_len: u16) -> Self {
Self::new_for_tm(apid, SequenceFlags::Unsegmented, seq_count, data_len)
}
delegate! {
to self.packet_id {
/// Returns [false] and fails if the APID exceeds [MAX_APID]
#[inline]
pub fn set_apid(&mut self, apid: u16) -> bool;
}
}
delegate! {
to self.psc {
/// Returns [false] and fails if the sequence count exceeds [MAX_SEQ_COUNT]
#[inline]
pub fn set_seq_count(&mut self, seq_count: u16) -> bool;
}
}
#[inline]
pub fn set_seq_flags(&mut self, seq_flags: SequenceFlags) { pub fn set_seq_flags(&mut self, seq_flags: SequenceFlags) {
self.psc.seq_flags = seq_flags; self.psc.seq_flags = seq_flags;
} }
#[inline]
pub fn set_sec_header_flag(&mut self) { pub fn set_sec_header_flag(&mut self) {
self.packet_id.sec_header_flag = true; self.packet_id.sec_header_flag = true;
} }
#[inline]
pub fn clear_sec_header_flag(&mut self) { pub fn clear_sec_header_flag(&mut self) {
self.packet_id.sec_header_flag = false; self.packet_id.sec_header_flag = false;
} }
#[inline]
pub fn set_packet_type(&mut self, packet_type: PacketType) { pub fn set_packet_type(&mut self, packet_type: PacketType) {
self.packet_id.ptype = packet_type; self.packet_id.ptype = packet_type;
} }
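A usage sketch (hypothetical APID and sequence count) combining the new unchecked constructor with the delegated setters:
// Illustrative only.
fn demo_sp_header_setup() {
    let mut sp_header = SpHeader::new_for_unseg_tc(0x42, 25, 0);
    assert!(sp_header.set_apid(0x12));
    sp_header.set_sec_header_flag();
    assert_eq!(sp_header.apid(), 0x12);
    assert!(sp_header.sec_header_flag());
}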
@ -608,8 +697,8 @@ impl SpHeader {
expected: CCSDS_HEADER_LEN, expected: CCSDS_HEADER_LEN,
}); });
} }
let zc_header = zc::SpHeader::from_bytes(&buf[0..CCSDS_HEADER_LEN]) let zc_header = zc::SpHeader::read_from_bytes(&buf[0..CCSDS_HEADER_LEN])
.ok_or(ByteConversionError::ZeroCopyFromError)?; .map_err(|_| ByteConversionError::ZeroCopyFromError)?;
Ok((Self::from(zc_header), &buf[CCSDS_HEADER_LEN..])) Ok((Self::from(zc_header), &buf[CCSDS_HEADER_LEN..]))
} }
@ -627,10 +716,19 @@ impl SpHeader {
} }
let zc_header: zc::SpHeader = zc::SpHeader::from(*self); let zc_header: zc::SpHeader = zc::SpHeader::from(*self);
zc_header zc_header
.to_bytes(&mut buf[0..CCSDS_HEADER_LEN]) .write_to(&mut buf[0..CCSDS_HEADER_LEN])
.ok_or(ByteConversionError::ZeroCopyToError)?; .map_err(|_| ByteConversionError::ZeroCopyToError)?;
Ok(&mut buf[CCSDS_HEADER_LEN..]) Ok(&mut buf[CCSDS_HEADER_LEN..])
} }
/// Create a vector containing the CCSDS header.
#[cfg(feature = "alloc")]
pub fn to_vec(&self) -> alloc::vec::Vec<u8> {
let mut vec = alloc::vec![0; CCSDS_HEADER_LEN];
// This cannot fail, the buffer has exactly the required length.
self.write_to_be_bytes(&mut vec[..]).unwrap();
vec
}
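A round-trip sketch (hypothetical field values) for the byte conversion helpers, assuming the 6 byte CCSDS_HEADER_LEN:
// Illustrative only.
fn demo_sp_header_round_trip() {
    let sp_header = SpHeader::new_for_unseg_tm(0x07, 22, 36);
    let mut buf = [0u8; 6];
    sp_header.write_to_be_bytes(&mut buf).unwrap();
    let (read_back, _rest) = SpHeader::from_be_bytes(&buf).unwrap();
    assert_eq!(read_back, sp_header);
}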
} }
impl CcsdsPacket for SpHeader { impl CcsdsPacket for SpHeader {
@ -656,6 +754,7 @@ impl CcsdsPacket for SpHeader {
} }
impl CcsdsPrimaryHeader for SpHeader { impl CcsdsPrimaryHeader for SpHeader {
#[inline]
fn from_composite_fields( fn from_composite_fields(
packet_id: PacketId, packet_id: PacketId,
psc: PacketSequenceCtrl, psc: PacketSequenceCtrl,
@ -680,9 +779,9 @@ sph_from_other!(SpHeader, crate::zc::SpHeader);
pub mod zc { pub mod zc {
use crate::{CcsdsPacket, CcsdsPrimaryHeader, PacketId, PacketSequenceCtrl, VERSION_MASK}; use crate::{CcsdsPacket, CcsdsPrimaryHeader, PacketId, PacketSequenceCtrl, VERSION_MASK};
use zerocopy::byteorder::NetworkEndian; use zerocopy::byteorder::NetworkEndian;
use zerocopy::{AsBytes, FromBytes, FromZeroes, Unaligned, U16}; use zerocopy::{FromBytes, Immutable, IntoBytes, Unaligned, U16};
#[derive(FromBytes, FromZeroes, AsBytes, Unaligned, Debug)] #[derive(FromBytes, IntoBytes, Immutable, Unaligned, Debug)]
#[repr(C)] #[repr(C)]
pub struct SpHeader { pub struct SpHeader {
version_packet_id: U16<NetworkEndian>, version_packet_id: U16<NetworkEndian>,
@ -707,14 +806,6 @@ pub mod zc {
data_len: U16::from(data_len), data_len: U16::from(data_len),
} }
} }
pub fn from_bytes(slice: &[u8]) -> Option<Self> {
SpHeader::read_from(slice)
}
pub fn to_bytes(&self, slice: &mut [u8]) -> Option<()> {
self.write_to(slice)
}
} }
impl CcsdsPacket for SpHeader { impl CcsdsPacket for SpHeader {
@ -723,9 +814,12 @@ pub mod zc {
((self.version_packet_id.get() >> 13) as u8) & 0b111 ((self.version_packet_id.get() >> 13) as u8) & 0b111
} }
#[inline]
fn packet_id(&self) -> PacketId { fn packet_id(&self) -> PacketId {
PacketId::from(self.packet_id_raw()) PacketId::from(self.packet_id_raw())
} }
#[inline]
fn psc(&self) -> PacketSequenceCtrl { fn psc(&self) -> PacketSequenceCtrl {
PacketSequenceCtrl::from(self.psc_raw()) PacketSequenceCtrl::from(self.psc_raw())
} }
@ -735,10 +829,12 @@ pub mod zc {
self.data_len.get() self.data_len.get()
} }
#[inline]
fn packet_id_raw(&self) -> u16 { fn packet_id_raw(&self) -> u16 {
self.version_packet_id.get() & (!VERSION_MASK) self.version_packet_id.get() & (!VERSION_MASK)
} }
#[inline]
fn psc_raw(&self) -> u16 { fn psc_raw(&self) -> u16 {
self.psc.get() self.psc.get()
} }
@ -762,6 +858,8 @@ pub mod zc {
pub(crate) mod tests { pub(crate) mod tests {
use std::collections::HashSet; use std::collections::HashSet;
#[allow(unused_imports)]
use crate::ByteConversionError;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use crate::CcsdsPrimaryHeader; use crate::CcsdsPrimaryHeader;
use crate::{ use crate::{
@ -776,14 +874,15 @@ pub(crate) mod tests {
use postcard::{from_bytes, to_allocvec}; use postcard::{from_bytes, to_allocvec};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{de::DeserializeOwned, Serialize}; use serde::{de::DeserializeOwned, Serialize};
use zerocopy::FromBytes;
const CONST_SP: SpHeader = SpHeader::new( const CONST_SP: SpHeader = SpHeader::new(
PacketId::const_tc(true, 0x36), PacketId::new_for_tc(true, 0x36),
PacketSequenceCtrl::const_new(SequenceFlags::ContinuationSegment, 0x88), PacketSequenceCtrl::new(SequenceFlags::ContinuationSegment, 0x88),
0x90, 0x90,
); );
const PACKET_ID_TM: PacketId = PacketId::const_tm(true, 0x22); const PACKET_ID_TM: PacketId = PacketId::new_for_tm(true, 0x22);
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
pub(crate) fn generic_serde_test<T: Serialize + DeserializeOwned + PartialEq + Debug>( pub(crate) fn generic_serde_test<T: Serialize + DeserializeOwned + PartialEq + Debug>(
@ -795,11 +894,12 @@ pub(crate) mod tests {
} }
#[test] #[test]
#[allow(clippy::assertions_on_constants)]
fn verify_const_packet_id() { fn verify_const_packet_id() {
assert_eq!(PACKET_ID_TM.apid(), 0x22); assert_eq!(PACKET_ID_TM.apid(), 0x22);
assert!(PACKET_ID_TM.sec_header_flag); assert!(PACKET_ID_TM.sec_header_flag);
assert_eq!(PACKET_ID_TM.ptype, PacketType::Tm); assert_eq!(PACKET_ID_TM.ptype, PacketType::Tm);
let const_tc_id = PacketId::const_tc(true, 0x23); let const_tc_id = PacketId::new_for_tc(true, 0x23);
assert_eq!(const_tc_id.ptype, PacketType::Tc); assert_eq!(const_tc_id.ptype, PacketType::Tc);
} }
@ -813,27 +913,27 @@ pub(crate) mod tests {
#[test] #[test]
fn test_packet_id_ctors() { fn test_packet_id_ctors() {
let packet_id = PacketId::new(PacketType::Tc, true, 0x1ff); let packet_id = PacketId::new_checked(PacketType::Tc, true, 0x1ff);
assert!(packet_id.is_some()); assert!(packet_id.is_some());
let packet_id = packet_id.unwrap(); let packet_id = packet_id.unwrap();
assert_eq!(packet_id.apid(), 0x1ff); assert_eq!(packet_id.apid(), 0x1ff);
assert_eq!(packet_id.ptype, PacketType::Tc); assert_eq!(packet_id.ptype, PacketType::Tc);
assert!(packet_id.sec_header_flag); assert!(packet_id.sec_header_flag);
let packet_id_tc = PacketId::tc(true, 0x1ff); let packet_id_tc = PacketId::new_for_tc_checked(true, 0x1ff);
assert!(packet_id_tc.is_some()); assert!(packet_id_tc.is_some());
let packet_id_tc = packet_id_tc.unwrap(); let packet_id_tc = packet_id_tc.unwrap();
assert_eq!(packet_id_tc, packet_id); assert_eq!(packet_id_tc, packet_id);
let packet_id_tm = PacketId::tm(true, 0x2ff); let packet_id_tm = PacketId::new_for_tm_checked(true, 0x2ff);
assert!(packet_id_tm.is_some()); assert!(packet_id_tm.is_some());
let packet_id_tm = packet_id_tm.unwrap(); let packet_id_tm = packet_id_tm.unwrap();
assert_eq!(packet_id_tm.sec_header_flag, true); assert!(packet_id_tm.sec_header_flag);
assert_eq!(packet_id_tm.ptype, PacketType::Tm); assert_eq!(packet_id_tm.ptype, PacketType::Tm);
assert_eq!(packet_id_tm.apid, 0x2ff); assert_eq!(packet_id_tm.apid, 0x2ff);
} }
#[test] #[test]
fn verify_const_sp_header() { fn verify_const_sp_header() {
assert_eq!(CONST_SP.sec_header_flag(), true); assert!(CONST_SP.sec_header_flag());
assert_eq!(CONST_SP.apid(), 0x36); assert_eq!(CONST_SP.apid(), 0x36);
assert_eq!( assert_eq!(
CONST_SP.sequence_flags(), CONST_SP.sequence_flags(),
@ -874,7 +974,7 @@ pub(crate) mod tests {
#[test] #[test]
fn test_packet_id() { fn test_packet_id() {
let packet_id = let packet_id =
PacketId::new(PacketType::Tm, false, 0x42).expect("Packet ID creation failed"); PacketId::new_checked(PacketType::Tm, false, 0x42).expect("Packet ID creation failed");
assert_eq!(packet_id.raw(), 0x0042); assert_eq!(packet_id.raw(), 0x0042);
let packet_id_from_raw = PacketId::from(packet_id.raw()); let packet_id_from_raw = PacketId::from(packet_id.raw());
assert_eq!( assert_eq!(
@ -882,26 +982,26 @@ pub(crate) mod tests {
PacketType::Tm PacketType::Tm
); );
assert_eq!(packet_id_from_raw, packet_id); assert_eq!(packet_id_from_raw, packet_id);
let packet_id_from_new = PacketId::new(PacketType::Tm, false, 0x42).unwrap(); let packet_id_from_new = PacketId::new_checked(PacketType::Tm, false, 0x42).unwrap();
assert_eq!(packet_id_from_new, packet_id); assert_eq!(packet_id_from_new, packet_id);
} }
#[test] #[test]
fn test_invalid_packet_id() { fn test_invalid_packet_id() {
let packet_id_invalid = PacketId::new(PacketType::Tc, true, 0xFFFF); let packet_id_invalid = PacketId::new_checked(PacketType::Tc, true, 0xFFFF);
assert!(packet_id_invalid.is_none()); assert!(packet_id_invalid.is_none());
} }
#[test] #[test]
fn test_invalid_apid_setter() { fn test_invalid_apid_setter() {
let mut packet_id = let mut packet_id =
PacketId::new(PacketType::Tm, false, 0x42).expect("Packet ID creation failed"); PacketId::new_checked(PacketType::Tm, false, 0x42).expect("Packet ID creation failed");
assert!(!packet_id.set_apid(0xffff)); assert!(!packet_id.set_apid(0xffff));
} }
#[test] #[test]
fn test_invalid_seq_count() { fn test_invalid_seq_count() {
let mut psc = PacketSequenceCtrl::new(SequenceFlags::ContinuationSegment, 77) let mut psc = PacketSequenceCtrl::new_checked(SequenceFlags::ContinuationSegment, 77)
.expect("PSC creation failed"); .expect("PSC creation failed");
assert_eq!(psc.seq_count(), 77); assert_eq!(psc.seq_count(), 77);
assert!(!psc.set_seq_count(0xffff)); assert!(!psc.set_seq_count(0xffff));
@ -909,7 +1009,7 @@ pub(crate) mod tests {
#[test] #[test]
fn test_packet_seq_ctrl() { fn test_packet_seq_ctrl() {
let mut psc = PacketSequenceCtrl::new(SequenceFlags::ContinuationSegment, 77) let mut psc = PacketSequenceCtrl::new_checked(SequenceFlags::ContinuationSegment, 77)
.expect("PSC creation failed"); .expect("PSC creation failed");
assert_eq!(psc.raw(), 77); assert_eq!(psc.raw(), 77);
let psc_from_raw = PacketSequenceCtrl::from(psc.raw()); let psc_from_raw = PacketSequenceCtrl::from(psc.raw());
@ -918,16 +1018,18 @@ pub(crate) mod tests {
assert!(!psc.set_seq_count(2u16.pow(15))); assert!(!psc.set_seq_count(2u16.pow(15)));
assert_eq!(psc.raw(), 77); assert_eq!(psc.raw(), 77);
let psc_invalid = PacketSequenceCtrl::new(SequenceFlags::FirstSegment, 0xFFFF); let psc_invalid = PacketSequenceCtrl::new_checked(SequenceFlags::FirstSegment, 0xFFFF);
assert!(psc_invalid.is_none()); assert!(psc_invalid.is_none());
let psc_from_new = PacketSequenceCtrl::new(SequenceFlags::ContinuationSegment, 77).unwrap(); let psc_from_new =
PacketSequenceCtrl::new_checked(SequenceFlags::ContinuationSegment, 77).unwrap();
assert_eq!(psc_from_new, psc); assert_eq!(psc_from_new, psc);
} }
#[test] #[test]
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
fn test_serde_sph() { fn test_serde_sph() {
let sp_header = SpHeader::tc_unseg(0x42, 12, 0).expect("Error creating SP header"); let sp_header =
SpHeader::new_for_unseg_tc_checked(0x42, 12, 0).expect("Error creating SP header");
assert_eq!(sp_header.ccsds_version(), 0b000); assert_eq!(sp_header.ccsds_version(), 0b000);
assert!(sp_header.is_tc()); assert!(sp_header.is_tc());
assert!(!sp_header.sec_header_flag()); assert!(!sp_header.sec_header_flag());
@ -949,7 +1051,8 @@ pub(crate) mod tests {
assert_eq!(sp_header.ccsds_version(), 0b000); assert_eq!(sp_header.ccsds_version(), 0b000);
assert_eq!(sp_header.data_len, 0); assert_eq!(sp_header.data_len, 0);
let sp_header = SpHeader::tm_unseg(0x7, 22, 36).expect("Error creating SP header"); let sp_header =
SpHeader::new_for_unseg_tm_checked(0x7, 22, 36).expect("Error creating SP header");
assert_eq!(sp_header.ccsds_version(), 0b000); assert_eq!(sp_header.ccsds_version(), 0b000);
assert!(sp_header.is_tm()); assert!(sp_header.is_tm());
assert!(!sp_header.sec_header_flag()); assert!(!sp_header.sec_header_flag());
@ -963,8 +1066,8 @@ pub(crate) mod tests {
assert_eq!(sp_header.ccsds_version(), 0b000); assert_eq!(sp_header.ccsds_version(), 0b000);
let from_comp_fields = SpHeader::from_composite_fields( let from_comp_fields = SpHeader::from_composite_fields(
PacketId::new(PacketType::Tc, true, 0x42).unwrap(), PacketId::new(PacketType::Tc, true, 0x42),
PacketSequenceCtrl::new(SequenceFlags::Unsegmented, 0x7).unwrap(), PacketSequenceCtrl::new(SequenceFlags::Unsegmented, 0x7),
0, 0,
None, None,
); );
@ -981,7 +1084,7 @@ pub(crate) mod tests {
#[test] #[test]
fn test_setters() { fn test_setters() {
let sp_header = SpHeader::tc(0x42, SequenceFlags::Unsegmented, 25, 0); let sp_header = SpHeader::new_for_tc_checked(0x42, SequenceFlags::Unsegmented, 25, 0);
assert!(sp_header.is_some()); assert!(sp_header.is_some());
let mut sp_header = sp_header.unwrap(); let mut sp_header = sp_header.unwrap();
sp_header.set_apid(0x12); sp_header.set_apid(0x12);
@ -999,7 +1102,7 @@ pub(crate) mod tests {
#[test] #[test]
fn test_tc_ctor() { fn test_tc_ctor() {
let sp_header = SpHeader::tc(0x42, SequenceFlags::Unsegmented, 25, 0); let sp_header = SpHeader::new_for_tc_checked(0x42, SequenceFlags::Unsegmented, 25, 0);
assert!(sp_header.is_some()); assert!(sp_header.is_some());
let sp_header = sp_header.unwrap(); let sp_header = sp_header.unwrap();
verify_sp_fields(PacketType::Tc, &sp_header); verify_sp_fields(PacketType::Tc, &sp_header);
@ -1007,23 +1110,35 @@ pub(crate) mod tests {
#[test] #[test]
fn test_tc_ctor_unseg() { fn test_tc_ctor_unseg() {
let sp_header = SpHeader::tc_unseg(0x42, 25, 0); let sp_header = SpHeader::new_for_unseg_tc_checked(0x42, 25, 0);
assert!(sp_header.is_some()); assert!(sp_header.is_some());
let sp_header = sp_header.unwrap(); let sp_header = sp_header.unwrap();
verify_sp_fields(PacketType::Tc, &sp_header); verify_sp_fields(PacketType::Tc, &sp_header);
} }
#[test]
fn test_tc_ctor_unseg_const() {
let sp_header = SpHeader::new_for_unseg_tc(0x42, 25, 0);
verify_sp_fields(PacketType::Tc, &sp_header);
}
#[test] #[test]
fn test_tm_ctor() { fn test_tm_ctor() {
let sp_header = SpHeader::tm(0x42, SequenceFlags::Unsegmented, 25, 0); let sp_header = SpHeader::new_for_tm_checked(0x42, SequenceFlags::Unsegmented, 25, 0);
assert!(sp_header.is_some()); assert!(sp_header.is_some());
let sp_header = sp_header.unwrap(); let sp_header = sp_header.unwrap();
verify_sp_fields(PacketType::Tm, &sp_header); verify_sp_fields(PacketType::Tm, &sp_header);
} }
#[test]
fn test_tm_ctor_const() {
let sp_header = SpHeader::new_for_tm(0x42, SequenceFlags::Unsegmented, 25, 0);
verify_sp_fields(PacketType::Tm, &sp_header);
}
#[test] #[test]
fn test_tm_ctor_unseg() { fn test_tm_ctor_unseg() {
let sp_header = SpHeader::tm_unseg(0x42, 25, 0); let sp_header = SpHeader::new_for_unseg_tm_checked(0x42, 25, 0);
assert!(sp_header.is_some()); assert!(sp_header.is_some());
let sp_header = sp_header.unwrap(); let sp_header = sp_header.unwrap();
verify_sp_fields(PacketType::Tm, &sp_header); verify_sp_fields(PacketType::Tm, &sp_header);
@ -1039,10 +1154,10 @@ pub(crate) mod tests {
#[test] #[test]
fn test_zc_sph() { fn test_zc_sph() {
use zerocopy::AsBytes; use zerocopy::IntoBytes;
let sp_header = let sp_header = SpHeader::new_for_unseg_tc_checked(0x7FF, pow(2, 14) - 1, 0)
SpHeader::tc_unseg(0x7FF, pow(2, 14) - 1, 0).expect("Error creating SP header"); .expect("Error creating SP header");
assert_eq!(sp_header.ptype(), PacketType::Tc); assert_eq!(sp_header.ptype(), PacketType::Tc);
assert_eq!(sp_header.apid(), 0x7FF); assert_eq!(sp_header.apid(), 0x7FF);
assert_eq!(sp_header.data_len(), 0); assert_eq!(sp_header.data_len(), 0);
@ -1059,7 +1174,7 @@ pub(crate) mod tests {
assert_eq!(slice[5], 0x00); assert_eq!(slice[5], 0x00);
let mut slice = [0; 6]; let mut slice = [0; 6];
sp_header_zc.write_to(slice.as_mut_slice()); sp_header_zc.write_to(slice.as_mut_slice()).unwrap();
assert_eq!(slice.len(), 6); assert_eq!(slice.len(), 6);
assert_eq!(slice[0], 0x17); assert_eq!(slice[0], 0x17);
assert_eq!(slice[1], 0xFF); assert_eq!(slice[1], 0xFF);
@ -1070,7 +1185,7 @@ pub(crate) mod tests {
let mut test_vec = vec![0_u8; 6]; let mut test_vec = vec![0_u8; 6];
let slice = test_vec.as_mut_slice(); let slice = test_vec.as_mut_slice();
sp_header_zc.write_to(slice); sp_header_zc.write_to(slice).unwrap();
let slice = test_vec.as_slice(); let slice = test_vec.as_slice();
assert_eq!(slice.len(), 6); assert_eq!(slice.len(), 6);
assert_eq!(slice[0], 0x17); assert_eq!(slice[0], 0x17);
@ -1080,8 +1195,8 @@ pub(crate) mod tests {
assert_eq!(slice[4], 0x00); assert_eq!(slice[4], 0x00);
assert_eq!(slice[5], 0x00); assert_eq!(slice[5], 0x00);
let sp_header = zc::SpHeader::from_bytes(slice); let sp_header = zc::SpHeader::read_from_bytes(slice);
assert!(sp_header.is_some()); assert!(sp_header.is_ok());
let sp_header = sp_header.unwrap(); let sp_header = sp_header.unwrap();
assert_eq!(sp_header.ccsds_version(), 0b000); assert_eq!(sp_header.ccsds_version(), 0b000);
assert_eq!(sp_header.packet_id_raw(), 0x17FF); assert_eq!(sp_header.packet_id_raw(), 0x17FF);
@ -1107,4 +1222,40 @@ pub(crate) mod tests {
let mut id_set = HashSet::new(); let mut id_set = HashSet::new();
id_set.insert(PacketId::from(1_u16)); id_set.insert(PacketId::from(1_u16));
} }
#[test]
fn sp_header_from_apid() {
let sp_header = SpHeader::new_from_apid(0x03);
assert_eq!(sp_header.apid(), 0x03);
assert_eq!(sp_header.data_len(), 0);
}
#[test]
fn sp_header_from_apid_checked() {
let sp_header = SpHeader::new_from_apid_checked(0x03).unwrap();
assert_eq!(sp_header.apid(), 0x03);
assert_eq!(sp_header.data_len(), 0);
}
#[cfg(feature = "defmt")]
fn is_defmt_format<T: defmt::Format>(_t: T) {}
#[test]
#[cfg(feature = "defmt")]
fn test_defmt_format() {
is_defmt_format(ByteConversionError::ToSliceTooSmall {
found: 1,
expected: 2,
});
}
#[test]
fn test_sp_header_as_vec() {
let sp_header = SpHeader::new_for_unseg_tc(0x42, 25, 1);
let sp_header_as_vec = sp_header.to_vec();
let sp_header_read_back = SpHeader::from_be_bytes(&sp_header_as_vec)
.expect("Error reading back SP header")
.0;
assert_eq!(sp_header, sp_header_read_back);
}
} }

src/seq_count.rs (new file, 250 lines)

@ -0,0 +1,250 @@
use crate::MAX_SEQ_COUNT;
use core::cell::Cell;
use paste::paste;
#[cfg(feature = "std")]
pub use stdmod::*;
/// Core trait for objects which can provide a sequence count.
///
/// The core functions deliberately take &self instead of &mut self to allow easier usage with
/// static structs relying on the interior mutability pattern. This can be achieved by using
/// [Cell], [core::cell::RefCell] or atomic types.
pub trait SequenceCountProvider {
type Raw: Into<u64>;
const MAX_BIT_WIDTH: usize;
fn get(&self) -> Self::Raw;
fn increment(&self);
fn get_and_increment(&self) -> Self::Raw {
let val = self.get();
self.increment();
val
}
}
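A sketch of a custom implementation backed by an atomic instead of [Cell], to illustrate the interior mutability requirement (the type and its wrap-around behaviour are hypothetical and not part of this module):
use core::sync::atomic::{AtomicU16, Ordering};

// Illustrative only: this counter wraps at u16::MAX rather than MAX_SEQ_COUNT.
pub struct AtomicSeqCounter(AtomicU16);

impl SequenceCountProvider for AtomicSeqCounter {
    type Raw = u16;
    const MAX_BIT_WIDTH: usize = 16;

    fn get(&self) -> u16 {
        self.0.load(Ordering::Relaxed)
    }

    fn increment(&self) {
        self.0.fetch_add(1, Ordering::Relaxed);
    }

    fn get_and_increment(&self) -> u16 {
        // fetch_add returns the previous value and wraps on overflow.
        self.0.fetch_add(1, Ordering::Relaxed)
    }
}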
#[derive(Clone)]
pub struct SeqCountProviderSimple<T: Copy> {
seq_count: Cell<T>,
max_val: T,
}
macro_rules! impl_for_primitives {
($($ty: ident,)+) => {
$(
paste! {
impl SeqCountProviderSimple<$ty> {
pub fn [<new_custom_max_val_ $ty>](max_val: $ty) -> Self {
Self {
seq_count: Cell::new(0),
max_val,
}
}
pub fn [<new_ $ty>]() -> Self {
Self {
seq_count: Cell::new(0),
max_val: $ty::MAX
}
}
}
impl Default for SeqCountProviderSimple<$ty> {
fn default() -> Self {
Self::[<new_ $ty>]()
}
}
impl SequenceCountProvider for SeqCountProviderSimple<$ty> {
type Raw = $ty;
const MAX_BIT_WIDTH: usize = core::mem::size_of::<Self::Raw>() * 8;
fn get(&self) -> Self::Raw {
self.seq_count.get()
}
fn increment(&self) {
self.get_and_increment();
}
fn get_and_increment(&self) -> Self::Raw {
let curr_count = self.seq_count.get();
if curr_count == self.max_val {
self.seq_count.set(0);
} else {
self.seq_count.set(curr_count + 1);
}
curr_count
}
}
}
)+
}
}
impl_for_primitives!(u8, u16, u32, u64,);
/// This is a sequence count provider which wraps around at [MAX_SEQ_COUNT].
#[derive(Clone)]
pub struct CcsdsSimpleSeqCountProvider {
provider: SeqCountProviderSimple<u16>,
}
impl Default for CcsdsSimpleSeqCountProvider {
fn default() -> Self {
Self {
provider: SeqCountProviderSimple::new_custom_max_val_u16(MAX_SEQ_COUNT),
}
}
}
impl SequenceCountProvider for CcsdsSimpleSeqCountProvider {
type Raw = u16;
const MAX_BIT_WIDTH: usize = core::mem::size_of::<Self::Raw>() * 8;
delegate::delegate! {
to self.provider {
fn get(&self) -> u16;
fn increment(&self);
fn get_and_increment(&self) -> u16;
}
}
}
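A sketch of how the CCSDS counter might be paired with a space packet header (SpHeader and its set_seq_count setter come from the crate root; the pairing shown here is illustrative):
// Illustrative only.
fn demo_stamp_next_packet(provider: &CcsdsSimpleSeqCountProvider, header: &mut crate::SpHeader) {
    // The provider wraps at MAX_SEQ_COUNT, so the setter always succeeds.
    assert!(header.set_seq_count(provider.get_and_increment()));
}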
#[cfg(feature = "std")]
pub mod stdmod {
use super::*;
use std::sync::{Arc, Mutex};
macro_rules! sync_clonable_seq_counter_impl {
($($ty: ident,)+) => {
$(paste! {
/// These sequence counters can be shared between threads and can also be
/// configured to wrap around at specified maximum values. Please note that
/// the API provided by this struct will not panic on [Mutex] lock errors,
/// but it will yield 0 for the getter functions.
#[derive(Clone, Default)]
pub struct [<SeqCountProviderSync $ty:upper>] {
seq_count: Arc<Mutex<$ty>>,
max_val: $ty
}
impl [<SeqCountProviderSync $ty:upper>] {
pub fn new() -> Self {
Self::new_with_max_val($ty::MAX)
}
pub fn new_with_max_val(max_val: $ty) -> Self {
Self {
seq_count: Arc::default(),
max_val
}
}
}
impl SequenceCountProvider for [<SeqCountProviderSync $ty:upper>] {
type Raw = $ty;
const MAX_BIT_WIDTH: usize = core::mem::size_of::<Self::Raw>() * 8;
fn get(&self) -> $ty {
match self.seq_count.lock() {
Ok(counter) => *counter,
Err(_) => 0
}
}
fn increment(&self) {
self.get_and_increment();
}
fn get_and_increment(&self) -> $ty {
match self.seq_count.lock() {
Ok(mut counter) => {
let val = *counter;
if val == self.max_val {
*counter = 0;
} else {
*counter += 1;
}
val
}
Err(_) => 0,
}
}
}
})+
}
}
sync_clonable_seq_counter_impl!(u8, u16, u32, u64,);
}
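A sketch of sharing one of the generated sync providers across threads (requires the std feature; counts and ordering chosen for illustration):
// Illustrative only.
#[cfg(feature = "std")]
fn demo_shared_counter() {
    let counter = SeqCountProviderSyncU16::new();
    let counter_in_thread = counter.clone();
    let handle = std::thread::spawn(move || {
        counter_in_thread.get_and_increment();
    });
    handle.join().unwrap();
    counter.increment();
    // One increment in the spawned thread plus one here.
    assert_eq!(counter.get(), 2);
}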
#[cfg(test)]
mod tests {
use crate::seq_count::{
CcsdsSimpleSeqCountProvider, SeqCountProviderSimple, SeqCountProviderSyncU8,
SequenceCountProvider,
};
use crate::MAX_SEQ_COUNT;
#[test]
fn test_u8_counter() {
let u8_counter = SeqCountProviderSimple::<u8>::default();
assert_eq!(u8_counter.get(), 0);
assert_eq!(u8_counter.get_and_increment(), 0);
assert_eq!(u8_counter.get_and_increment(), 1);
assert_eq!(u8_counter.get(), 2);
}
#[test]
fn test_u8_counter_overflow() {
let u8_counter = SeqCountProviderSimple::new_u8();
for _ in 0..256 {
u8_counter.increment();
}
assert_eq!(u8_counter.get(), 0);
}
#[test]
fn test_ccsds_counter() {
let ccsds_counter = CcsdsSimpleSeqCountProvider::default();
assert_eq!(ccsds_counter.get(), 0);
assert_eq!(ccsds_counter.get_and_increment(), 0);
assert_eq!(ccsds_counter.get_and_increment(), 1);
assert_eq!(ccsds_counter.get(), 2);
}
#[test]
fn test_ccsds_counter_overflow() {
let ccsds_counter = CcsdsSimpleSeqCountProvider::default();
for _ in 0..MAX_SEQ_COUNT + 1 {
ccsds_counter.increment();
}
assert_eq!(ccsds_counter.get(), 0);
}
#[test]
fn test_atomic_ref_counters() {
let sync_u8_counter = SeqCountProviderSyncU8::new();
assert_eq!(sync_u8_counter.get(), 0);
assert_eq!(sync_u8_counter.get_and_increment(), 0);
assert_eq!(sync_u8_counter.get_and_increment(), 1);
assert_eq!(sync_u8_counter.get(), 2);
}
#[test]
fn test_atomic_ref_counters_overflow() {
let sync_u8_counter = SeqCountProviderSyncU8::new();
for _ in 0..u8::MAX as u16 + 1 {
sync_u8_counter.increment();
}
assert_eq!(sync_u8_counter.get(), 0);
}
#[test]
fn test_atomic_ref_counters_overflow_custom_max_val() {
let sync_u8_counter = SeqCountProviderSyncU8::new_with_max_val(128);
for _ in 0..129 {
sync_u8_counter.increment();
}
assert_eq!(sync_u8_counter.get(), 0);
}
}


@ -71,7 +71,19 @@ mod tests {
use std::format; use std::format;
#[test] #[test]
fn test_ascii_timestamp_a_unterminated() { fn test_ascii_timestamp_a_unterminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_a(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 10);
assert_eq!(stamp.len(), FMT_STR_CODE_A_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_a_unterminated_now() {
let date = Utc::now(); let date = Utc::now();
let stamp_formatter = generate_time_code_a(&date); let stamp_formatter = generate_time_code_a(&date);
let stamp = format!("{}", stamp_formatter); let stamp = format!("{}", stamp_formatter);
@ -82,7 +94,24 @@ mod tests {
} }
#[test] #[test]
fn test_ascii_timestamp_a_terminated() { fn test_ascii_timestamp_a_terminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_a_terminated(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 10);
let z_terminator = stamp.find('Z');
assert!(z_terminator.is_some());
assert_eq!(
z_terminator.unwrap(),
FMT_STR_CODE_A_TERMINATED_WITH_SIZE.1 - 1
);
assert_eq!(stamp.len(), FMT_STR_CODE_A_TERMINATED_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_a_terminated_now() {
let date = Utc::now(); let date = Utc::now();
let stamp_formatter = generate_time_code_a_terminated(&date); let stamp_formatter = generate_time_code_a_terminated(&date);
let stamp = format!("{}", stamp_formatter); let stamp = format!("{}", stamp_formatter);
@ -99,7 +128,19 @@ mod tests {
} }
#[test] #[test]
fn test_ascii_timestamp_b_unterminated() { fn test_ascii_timestamp_b_unterminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_b(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 8);
assert_eq!(stamp.len(), FMT_STR_CODE_B_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_b_unterminated_now() {
let date = Utc::now(); let date = Utc::now();
let stamp_formatter = generate_time_code_b(&date); let stamp_formatter = generate_time_code_b(&date);
let stamp = format!("{}", stamp_formatter); let stamp = format!("{}", stamp_formatter);
@ -110,7 +151,25 @@ mod tests {
} }
#[test] #[test]
fn test_ascii_timestamp_b_terminated() { fn test_ascii_timestamp_b_terminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_b_terminated(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 8);
let z_terminator = stamp.find('Z');
assert!(z_terminator.is_some());
assert_eq!(
z_terminator.unwrap(),
FMT_STR_CODE_B_TERMINATED_WITH_SIZE.1 - 1
);
assert_eq!(stamp.len(), FMT_STR_CODE_B_TERMINATED_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_b_terminated_now() {
let date = Utc::now(); let date = Utc::now();
let stamp_formatter = generate_time_code_b_terminated(&date); let stamp_formatter = generate_time_code_b_terminated(&date);
let stamp = format!("{}", stamp_formatter); let stamp = format!("{}", stamp_formatter);


@ -7,17 +7,13 @@
use crate::private::Sealed; use crate::private::Sealed;
use crate::ByteConversionError; use crate::ByteConversionError;
use core::cmp::Ordering; use core::cmp::Ordering;
use core::fmt::{Debug, Display, Formatter}; use core::fmt::Debug;
use core::ops::{Add, AddAssign}; use core::ops::{Add, AddAssign};
use core::time::Duration; use core::time::Duration;
use delegate::delegate;
#[cfg(feature = "std")] #[cfg(feature = "std")]
use super::StdTimestampError; use super::StdTimestampError;
#[cfg(feature = "std")] #[cfg(feature = "std")]
use std::error::Error;
#[cfg(feature = "std")]
use std::time::{SystemTime, SystemTimeError}; use std::time::{SystemTime, SystemTimeError};
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
@ -93,49 +89,19 @@ pub enum SubmillisPrecision {
Reserved = 0b11, Reserved = 0b11,
} }
#[derive(Debug, PartialEq, Eq, Copy, Clone)] #[derive(Debug, PartialEq, Eq, Copy, Clone, thiserror::Error)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))] #[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum CdsError { pub enum CdsError {
/// CCSDS days value exceeds maximum allowed size or is negative /// CCSDS days value exceeds maximum allowed size or is negative
#[error("invalid ccsds days {0}")]
InvalidCcsdsDays(i64), InvalidCcsdsDays(i64),
/// There are distinct constructors depending on the days field width detected in the preamble /// There are distinct constructors depending on the days field width detected in the preamble
/// field. This error will be returned if there is a mismatch. /// field. This error will be returned if there is a mismatch.
#[error("wrong constructor for length of day {0:?} detected in preamble")]
InvalidCtorForDaysOfLenInPreamble(LengthOfDaySegment), InvalidCtorForDaysOfLenInPreamble(LengthOfDaySegment),
DateBeforeCcsdsEpoch(DateBeforeCcsdsEpochError), #[error("date before CCSDS epoch: {0}")]
} DateBeforeCcsdsEpoch(#[from] DateBeforeCcsdsEpochError),
impl Display for CdsError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
CdsError::InvalidCcsdsDays(days) => {
write!(f, "invalid ccsds days {days}")
}
CdsError::InvalidCtorForDaysOfLenInPreamble(length_of_day) => {
write!(
f,
"wrong constructor for length of day {length_of_day:?} detected in preamble",
)
}
CdsError::DateBeforeCcsdsEpoch(e) => write!(f, "date before CCSDS epoch: {e}"),
}
}
}
#[cfg(feature = "std")]
impl Error for CdsError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
CdsError::DateBeforeCcsdsEpoch(e) => Some(e),
_ => None,
}
}
}
impl From<DateBeforeCcsdsEpochError> for CdsError {
fn from(value: DateBeforeCcsdsEpochError) -> Self {
Self::DateBeforeCcsdsEpoch(value)
}
} }
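As a small sketch, the derived Display output of the refactored error matches the removed hand-written implementation, and the #[from] attribute replaces the manual From impl:
// Illustrative only, assuming CdsError is in scope.
fn demo_cds_error_display() {
    let err = CdsError::InvalidCcsdsDays(-1);
    assert_eq!(err.to_string(), "invalid ccsds days -1");
}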
pub fn length_of_day_segment_from_pfield(pfield: u8) -> LengthOfDaySegment { pub fn length_of_day_segment_from_pfield(pfield: u8) -> LengthOfDaySegment {
@ -145,6 +111,7 @@ pub fn length_of_day_segment_from_pfield(pfield: u8) -> LengthOfDaySegment {
LengthOfDaySegment::Short16Bits LengthOfDaySegment::Short16Bits
} }
#[inline]
pub fn precision_from_pfield(pfield: u8) -> SubmillisPrecision { pub fn precision_from_pfield(pfield: u8) -> SubmillisPrecision {
match pfield & 0b11 { match pfield & 0b11 {
0b01 => SubmillisPrecision::Microseconds, 0b01 => SubmillisPrecision::Microseconds,
@ -272,54 +239,70 @@ impl ConversionFromUnix {
} }
impl CdsCommon for ConversionFromUnix { impl CdsCommon for ConversionFromUnix {
#[inline]
fn submillis_precision(&self) -> SubmillisPrecision { fn submillis_precision(&self) -> SubmillisPrecision {
self.submilis_prec self.submilis_prec
} }
#[inline]
fn ms_of_day(&self) -> u32 { fn ms_of_day(&self) -> u32 {
self.ms_of_day self.ms_of_day
} }
#[inline]
fn ccsds_days_as_u32(&self) -> u32 { fn ccsds_days_as_u32(&self) -> u32 {
self.ccsds_days self.ccsds_days
} }
#[inline]
fn submillis(&self) -> u32 { fn submillis(&self) -> u32 {
self.submillis self.submillis
} }
} }
impl CdsConverter for ConversionFromUnix { impl CdsConverter for ConversionFromUnix {
#[inline]
fn unix_days_seconds(&self) -> i64 { fn unix_days_seconds(&self) -> i64 {
self.unix_days_seconds self.unix_days_seconds
} }
} }
/// Helper struct which generates fields for the CDS time provider from a datetime. /// Helper struct which generates fields for the CDS time provider from a datetime.
#[cfg(feature = "chrono")]
struct ConversionFromChronoDatetime { struct ConversionFromChronoDatetime {
unix_conversion: ConversionFromUnix, unix_conversion: ConversionFromUnix,
submillis_prec: SubmillisPrecision, submillis_prec: SubmillisPrecision,
submillis: u32, submillis: u32,
} }
#[cfg(feature = "chrono")]
impl CdsCommon for ConversionFromChronoDatetime { impl CdsCommon for ConversionFromChronoDatetime {
#[inline]
fn submillis_precision(&self) -> SubmillisPrecision { fn submillis_precision(&self) -> SubmillisPrecision {
self.submillis_prec self.submillis_prec
} }
delegate! { delegate::delegate! {
to self.unix_conversion { to self.unix_conversion {
#[inline]
fn ms_of_day(&self) -> u32; fn ms_of_day(&self) -> u32;
#[inline]
fn ccsds_days_as_u32(&self) -> u32; fn ccsds_days_as_u32(&self) -> u32;
} }
} }
#[inline]
fn submillis(&self) -> u32 { fn submillis(&self) -> u32 {
self.submillis self.submillis
} }
} }
#[cfg(feature = "chrono")]
impl CdsConverter for ConversionFromChronoDatetime { impl CdsConverter for ConversionFromChronoDatetime {
delegate! {to self.unix_conversion { fn unix_days_seconds(&self) -> i64; }} delegate::delegate! {to self.unix_conversion {
#[inline]
fn unix_days_seconds(&self) -> i64;
}}
} }
#[inline] #[inline]
@ -353,7 +336,6 @@ impl ConversionFromChronoDatetime {
Self::new_generic(dt, SubmillisPrecision::Picoseconds) Self::new_generic(dt, SubmillisPrecision::Picoseconds)
} }
#[cfg(feature = "chrono")]
fn new_generic( fn new_generic(
dt: &chrono::DateTime<chrono::Utc>, dt: &chrono::DateTime<chrono::Utc>,
prec: SubmillisPrecision, prec: SubmillisPrecision,
@ -435,7 +417,7 @@ impl CdsCommon for ConversionFromNow {
fn submillis_precision(&self) -> SubmillisPrecision { fn submillis_precision(&self) -> SubmillisPrecision {
self.submillis_prec self.submillis_prec
} }
delegate! { delegate::delegate! {
to self.unix_conversion { to self.unix_conversion {
fn ms_of_day(&self) -> u32; fn ms_of_day(&self) -> u32;
fn ccsds_days_as_u32(&self) -> u32; fn ccsds_days_as_u32(&self) -> u32;
@ -449,7 +431,7 @@ impl CdsCommon for ConversionFromNow {
#[cfg(feature = "std")] #[cfg(feature = "std")]
impl CdsConverter for ConversionFromNow { impl CdsConverter for ConversionFromNow {
delegate! {to self.unix_conversion { fn unix_days_seconds(&self) -> i64; }} delegate::delegate! {to self.unix_conversion { fn unix_days_seconds(&self) -> i64; }}
} }
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
@ -1609,6 +1591,7 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_time_now() { fn test_time_now() {
let timestamp_now = CdsTime::now_with_u16_days().unwrap(); let timestamp_now = CdsTime::now_with_u16_days().unwrap();
let compare_stamp = chrono::Utc::now(); let compare_stamp = chrono::Utc::now();
@ -1616,6 +1599,7 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_us_prec() { fn test_time_now_us_prec() {
let timestamp_now = CdsTime::now_with_u16_days_us_precision().unwrap(); let timestamp_now = CdsTime::now_with_u16_days_us_precision().unwrap();
let compare_stamp = chrono::Utc::now(); let compare_stamp = chrono::Utc::now();
@ -1623,6 +1607,7 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec() { fn test_time_now_ps_prec() {
let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap(); let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now(); let compare_stamp = chrono::Utc::now();
@ -1630,6 +1615,7 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec_u16_days() { fn test_time_now_ps_prec_u16_days() {
let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap(); let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now(); let compare_stamp = chrono::Utc::now();
@ -1637,6 +1623,7 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec_u24_days() { fn test_time_now_ps_prec_u24_days() {
let timestamp_now = CdsTime::now_with_u24_days_ps_precision().unwrap(); let timestamp_now = CdsTime::now_with_u24_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now(); let compare_stamp = chrono::Utc::now();
@ -2293,6 +2280,7 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_update_from_now() { fn test_update_from_now() {
let mut stamp = CdsTime::new_with_u16_days(0, 0); let mut stamp = CdsTime::new_with_u16_days(0, 0);
let _ = stamp.update_from_now(); let _ = stamp.update_from_now();
@ -2308,6 +2296,7 @@ mod tests {
#[test] #[test]
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
#[cfg_attr(miri, ignore)]
fn test_serialization() { fn test_serialization() {
let stamp_now = CdsTime::now_with_u16_days().expect("Error retrieving time"); let stamp_now = CdsTime::now_with_u16_days().expect("Error retrieving time");
let val = to_allocvec(&stamp_now).expect("Serializing timestamp failed"); let val = to_allocvec(&stamp_now).expect("Serializing timestamp failed");


@ -9,7 +9,6 @@ use serde::{Deserialize, Serialize};
use core::fmt::{Debug, Display, Formatter}; use core::fmt::{Debug, Display, Formatter};
use core::ops::{Add, AddAssign}; use core::ops::{Add, AddAssign};
use core::time::Duration; use core::time::Duration;
use core::u64;
use crate::ByteConversionError; use crate::ByteConversionError;
@ -177,6 +176,7 @@ pub struct FractionalPart {
} }
impl FractionalPart { impl FractionalPart {
#[inline]
pub const fn new(resolution: FractionalResolution, counter: u32) -> Self { pub const fn new(resolution: FractionalResolution, counter: u32) -> Self {
let div = fractional_res_to_div(resolution); let div = fractional_res_to_div(resolution);
assert!(counter <= div, "invalid counter for resolution"); assert!(counter <= div, "invalid counter for resolution");
@ -187,15 +187,18 @@ impl FractionalPart {
} }
/// An empty fractional part for second resolution only. /// An empty fractional part for second resolution only.
#[inline]
pub const fn new_with_seconds_resolution() -> Self { pub const fn new_with_seconds_resolution() -> Self {
Self::new(FractionalResolution::Seconds, 0) Self::new(FractionalResolution::Seconds, 0)
} }
/// Helper method which simply calls [Self::new_with_seconds_resolution]. /// Helper method which simply calls [Self::new_with_seconds_resolution].
#[inline]
pub const fn new_empty() -> Self { pub const fn new_empty() -> Self {
Self::new_with_seconds_resolution() Self::new_with_seconds_resolution()
} }
#[inline]
pub fn new_checked(resolution: FractionalResolution, counter: u32) -> Option<Self> { pub fn new_checked(resolution: FractionalResolution, counter: u32) -> Option<Self> {
let div = fractional_res_to_div(resolution); let div = fractional_res_to_div(resolution);
if counter > div { if counter > div {
@ -207,14 +210,17 @@ impl FractionalPart {
}) })
} }
#[inline]
pub fn resolution(&self) -> FractionalResolution { pub fn resolution(&self) -> FractionalResolution {
self.resolution self.resolution
} }
#[inline]
pub fn counter(&self) -> u32 { pub fn counter(&self) -> u32 {
self.counter self.counter
} }
#[inline]
pub fn no_fractional_part(&self) -> bool { pub fn no_fractional_part(&self) -> bool {
self.resolution == FractionalResolution::Seconds self.resolution == FractionalResolution::Seconds
} }
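A short sketch (values chosen for illustration) of the fractional part accessors added above:
// Illustrative only.
fn demo_fractional_part() {
    let fractions = FractionalPart::new_with_seconds_resolution();
    assert!(fractions.no_fractional_part());
    assert_eq!(fractions.counter(), 0);
    assert!(fractions.resolution() == FractionalResolution::Seconds);
}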
@ -285,6 +291,7 @@ pub struct CucTimeWithLeapSecs {
} }
impl CucTimeWithLeapSecs { impl CucTimeWithLeapSecs {
#[inline]
pub fn new(time: CucTime, leap_seconds: u32) -> Self { pub fn new(time: CucTime, leap_seconds: u32) -> Self {
Self { time, leap_seconds } Self { time, leap_seconds }
} }
@ -300,6 +307,7 @@ pub fn pfield_len(pfield: u8) -> usize {
impl CucTime { impl CucTime {
/// Create a time provider with a four byte counter and no fractional part. /// Create a time provider with a four byte counter and no fractional part.
#[inline]
pub fn new(counter: u32) -> Self { pub fn new(counter: u32) -> Self {
// These values are definitely valid, so it is okay to unwrap here. // These values are definitely valid, so it is okay to unwrap here.
Self::new_generic( Self::new_generic(
@ -310,11 +318,13 @@ impl CucTime {
} }
/// Like [CucTime::new] but allow to supply a fractional part as well. /// Like [CucTime::new] but allow to supply a fractional part as well.
#[inline]
pub fn new_with_fractions(counter: u32, fractions: FractionalPart) -> Result<Self, CucError> { pub fn new_with_fractions(counter: u32, fractions: FractionalPart) -> Result<Self, CucError> {
Self::new_generic(WidthCounterPair(4, counter), fractions) Self::new_generic(WidthCounterPair(4, counter), fractions)
} }
/// Fractions with a resolution of ~ 4 ms /// Fractions with a resolution of ~ 4 ms
#[inline]
pub fn new_with_coarse_fractions(counter: u32, subsec_fractions: u8) -> Self { pub fn new_with_coarse_fractions(counter: u32, subsec_fractions: u8) -> Self {
// These values are definitely valid, so it is okay to unwrap here. // These values are definitely valid, so it is okay to unwrap here.
Self::new_generic( Self::new_generic(
@ -328,6 +338,7 @@ impl CucTime {
} }
/// Fractions with a resolution of ~ 16 us /// Fractions with a resolution of ~ 16 us
#[inline]
pub fn new_with_medium_fractions(counter: u32, subsec_fractions: u16) -> Self { pub fn new_with_medium_fractions(counter: u32, subsec_fractions: u16) -> Self {
// These values are definitely valid, so it is okay to unwrap here. // These values are definitely valid, so it is okay to unwrap here.
Self::new_generic( Self::new_generic(
@ -343,6 +354,7 @@ impl CucTime {
/// Fractions with a resolution of ~ 60 ns. The fractional part value is limited by the /// Fractions with a resolution of ~ 60 ns. The fractional part value is limited by the
/// 24 bits of the fractional field, so this function will fail with /// 24 bits of the fractional field, so this function will fail with
/// [CucError::InvalidFractions] if the fractional value exceeds the allowed maximum. /// [CucError::InvalidFractions] if the fractional value exceeds the allowed maximum.
#[inline]
pub fn new_with_fine_fractions(counter: u32, subsec_fractions: u32) -> Result<Self, CucError> { pub fn new_with_fine_fractions(counter: u32, subsec_fractions: u32) -> Result<Self, CucError> {
Self::new_generic( Self::new_generic(
WidthCounterPair(4, counter), WidthCounterPair(4, counter),
@ -457,6 +469,7 @@ impl CucTime {
/// Most generic constructor which allows full configurability for the counter and for the /// Most generic constructor which allows full configurability for the counter and for the
/// fractions. /// fractions.
#[inline]
pub fn new_generic( pub fn new_generic(
width_and_counter: WidthCounterPair, width_and_counter: WidthCounterPair,
fractions: FractionalPart, fractions: FractionalPart,
@ -476,31 +489,38 @@ impl CucTime {
}) })
} }
+    #[inline]
    pub fn ccsds_time_code(&self) -> CcsdsTimeCode {
        CcsdsTimeCode::CucCcsdsEpoch
    }
+    #[inline]
    pub fn width_counter_pair(&self) -> WidthCounterPair {
        self.counter
    }
+    #[inline]
    pub fn counter_width(&self) -> u8 {
        self.counter.0
    }
+    #[inline]
    pub fn counter(&self) -> u32 {
        self.counter.1
    }
    /// Subsecond fractional part of the CUC time.
+    #[inline]
    pub fn fractions(&self) -> FractionalPart {
        self.fractions
    }
+    #[inline]
    pub fn to_leap_sec_helper(&self, leap_seconds: u32) -> CucTimeWithLeapSecs {
        CucTimeWithLeapSecs::new(*self, leap_seconds)
    }
+    #[inline]
    pub fn set_fractions(&mut self, fractions: FractionalPart) -> Result<(), CucError> {
        Self::verify_fractions_value(fractions)?;
        self.fractions = fractions;
@@ -510,6 +530,7 @@ impl CucTime {
    /// Set a fractional resolution. Please note that this function will reset the fractional value
    /// to 0 if the resolution changes.
+    #[inline]
    pub fn set_fractional_resolution(&mut self, res: FractionalResolution) {
        if res == FractionalResolution::Seconds {
            self.fractions = FractionalPart::new_with_seconds_resolution();
@@ -519,6 +540,7 @@ impl CucTime {
        }
    }
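As the doc comment notes, changing the resolution through `set_fractional_resolution` resets the fractional counter to 0. A short sketch of that behaviour; the import paths are assumptions:

```rust
use spacepackets::time::cuc::{CucTime, FractionalResolution};

fn main() {
    let mut stamp = CucTime::new(2000);
    // Switching to ~60 ns resolution starts the fractional counter at 0.
    stamp.set_fractional_resolution(FractionalResolution::SixtyNs);
    assert!(stamp.fractions().resolution() == FractionalResolution::SixtyNs);

    // Going back to plain seconds clears the fractional part entirely.
    stamp.set_fractional_resolution(FractionalResolution::Seconds);
    assert!(stamp.fractions().resolution() == FractionalResolution::Seconds);
}
```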
+    #[inline]
    fn build_p_field(counter_width: u8, resolution: FractionalResolution) -> u8 {
        let mut pfield = P_FIELD_BASE;
        if !(1..=4).contains(&counter_width) {
@@ -538,6 +560,7 @@ impl CucTime {
        pfield
    }
+    #[inline]
    fn update_p_field_fractions(&mut self) {
        self.pfield &= !(0b11);
        self.pfield |= self.fractions.resolution() as u8;
@@ -582,6 +605,7 @@ impl CucTime {
        )
    }
+    #[inline]
    pub fn len_packed_from_pfield(pfield: u8) -> usize {
        let mut base_len: usize = 1;
        base_len += Self::len_cntr_from_pfield(pfield) as usize;
@@ -590,6 +614,7 @@ impl CucTime {
    }
    /// Verifies the raw width parameter.
+    #[inline]
    fn verify_counter_width(width: u8) -> Result<(), CucError> {
        if width == 0 || width > 4 {
            return Err(CucError::InvalidCounterWidth(width));
@@ -597,6 +622,7 @@ impl CucTime {
        Ok(())
    }
+    #[inline]
    fn verify_fractions_value(val: FractionalPart) -> Result<(), CucError> {
        if val.counter > 2u32.pow((val.resolution as u32) * 8) - 1 {
            return Err(CucError::InvalidFractions {
@@ -607,10 +633,12 @@ impl CucTime {
        Ok(())
    }
+    #[inline]
    fn len_as_bytes(&self) -> usize {
        Self::len_packed_from_pfield(self.pfield)
    }
+    #[inline]
    fn subsec_nanos(&self) -> u32 {
        if self.fractions.resolution() == FractionalResolution::Seconds {
            return 0;
@@ -751,22 +779,27 @@ impl TimeWriter for CucTime {
}
impl CcsdsTimeProvider for CucTimeWithLeapSecs {
+    #[inline]
    fn len_as_bytes(&self) -> usize {
        self.time.len_as_bytes()
    }
+    #[inline]
    fn p_field(&self) -> (usize, [u8; 2]) {
        (1, [self.time.pfield, 0])
    }
+    #[inline]
    fn ccdsd_time_code(&self) -> CcsdsTimeCode {
        self.time.ccsds_time_code()
    }
+    #[inline]
    fn unix_secs(&self) -> i64 {
        self.time.unix_secs(self.leap_seconds)
    }
+    #[inline]
    fn subsec_nanos(&self) -> u32 {
        self.time.subsec_nanos()
    }
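`to_leap_sec_helper` pairs a raw CUC stamp with a leap second count so that the delegating `CcsdsTimeProvider` impl above can produce UNIX time. A sketch of that workflow; the import paths, the leap second value of 37, and the expected packed length are assumptions:

```rust
use spacepackets::time::{cuc::CucTime, CcsdsTimeProvider};

fn main() {
    let stamp = CucTime::new(2000);
    let provider = stamp.to_leap_sec_helper(37);
    // Assumed layout: one p-field octet plus a four byte counter, no fraction octets.
    assert_eq!(provider.len_as_bytes(), 5);
    // UNIX seconds and subsecond nanoseconds are now available through the trait.
    let _unix_secs = provider.unix_secs();
    assert_eq!(provider.subsec_nanos(), 0);
}
```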
@@ -913,6 +946,7 @@ mod tests {
    }
    #[test]
+    #[cfg_attr(miri, ignore)]
    fn test_datetime_now() {
        let now = chrono::Utc::now();
        let cuc_now = CucTime::now(FractionalResolution::SixtyNs, LEAP_SECONDS);
@@ -1244,6 +1278,7 @@ mod tests {
    }
    #[test]
+    #[cfg_attr(miri, ignore)]
    fn set_fract_resolution() {
        let mut stamp = CucTime::new(2000);
        stamp.set_fractional_resolution(FractionalResolution::SixtyNs);


@@ -6,7 +6,6 @@ use core::cmp::Ordering;
use core::fmt::{Display, Formatter};
use core::ops::{Add, AddAssign, Sub};
use core::time::Duration;
-use core::u8;
#[allow(unused_imports)]
#[cfg(not(feature = "std"))]
@@ -64,20 +63,12 @@ pub fn ccsds_time_code_from_p_field(pfield: u8) -> Result<CcsdsTimeCode, u8> {
    CcsdsTimeCode::try_from(raw_bits).map_err(|_| raw_bits)
}
-#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+#[derive(Debug, PartialEq, Eq, Copy, Clone, thiserror::Error)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
+#[error("date before ccsds epoch: {0:?}")]
pub struct DateBeforeCcsdsEpochError(UnixTime);
-impl Display for DateBeforeCcsdsEpochError {
-    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
-        write!(f, "date before ccsds epoch: {:?}", self.0)
-    }
-}
-#[cfg(feature = "std")]
-impl Error for DateBeforeCcsdsEpochError {}
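The removed `Display` and `Error` boilerplate is exactly what the `thiserror` derive now generates from the `#[error(...)]` attribute. A self-contained sketch of the same pattern; the `DummyTime` payload type is hypothetical:

```rust
use thiserror::Error;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct DummyTime(i64);

// The derive produces the Display impl from the #[error(...)] format string
// and wires up the Error trait implementation as well.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Error)]
#[error("date before ccsds epoch: {0:?}")]
pub struct DateBeforeEpochError(DummyTime);

fn main() {
    let err = DateBeforeEpochError(DummyTime(-42));
    assert_eq!(err.to_string(), "date before ccsds epoch: DummyTime(-42)");
}
```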
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
@@ -166,6 +157,7 @@ pub fn seconds_since_epoch() -> f64 {
///
/// - CCSDS epoch: 1958-01-01T00:00:00+00:00
/// - UNIX Epoch: 1970-01-01T00:00:00+00:00
+#[inline]
pub const fn unix_to_ccsds_days(unix_days: i64) -> i64 {
    unix_days - DAYS_CCSDS_TO_UNIX as i64
}
@@ -174,16 +166,19 @@ pub const fn unix_to_ccsds_days(unix_days: i64) -> i64 {
///
/// - CCSDS epoch: 1958-01-01T00:00:00+00:00
/// - UNIX Epoch: 1970-01-01T00:00:00+00:00
+#[inline]
pub const fn ccsds_to_unix_days(ccsds_days: i64) -> i64 {
    ccsds_days + DAYS_CCSDS_TO_UNIX as i64
}
/// Similar to [unix_to_ccsds_days] but converts the epoch instead, which is the number of elapsed
/// seconds since the CCSDS and UNIX epoch times.
+#[inline]
pub const fn unix_epoch_to_ccsds_epoch(unix_epoch: i64) -> i64 {
    unix_epoch - (DAYS_CCSDS_TO_UNIX as i64 * SECONDS_PER_DAY as i64)
}
+#[inline]
pub const fn ccsds_epoch_to_unix_epoch(ccsds_epoch: i64) -> i64 {
    ccsds_epoch + (DAYS_CCSDS_TO_UNIX as i64 * SECONDS_PER_DAY as i64)
}
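All four conversions hinge on the fixed offset between the two epochs: 1958-01-01 to 1970-01-01 is 4383 days (three of those years are leap years). A small worked sketch, assuming `DAYS_CCSDS_TO_UNIX = -4383` and `SECONDS_PER_DAY = 86400` as defined elsewhere in this module, and the `spacepackets::time` import path:

```rust
use spacepackets::time::{
    ccsds_epoch_to_unix_epoch, ccsds_to_unix_days, unix_epoch_to_ccsds_epoch, unix_to_ccsds_days,
};

fn main() {
    // 1970-01-01 is day 0 of the UNIX epoch and day 4383 of the CCSDS epoch.
    assert_eq!(unix_to_ccsds_days(0), 4383);
    assert_eq!(ccsds_to_unix_days(4383), 0);

    // The same offset expressed in seconds: 4383 days * 86400 s/day.
    assert_eq!(unix_epoch_to_ccsds_epoch(0), 378_691_200);
    assert_eq!(ccsds_epoch_to_unix_epoch(378_691_200), 0);
}
```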
@@ -547,6 +542,7 @@ mod tests {
    }
    #[test]
+    #[cfg_attr(miri, ignore)]
    fn test_get_current_time() {
        let sec_floats = seconds_since_epoch();
        assert!(sec_floats > 0.0);
@@ -561,6 +557,7 @@ mod tests {
    }
    #[test]
+    #[cfg_attr(miri, ignore)]
    fn test_ccsds_epoch() {
        let now = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
@@ -681,6 +678,7 @@ mod tests {
    }
    #[test]
+    #[cfg_attr(miri, ignore)]
    fn test_from_now() {
        let stamp_now = UnixTime::now().unwrap();
        let dt_now = stamp_now.chrono_date_time().unwrap();


@@ -15,10 +15,12 @@ pub trait ToBeBytes {
impl ToBeBytes for () {
    type ByteArray = [u8; 0];
+    #[inline]
    fn written_len(&self) -> usize {
        0
    }
+    #[inline]
    fn to_be_bytes(&self) -> Self::ByteArray {
        []
    }
@@ -27,9 +29,12 @@ impl ToBeBytes for () {
impl ToBeBytes for u8 {
    type ByteArray = [u8; 1];
+    #[inline]
    fn written_len(&self) -> usize {
        1
    }
+    #[inline]
    fn to_be_bytes(&self) -> Self::ByteArray {
        u8::to_be_bytes(*self)
    }
@@ -38,9 +43,12 @@ impl ToBeBytes for u8 {
impl ToBeBytes for u16 {
    type ByteArray = [u8; 2];
+    #[inline]
    fn written_len(&self) -> usize {
        2
    }
+    #[inline]
    fn to_be_bytes(&self) -> Self::ByteArray {
        u16::to_be_bytes(*self)
    }
@@ -49,9 +57,12 @@ impl ToBeBytes for u16 {
impl ToBeBytes for u32 {
    type ByteArray = [u8; 4];
+    #[inline]
    fn written_len(&self) -> usize {
        4
    }
+    #[inline]
    fn to_be_bytes(&self) -> Self::ByteArray {
        u32::to_be_bytes(*self)
    }
@@ -60,9 +71,12 @@ impl ToBeBytes for u32 {
impl ToBeBytes for u64 {
    type ByteArray = [u8; 8];
+    #[inline]
    fn written_len(&self) -> usize {
        8
    }
+    #[inline]
    fn to_be_bytes(&self) -> Self::ByteArray {
        u64::to_be_bytes(*self)
    }
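These impls all follow the same pattern: each integer type reports its encoded width and hands out a stack-allocated big endian byte array, which is what lets callers serialize without heap allocation. A self-contained sketch of that pattern; the trait below is a local stand-in, not the crate's exact definition:

```rust
// Local stand-in for the ToBeBytes pattern shown above.
trait ToBeBytesSketch {
    type ByteArray: AsRef<[u8]>;
    fn written_len(&self) -> usize;
    fn to_be_bytes(&self) -> Self::ByteArray;
}

impl ToBeBytesSketch for u16 {
    type ByteArray = [u8; 2];
    fn written_len(&self) -> usize {
        2
    }
    fn to_be_bytes(&self) -> Self::ByteArray {
        u16::to_be_bytes(*self)
    }
}

// Generic helper: copy any implementor into a caller-provided buffer.
fn write_be<T: ToBeBytesSketch>(value: &T, buf: &mut [u8]) -> usize {
    let len = value.written_len();
    buf[..len].copy_from_slice(value.to_be_bytes().as_ref());
    len
}

fn main() {
    let mut buf = [0u8; 8];
    assert_eq!(write_be(&0xABCDu16, &mut buf), 2);
    assert_eq!(&buf[..2], &[0xAB, 0xCD]);
}
```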
@@ -104,6 +118,7 @@ pub enum UnsignedByteFieldError {
}
impl From<ByteConversionError> for UnsignedByteFieldError {
+    #[inline]
    fn from(value: ByteConversionError) -> Self {
        Self::ByteConversionError(value)
    }
@@ -138,14 +153,17 @@ pub struct UnsignedByteField {
}
impl UnsignedByteField {
+    #[inline]
    pub const fn new(width: usize, value: u64) -> Self {
        Self { width, value }
    }
+    #[inline]
    pub const fn value_const(&self) -> u64 {
        self.value
    }
+    #[inline]
    pub fn new_from_be_bytes(width: usize, buf: &[u8]) -> Result<Self, UnsignedByteFieldError> {
        if width > buf.len() {
            return Err(ByteConversionError::FromSliceTooSmall {
@@ -178,10 +196,12 @@ impl UnsignedByteField {
}
impl UnsignedEnum for UnsignedByteField {
+    #[inline]
    fn size(&self) -> usize {
        self.width
    }
+    #[inline]
    fn value(&self) -> u64 {
        self.value_const()
    }
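`UnsignedByteField` stores a value together with an explicit encoded width, and `new_from_be_bytes` rejects buffers that are too short for the requested width. A usage sketch; the `spacepackets::util` import path is an assumption:

```rust
use spacepackets::util::{UnsignedByteField, UnsignedEnum};

fn main() {
    // A two byte wide field holding 0x1234.
    let field = UnsignedByteField::new(2, 0x1234);
    assert_eq!(field.size(), 2);
    assert_eq!(field.value(), 0x1234);

    // The same field reconstructed from big endian bytes.
    let raw = [0x12, 0x34];
    let parsed = UnsignedByteField::new_from_be_bytes(2, &raw).unwrap();
    assert_eq!(parsed.value(), 0x1234);

    // Requesting more bytes than the buffer provides fails.
    assert!(UnsignedByteField::new_from_be_bytes(4, &raw).is_err());
}
```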
@@ -237,6 +257,7 @@ impl<TYPE: Copy + Into<u64>> GenericUnsignedByteField<TYPE> {
}
impl<TYPE: Copy + ToBeBytes + Into<u64>> UnsignedEnum for GenericUnsignedByteField<TYPE> {
+    #[inline]
    fn size(&self) -> usize {
        self.value.written_len()
    }
@@ -252,6 +273,7 @@ impl<TYPE: Copy + ToBeBytes + Into<u64>> UnsignedEnum for GenericUnsignedByteField<TYPE> {
        Ok(self.value.written_len())
    }
+    #[inline]
    fn value(&self) -> u64 {
        self.value_typed().into()
    }
@@ -277,6 +299,7 @@ impl From<UnsignedByteFieldU8> for UnsignedByteField {
impl TryFrom<UnsignedByteField> for UnsignedByteFieldU8 {
    type Error = UnsignedByteFieldError;
+    #[inline]
    fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> {
        if value.width != 1 {
            return Err(UnsignedByteFieldError::InvalidWidth {
@@ -289,6 +312,7 @@ impl TryFrom<UnsignedByteField> for UnsignedByteFieldU8 {
}
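The typed wrappers convert losslessly into the generic field via `From`, while the reverse `TryFrom` direction checks that the stored width matches the target type and fails with `UnsignedByteFieldError::InvalidWidth` otherwise. A round-trip sketch; the typed `new` constructor is assumed from the crate's public API:

```rust
use spacepackets::util::{UnsignedByteField, UnsignedByteFieldU16, UnsignedByteFieldU8};

fn main() {
    // Widening a typed field into the generic representation always works.
    let typed = UnsignedByteFieldU8::new(0xAB);
    let generic: UnsignedByteField = typed.into();

    // Converting back only succeeds when the stored width matches.
    assert!(UnsignedByteFieldU8::try_from(generic).is_ok());
    assert!(UnsignedByteFieldU16::try_from(generic).is_err());
}
```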
impl From<UnsignedByteFieldU16> for UnsignedByteField {
+    #[inline]
    fn from(value: UnsignedByteFieldU16) -> Self {
        Self::new(2, value.value as u64)
    }
@@ -297,6 +321,7 @@ impl From<UnsignedByteFieldU16> for UnsignedByteField {
impl TryFrom<UnsignedByteField> for UnsignedByteFieldU16 {
    type Error = UnsignedByteFieldError;
+    #[inline]
    fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> {
        if value.width != 2 {
            return Err(UnsignedByteFieldError::InvalidWidth {
@@ -309,6 +334,7 @@ impl TryFrom<UnsignedByteField> for UnsignedByteFieldU16 {
}
impl From<UnsignedByteFieldU32> for UnsignedByteField {
+    #[inline]
    fn from(value: UnsignedByteFieldU32) -> Self {
        Self::new(4, value.value as u64)
    }
@@ -317,6 +343,7 @@ impl From<UnsignedByteFieldU32> for UnsignedByteField {
impl TryFrom<UnsignedByteField> for UnsignedByteFieldU32 {
    type Error = UnsignedByteFieldError;
+    #[inline]
    fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> {
        if value.width != 4 {
            return Err(UnsignedByteFieldError::InvalidWidth {
@@ -329,6 +356,7 @@ impl TryFrom<UnsignedByteField> for UnsignedByteFieldU32 {
}
impl From<UnsignedByteFieldU64> for UnsignedByteField {
+    #[inline]
    fn from(value: UnsignedByteFieldU64) -> Self {
        Self::new(8, value.value)
    }
@@ -337,6 +365,7 @@ impl From<UnsignedByteFieldU64> for UnsignedByteField {
impl TryFrom<UnsignedByteField> for UnsignedByteFieldU64 {
    type Error = UnsignedByteFieldError;
+    #[inline]
    fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> {
        if value.width != 8 {
            return Err(UnsignedByteFieldError::InvalidWidth {