6 Commits

Author | SHA1 | Message | Date

179e8b73b6 | refactor CUC again | 2024-03-14 12:49:59 +01:00
  Some checks failed: Rust/spacepackets/pipeline/head, Rust/spacepackets/pipeline/pr-main

83c84bb31d | make the API even more similar | 2024-03-14 10:50:31 +01:00
  Some checks failed: Rust/spacepackets/pipeline/head, Rust/spacepackets/pipeline/pr-main

05b6503851 | UnixTimestamp now has nanosecond precision | 2024-03-14 10:13:29 +01:00
  Some checks failed: Rust/spacepackets/pipeline/head

aae246dca6 | correction for unittest | 2024-03-13 18:20:24 +01:00

aacdd9f364 | add support for the time library | 2024-03-13 18:12:12 +01:00
  Some checks failed: Rust/spacepackets/pipeline/head

de1dc16e60 | this works, need to think about where this is the best solution | 2024-03-13 17:37:51 +01:00
  All checks were successful: Rust/spacepackets/pipeline/head, Rust/spacepackets/pipeline/pr-main
32 changed files with 1984 additions and 2813 deletions

View File

@@ -1,39 +1,42 @@
+on: [push]
 name: ci
-on: [push, pull_request]
 
 jobs:
   check:
-    name: Check build
+    name: Check
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
     runs-on: ${{ matrix.os }}
     steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
-      - run: cargo check --release
-
-  test:
-    name: Run Tests
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
-      - name: Install nextest
-        uses: taiki-e/install-action@nextest
-      - run: cargo nextest run --all-features
-      - run: cargo test --doc
+      - uses: actions/checkout@v2
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+      - uses: actions-rs/cargo@v1
+        with:
+          command: check
+          args: --release
 
   msrv:
-    name: Check MSRV
+    name: Check with MSRV
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@1.81.0
-      - run: cargo check --release
+      - uses: actions/checkout@v2
+      - uses: actions-rs/toolchain@v1
+        with:
+          toolchain: 1.61.0
+          override: true
+          profile: minimal
+      - uses: actions-rs/cargo@v1
+        with:
+          command: check
+          args: --release
 
   cross-check:
-    name: Check Cross-Compilation
+    name: Check Cross
    runs-on: ubuntu-latest
    strategy:
      matrix:
@@ -41,32 +44,70 @@ jobs:
         - armv7-unknown-linux-gnueabihf
         - thumbv7em-none-eabihf
    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
+      - uses: actions/checkout@v2
+      - uses: actions-rs/toolchain@v1
        with:
-          targets: "armv7-unknown-linux-gnueabihf, thumbv7em-none-eabihf"
-      - run: cargo check --release --target=${{matrix.target}} --no-default-features
+          profile: minimal
+          toolchain: stable
+          target: ${{ matrix.target }}
+          override: true
+      - uses: actions-rs/cargo@v1
+        with:
+          use-cross: true
+          command: check
+          args: --release --target=${{ matrix.target }} --no-default-features
 
   fmt:
-    name: Check formatting
+    name: Rustfmt
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
-      - run: cargo fmt --all -- --check
+      - uses: actions/checkout@v2
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          override: true
+      - run: rustup component add rustfmt
+      - uses: actions-rs/cargo@v1
+        with:
+          command: fmt
+          args: --all -- --check
 
-  docs:
+  check-doc:
    name: Check Documentation Build
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@nightly
-      - run: RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features
+      - uses: actions/checkout@v2
+      - uses: actions-rs/toolchain@v1
+        with:
+          toolchain: nightly
+          override: true
+          profile: minimal
+      - uses: actions-rs/cargo@v1
+        with:
+          command: doc
+          args: --all-features
 
   clippy:
    name: Clippy
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
-      - run: cargo clippy -- -D warnings
+      - uses: actions/checkout@v2
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+      - run: rustup component add clippy
+      - uses: actions-rs/cargo@v1
+        with:
+          command: clippy
+          args: -- -D warnings
+
+  ci:
+    if: ${{ success() }}
+    # all new jobs must be added to this list
+    needs: [check, fmt, clippy]
+    runs-on: ubuntu-latest
+    steps:
+      - name: CI succeeded
+        run: exit 0

View File

@@ -8,108 +8,6 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

# [unreleased]
# [v0.13.1] 2025-03-21
- Bugfix due to operator precedence for `PusTcSecondaryHeader::pus_version`,
  `PusTcSecondaryHeaderWithoutTimestamp::pus_version`, `CdsTime::from_bytes_with_u16_days` and
  `CdsTime::from_bytes_with_u24_days`
# [v0.13.0] 2024-11-08
- Bumped MSRV to 1.81.0
- Bump `zerocopy` to v0.8.0
- Bump `thiserror` to v2.0.0
## Changed
- Migrated all Error implementations to thiserror, improved some naming and error handling in
general
# [v0.12.0] 2024-09-10
- Bumped MSRV to 1.70.0
## Added
- Added new `cfdp::tlv::TlvOwned` type which erases the lifetime and is clonable.
- Dedicated `cfdp::tlv::TlvLvDataTooLarge` error struct for APIs where this is the only possible
API error.
- Added a File Data PDU creator API which takes the expected file data size and then exposes the
  unwritten file data field as a mutable slice. This allows reading data from the virtual file
  system API directly into the file data buffer without an intermediate buffer (a short usage
  sketch follows this list).
- Generic `EofPdu::new` constructor.
- Added generic sequence counter module.
- Added `MsgToUserTlv::to_tlv` converter which reduces the type and converts it to a
  generic `Tlv`.
- Implemented `From<MsgToUserTlv> for Tlv` converter trait.
- Added CFDP maximum file segment length calculator method `calculate_max_file_seg_len_for_max_packet_len_and_pdu_header`
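The creator API mentioned above can be used roughly like in the following sketch, which is based
on the unit tests shown further down in this diff. The exact import paths and the argument types
of `CommonPduConfig::new_with_byte_fields` are assumptions and may differ from the real API.

```rust
use spacepackets::cfdp::pdu::file_data::{FileDataPdu, FileDataPduCreatorWithReservedDatafield};
use spacepackets::cfdp::pdu::{CommonPduConfig, PduHeader};

fn main() {
    // Hypothetical source ID, destination ID and sequence number values.
    let conf = CommonPduConfig::new_with_byte_fields(1, 2, 3).unwrap();
    let pdu_header = PduHeader::new_for_file_data_default(conf, 0);
    let file_data: &[u8] = b"hello world!";

    // Reserve space for the file data at file offset 10 without copying it yet.
    let creator = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
        pdu_header,
        10,
        file_data.len() as u64,
    );
    let mut buf = [0u8; 64];
    // Writes everything except the file data field and the optional CRC.
    let mut unwritten = creator
        .write_to_bytes_partially(&mut buf)
        .expect("partial write failed");
    // A virtual file system could read directly into this slice instead of copying.
    unwritten.file_data_field_mut().copy_from_slice(file_data);
    // Adds the CRC if the PDU configuration requires one.
    unwritten.finish();

    let read_back = FileDataPdu::from_bytes(&buf).expect("reading file data PDU failed");
    assert_eq!(read_back.file_data(), file_data);
}
```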
## Added and Changed
- Added new `ReadableTlv` to avoid some boilerplate code and have a common abstraction implemented
for both `Tlv` and `TlvOwned` to read the raw TLV data field and its length.
- Replaced `cfdp::tlv::TlvLvError` by `cfdp::tlv::TlvLvDataTooLarge` where applicable.
## Fixed
- Fixed an error in the EOF writer which wrote the fault location to the wrong buffer position.
- cfdp `ConditionCode::CheckLimitReached` previously had the wrong numerical value of `0b1001` (9)
  and now has the correct value of `0b1010` (10).
## Changed
- Minor documentation build updates.
- Increased delegate version range to v0.13
# [v0.11.2] 2024-05-19
- Bumped MSRV to 1.68.2
## Fixed
- Removed `defmt::Format` impl for `MetadataPduCreator` which seems to be problematic.
# [v0.11.1] 2024-04-22
## Fixed
- The default data length for `SpHeader` constructors where the data field length is not
  specified is now 0.
- `SpHeader::new_from_fields` is now public.
## Added
- `SpHeader::to_vec` method.
# [v0.11.0] 2024-04-16
## Changed
- Moved `CCSDS_HEADER_LEN` constant to the crate root.
## Added
- Added `SpacePacketHeader` type alias for `SpHeader` type.
# [v0.11.0-rc.2] 2024-04-04
## Changed
- Renamed `PacketId` and `PacketSequenceCtrl` `new` method to `new_checked` and former
`new_const` method to `new`.
- Renamed `tc`, `tm`, `tc_unseg` and `tm_unseg` variants for `PacketId` and `SpHeader`
to `new_for_tc_checked`, `new_for_tm_checked`, `new_for_unseg_tc_checked` and
`new_for_unseg_tm_checked`.
- `PusTmCreator` and `PusTcCreator` now expect a regular instance of `SpHeader` instead of
a mutable reference.
## Added
- `SpHeader::new_from_apid` and `SpHeader::new_from_apid_checked` constructor.
- `#[inline]` attribute for a lot of small functions.
# [v0.11.0-rc.1] 2024-04-03
Major API changes for the time API. If you are using the time API, it is strongly recommended
to check all the API changes in the **Changed** chapter.
@@ -126,14 +24,9 @@ to check all the API changes in the **Changed** chapter.
- Added basic conversion support for the `time` library. Introduced the new `chrono` and `timelib`
  feature gates.
- Added `CcsdsTimeProvider::timelib_date_time`.
- Optional support for `defmt` by adding optional `defmt::Format` derives for common types.
## Changed
- `PusTcCreator::new_simple` now expects a valid slice for the source data instead of an optional
slice. For telecommands without application data, `&[]` can be passed.
- `PusTmSecondaryHeader` constructors now expects a valid slice for the time stamp instead of an
optional slice.
- Renamed `CcsdsTimeProvider::date_time` to `CcsdsTimeProvider::chrono_date_time`
- Renamed `CcsdsTimeCodes` to `CcsdsTimeCode`
- Renamed `cds::TimeProvider` to `cds::CdsTime`
@@ -141,7 +34,6 @@ to check all the API changes in the **Changed** chapter.
- `UnixTimestamp` renamed to `UnixTime`
- `UnixTime` seconds are now private and can be retrieved using the `secs` member method.
- `UnixTime::new` renamed to `UnixTime::new_checked`.
- `UnixTime::secs` renamed to `UnixTime::as_secs`.
- `UnixTime` now has a nanosecond subsecond precision. The `new` constructor now expects
  nanoseconds as the second argument (see the short example after the **Removed** section below).
- Added new `UnixTime::new_subsec_millis` and `UnixTime::new_subsec_millis_checked` API
@@ -151,16 +43,6 @@ to check all the API changes in the **Changed** chapter.
- `CcsdsTimeProvider::date_time` renamed to `CcsdsTimeProvider::chrono_date_time`.
- Added `UnixTime::MIN`, `UnixTime::MAX` and `UnixTime::EPOCH`.
- Added `UnixTime::timelib_date_time`.
- Error handling for ECSS and time module is more granular now, with a new
`DateBeforeCcsdsEpochError` error and a `DateBeforeCcsdsEpoch` enum variant for both
`CdsError` and `CucError`.
- `PusTmCreator` now has two lifetimes: One for the raw source data buffer and one for the
raw timestamp.
- Time API `from_now*` API renamed to `now*`.
## Removed
- Legacy `PusTm` and `PusTc` objects.
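As a rough illustration of the nanosecond-precision API described above, construction might look
like the following sketch. The module path and the exact constructor signatures are assumptions
based on the changelog entries and have not been verified against the release.

```rust
use spacepackets::time::UnixTime;

fn main() {
    // Seconds since the UNIX epoch plus a nanosecond subsecond part.
    let with_nanos = UnixTime::new(1_710_000_000, 500_000_000);
    // Convenience constructor when only millisecond precision is needed.
    let with_millis = UnixTime::new_subsec_millis(1_710_000_000, 500);
    assert_eq!(with_nanos, with_millis);
}
```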
# [v0.11.0-rc.0] 2024-03-04
@@ -567,7 +449,3 @@ The timestamp of `PusTm` is now optional. See Added and Changed section for details.
Initial release with CCSDS Space Packet Primary Header implementation and basic PUS TC and TM
implementations.
[v0.13.1]: https://egit.irs.uni-stuttgart.de/rust/spacepackets/compare/v0.13.0...v0.13.1
[v0.13.0]: https://egit.irs.uni-stuttgart.de/rust/spacepackets/compare/v0.12.0...v0.13.0
[v0.12.0]: https://egit.irs.uni-stuttgart.de/rust/spacepackets/compare/v0.11.2...v0.12.0

View File

@@ -1,8 +1,8 @@
 [package]
 name = "spacepackets"
-version = "0.13.1"
+version = "0.11.0-rc.0"
 edition = "2021"
-rust-version = "1.81.0"
+rust-version = "1.61"
 authors = ["Robin Mueller <muellerr@irs.uni-stuttgart.de>"]
 description = "Generic implementations for various CCSDS and ECSS packet standards"
 homepage = "https://egit.irs.uni-stuttgart.de/rust/spacepackets"
@@ -14,16 +14,15 @@ categories = ["aerospace", "aerospace::space-protocols", "no-std", "hardware-support"]
 [dependencies]
 crc = "3"
-delegate = ">=0.8, <=0.13"
-paste = "1"
+delegate = ">=0.8, <0.11"
 
 [dependencies.zerocopy]
-version = "0.8"
+version = "0.7"
 features = ["derive"]
 
 [dependencies.thiserror]
-version = "2"
-default-features = false
+version = "1"
+optional = true
 
 [dependencies.num_enum]
 version = ">0.5, <=0.7"
@@ -49,21 +48,18 @@ optional = true
 version = "0.2"
 default-features = false
 
-[dependencies.defmt]
-version = "0.3"
-optional = true
+[dev-dependencies]
+postcard = "1"
+chrono = "0.4"
 
 [features]
 default = ["std"]
-std = ["alloc", "chrono/std", "chrono/clock", "thiserror/std"]
-serde = ["dep:serde", "chrono?/serde"]
-alloc = ["chrono?/alloc", "defmt?/alloc", "serde?/alloc"]
-chrono = ["dep:chrono"]
+std = ["chrono/std", "chrono/clock", "alloc", "thiserror"]
+serde = ["dep:serde", "chrono/serde"]
+alloc = ["postcard/alloc", "chrono/alloc"]
 timelib = ["dep:time"]
 
-[dev-dependencies]
-postcard = { version = "1", features = ["alloc"] }
-chrono = "0.4"
 
 [package.metadata.docs.rs]
 all-features = true
-rustdoc-args = ["--generate-link-to-definition"]
+rustdoc-args = ["--cfg", "doc_cfg", "--generate-link-to-definition"]

View File

@@ -1 +0,0 @@
github: robamu

View File

@@ -29,6 +29,10 @@ Currently, this includes the following components:
`spacepackets` supports various runtime environments and is also suitable for `no_std` environments.
It also offers optional support for [`serde`](https://serde.rs/). This allows serializing and
deserializing them with an appropriate `serde` provider like
[`postcard`](https://github.com/jamesmunns/postcard).
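A minimal sketch of what this looks like with `postcard`, assuming the `serde` feature of this
crate is enabled. The `SpHeader::new_from_apid` constructor and the exact `postcard` feature
requirements are assumptions for illustration.

```rust
use spacepackets::SpHeader;

fn main() {
    let sp_header = SpHeader::new_from_apid(0x02);
    // `postcard::to_allocvec` requires postcard's `alloc` feature.
    let raw = postcard::to_allocvec(&sp_header).expect("serialization failed");
    let read_back: SpHeader = postcard::from_bytes(&raw).expect("deserialization failed");
    assert_eq!(read_back, sp_header);
}
```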
## Default features

- [`std`](https://doc.rust-lang.org/std/): Enables functionality relying on the standard library.
@@ -41,8 +45,6 @@ Currently, this includes the following components:
- [`serde`](https://serde.rs/): Adds `serde` support for most types by adding `Serialize` and `Deserialize` `derive`s
- [`chrono`](https://crates.io/crates/chrono): Add basic support for the `chrono` time library.
- [`timelib`](https://crates.io/crates/time): Add basic support for the `time` time library.
- [`defmt`](https://defmt.ferrous-systems.com/): Add support for the `defmt` by adding the
[`defmt::Format`](https://defmt.ferrous-systems.com/format) derive on many types.
# Examples

@@ -61,13 +63,3 @@ cargo install grcov --locked
After that, you can simply run `coverage.py` to test the project with coverage. You can optionally
supply the `--open` flag to open the coverage report in your web browser.
# Miri
You can run the [`miri`](https://github.com/rust-lang/miri) tool on this library to check for
undefined behaviour (UB). This library does not use any `unsafe` code blocks, but `miri` could
still catch UB from used libraries.
```sh
cargo +nightly miri nextest run --all-features
```

View File

@@ -15,10 +15,7 @@ RUN rustup install nightly && \
     rustup target add thumbv7em-none-eabihf armv7-unknown-linux-gnueabihf && \
     rustup component add rustfmt clippy llvm-tools-preview
 
-# Get grcov
 RUN curl -sSL https://github.com/mozilla/grcov/releases/download/v0.8.19/grcov-x86_64-unknown-linux-gnu.tar.bz2 | tar -xj --directory /usr/local/bin
-# Get nextest
-RUN curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C ${CARGO_HOME:-~/.cargo}/bin
 
 # SSH stuff to allow deployment to doc server
 RUN adduser --uid 114 jenkins

View File

@@ -21,9 +21,7 @@ pipeline {
     }
     stage('Docs') {
       steps {
-        sh """
-          RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features
-        """
+        sh 'cargo +nightly doc --all-features'
       }
     }
     stage('Rustfmt') {
@@ -33,8 +31,7 @@ pipeline {
     }
     stage('Test') {
       steps {
-        sh 'cargo nextest r --all-features'
-        sh 'cargo test --doc'
+        sh 'cargo test --all-features'
       }
     }
     stage('Check with all features') {

View File

@@ -1,3 +0,0 @@
#!/bin/sh
export RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options"
cargo +nightly doc --all-features --open

View File

@@ -4,14 +4,11 @@ Checklist for new releases

 # Pre-Release

 1. Make sure any new modules are documented sufficiently enough and check docs with
-   `RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features --open`
-   or `cargo +nightly doc --all-features --config 'build.rustdocflags=["--cfg", "docsrs", "--generate-link-to-definition"]' --open`
-   (was problematic on more recent nightly versions).
+   `cargo +nightly doc --all-features --config 'rustdocflags=["--cfg", "doc_cfg"]' --open`.
 2. Bump version specifier in `Cargo.toml`.
 3. Update `CHANGELOG.md`: Convert `unreleased` section into version section with date and add new
    `unreleased` section.
-4. Run `cargo test --all-features` or `cargo nextest r --all-features` together with
-   `cargo test --doc`.
+4. Run `cargo test --all-features`.
 5. Run `cargo fmt` and `cargo clippy`. Check `cargo msrv` against MSRV in `Cargo.toml`.
 6. Wait for CI/CD results for EGit and Github. These also check cross-compilation for bare-metal
    targets.

View File

@@ -1,4 +1,5 @@
 //! Generic CFDP length-value (LV) abstraction as specified in CFDP 5.1.8.
+use crate::cfdp::TlvLvError;
 use crate::ByteConversionError;
 use core::str::Utf8Error;
 #[cfg(feature = "serde")]
@@ -6,8 +7,6 @@ use serde::{Deserialize, Serialize};
 #[cfg(feature = "std")]
 use std::string::String;
 
-use super::TlvLvDataTooLargeError;
-
 pub const MIN_LV_LEN: usize = 1;
 
 /// Generic CFDP length-value (LV) abstraction as specified in CFDP 5.1.8.
@@ -21,7 +20,6 @@ pub const MIN_LV_LEN: usize = 1;
 /// this will be the lifetime of that data reference.
 #[derive(Debug, Copy, Clone, Eq)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "defmt", derive(defmt::Format))]
 pub struct Lv<'data> {
     data: &'data [u8],
     // If the LV was generated from a raw bytestream, this will contain the start of the
@@ -63,10 +61,9 @@ pub(crate) fn generic_len_check_deserialization(
 }
 
 impl<'data> Lv<'data> {
-    #[inline]
-    pub fn new(data: &[u8]) -> Result<Lv, TlvLvDataTooLargeError> {
+    pub fn new(data: &[u8]) -> Result<Lv, TlvLvError> {
         if data.len() > u8::MAX as usize {
-            return Err(TlvLvDataTooLargeError(data.len()));
+            return Err(TlvLvError::DataTooLarge(data.len()));
         }
         Ok(Lv {
             data,
@@ -75,7 +72,6 @@ impl<'data> Lv<'data> {
     }
 
     /// Creates a LV with an empty value field.
-    #[inline]
     pub fn new_empty() -> Lv<'data> {
         Lv {
             data: &[],
@@ -85,52 +81,45 @@ impl<'data> Lv<'data> {
 
     /// Helper function to build a string LV. This is especially useful for the file or directory
     /// path LVs
-    #[inline]
-    pub fn new_from_str(str_slice: &str) -> Result<Lv, TlvLvDataTooLargeError> {
+    pub fn new_from_str(str_slice: &str) -> Result<Lv, TlvLvError> {
         Self::new(str_slice.as_bytes())
     }
 
     /// Helper function to build a string LV. This is especially useful for the file or directory
     /// path LVs
     #[cfg(feature = "std")]
-    #[inline]
-    pub fn new_from_string(string: &'data String) -> Result<Lv<'data>, TlvLvDataTooLargeError> {
+    #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
+    pub fn new_from_string(string: &'data String) -> Result<Lv<'data>, TlvLvError> {
         Self::new(string.as_bytes())
     }
 
     /// Returns the length of the value part, not including the length byte.
-    #[inline]
     pub fn len_value(&self) -> usize {
         self.data.len()
     }
 
     /// Returns the full raw length, including the length byte.
-    #[inline]
     pub fn len_full(&self) -> usize {
         self.len_value() + 1
     }
 
     /// Checks whether the value field is empty.
-    #[inline]
     pub fn is_empty(&self) -> bool {
         self.data.len() == 0
     }
 
-    #[inline]
     pub fn value(&self) -> &[u8] {
         self.data
     }
 
     /// If the LV was generated from a raw bytestream using [Self::from_bytes], the raw start
     /// of the LV can be retrieved with this method.
-    #[inline]
     pub fn raw_data(&self) -> Option<&[u8]> {
         self.raw_data
     }
 
     /// Convenience function to extract the value as a [str]. This is useful if the LV is
     /// known to contain a [str], for example being a file name.
-    #[inline]
     pub fn value_as_str(&self) -> Option<Result<&'data str, Utf8Error>> {
         if self.is_empty() {
             return None;
@@ -146,7 +135,6 @@ impl<'data> Lv<'data> {
     }
 
     /// Reads a LV from a raw buffer.
-    #[inline]
     pub fn from_bytes(buf: &'data [u8]) -> Result<Lv<'data>, ByteConversionError> {
         generic_len_check_deserialization(buf, MIN_LV_LEN)?;
         Self::from_be_bytes_no_len_check(buf)
@@ -163,7 +151,6 @@ impl<'data> Lv<'data> {
         MIN_LV_LEN + self.data.len()
     }
 
-    #[inline]
     pub(crate) fn from_be_bytes_no_len_check(
         buf: &'data [u8],
     ) -> Result<Lv<'data>, ByteConversionError> {
@@ -178,10 +165,10 @@ impl<'data> Lv<'data> {
 #[cfg(test)]
 pub mod tests {
+    use super::*;
     use alloc::string::ToString;
 
-    use super::*;
+    use crate::cfdp::TlvLvError;
     use crate::ByteConversionError;
     use std::string::String;
@@ -272,11 +259,15 @@ pub mod tests {
         let lv = Lv::new(&data_big);
         assert!(lv.is_err());
         let error = lv.unwrap_err();
-        assert_eq!(error.0, u8::MAX as usize + 1);
-        assert_eq!(
-            error.to_string(),
-            "data with size 256 larger than allowed 255 bytes"
-        );
+        if let TlvLvError::DataTooLarge(size) = error {
+            assert_eq!(size, u8::MAX as usize + 1);
+            assert_eq!(
+                error.to_string(),
+                "data with size 256 larger than allowed 255 bytes"
+            );
+        } else {
+            panic!("invalid exception {:?}", error)
+        }
     }
 
     #[test]

View File

@@ -1,8 +1,11 @@
 //! Low-level CCSDS File Delivery Protocol (CFDP) support according to [CCSDS 727.0-B-5](https://public.ccsds.org/Pubs/727x0b5.pdf).
 use crate::ByteConversionError;
+use core::fmt::{Display, Formatter};
 use num_enum::{IntoPrimitive, TryFromPrimitive};
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
+#[cfg(feature = "std")]
+use std::error::Error;
 
 pub mod lv;
 pub mod pdu;
@ -15,7 +18,6 @@ pub const CFDP_VERSION_2: u8 = 0b001;
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum PduType { pub enum PduType {
FileDirective = 0, FileDirective = 0,
@ -24,7 +26,6 @@ pub enum PduType {
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum Direction { pub enum Direction {
TowardsReceiver = 0, TowardsReceiver = 0,
@ -33,7 +34,6 @@ pub enum Direction {
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum TransmissionMode { pub enum TransmissionMode {
Acknowledged = 0, Acknowledged = 0,
@ -42,7 +42,6 @@ pub enum TransmissionMode {
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum CrcFlag { pub enum CrcFlag {
NoCrc = 0, NoCrc = 0,
@ -70,7 +69,6 @@ impl From<CrcFlag> for bool {
/// Always 0 and ignored for File Directive PDUs (CCSDS 727.0-B-5 P.75) /// Always 0 and ignored for File Directive PDUs (CCSDS 727.0-B-5 P.75)
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum SegmentMetadataFlag { pub enum SegmentMetadataFlag {
NotPresent = 0, NotPresent = 0,
@ -80,7 +78,6 @@ pub enum SegmentMetadataFlag {
/// Always 0 and ignored for File Directive PDUs (CCSDS 727.0-B-5 P.75) /// Always 0 and ignored for File Directive PDUs (CCSDS 727.0-B-5 P.75)
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum SegmentationControl { pub enum SegmentationControl {
NoRecordBoundaryPreservation = 0, NoRecordBoundaryPreservation = 0,
@ -89,7 +86,6 @@ pub enum SegmentationControl {
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum FaultHandlerCode { pub enum FaultHandlerCode {
NoticeOfCancellation = 0b0001, NoticeOfCancellation = 0b0001,
@ -100,7 +96,6 @@ pub enum FaultHandlerCode {
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum ConditionCode { pub enum ConditionCode {
/// This is not an error condition for which a faulty handler override can be specified /// This is not an error condition for which a faulty handler override can be specified
@@ -113,7 +108,7 @@ pub enum ConditionCode {
     FileSizeError = 0b0110,
     NakLimitReached = 0b0111,
     InactivityDetected = 0b1000,
-    CheckLimitReached = 0b1010,
+    CheckLimitReached = 0b1001,
     UnsupportedChecksumType = 0b1011,
     /// Not an actual fault condition for which fault handler overrides can be specified
     SuspendRequestReceived = 0b1110,
@ -123,7 +118,6 @@ pub enum ConditionCode {
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum LargeFileFlag { pub enum LargeFileFlag {
/// 32 bit maximum file size and FSS size /// 32 bit maximum file size and FSS size
@ -135,7 +129,6 @@ pub enum LargeFileFlag {
/// Transaction status for the ACK PDU field according to chapter 5.2.4 of the CFDP standard. /// Transaction status for the ACK PDU field according to chapter 5.2.4 of the CFDP standard.
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum TransactionStatus { pub enum TransactionStatus {
/// Transaction is not currently active and the CFDP implementation does not retain a /// Transaction is not currently active and the CFDP implementation does not retain a
@ -153,7 +146,6 @@ pub enum TransactionStatus {
/// [SANA Checksum Types registry](https://sanaregistry.org/r/checksum_identifiers/) /// [SANA Checksum Types registry](https://sanaregistry.org/r/checksum_identifiers/)
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum ChecksumType { pub enum ChecksumType {
/// Modular legacy checksum /// Modular legacy checksum
@@ -173,43 +165,75 @@ impl Default for ChecksumType {
 pub const NULL_CHECKSUM_U32: [u8; 4] = [0; 4];
 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "defmt", derive(defmt::Format))]
-#[error("data with size {0} larger than allowed {max} bytes", max = u8::MAX)]
-pub struct TlvLvDataTooLargeError(pub usize);
-
-/// First value: Found value. Second value: Expected value if there is one.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
-#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "defmt", derive(defmt::Format))]
-#[error("invalid TLV type field, found {found}, expected {expected:?}")]
-pub struct InvalidTlvTypeFieldError {
-    found: u8,
-    expected: Option<u8>,
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
-#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "defmt", derive(defmt::Format))]
 pub enum TlvLvError {
-    #[error("{0}")]
-    DataTooLarge(#[from] TlvLvDataTooLargeError),
-    #[error("byte conversion error: {0}")]
-    ByteConversion(#[from] ByteConversionError),
-    #[error("{0}")]
-    InvalidTlvTypeField(#[from] InvalidTlvTypeFieldError),
-    #[error("invalid value length {0}")]
+    DataTooLarge(usize),
+    ByteConversion(ByteConversionError),
+    /// First value: Found value. Second value: Expected value if there is one.
+    InvalidTlvTypeField {
+        found: u8,
+        expected: Option<u8>,
+    },
+    /// Logically invalid value length detected. The value length may not exceed 255 bytes.
+    /// Depending on the concrete TLV type, the value length may also be logically invalid.
     InvalidValueLength(usize),
     /// Only applies to filestore requests and responses. Second name was missing where one is
     /// expected.
-    #[error("second name missing for filestore request or response")]
     SecondNameMissing,
     /// Invalid action code for filestore requests or responses.
-    #[error("invalid action code {0}")]
     InvalidFilestoreActionCode(u8),
 }
impl From<ByteConversionError> for TlvLvError {
fn from(value: ByteConversionError) -> Self {
Self::ByteConversion(value)
}
}
impl Display for TlvLvError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
TlvLvError::DataTooLarge(data_len) => {
write!(
f,
"data with size {} larger than allowed {} bytes",
data_len,
u8::MAX
)
}
TlvLvError::ByteConversion(e) => {
write!(f, "tlv or lv byte conversion: {}", e)
}
TlvLvError::InvalidTlvTypeField { found, expected } => {
write!(
f,
"invalid TLV type field, found {found}, expected {expected:?}"
)
}
TlvLvError::InvalidValueLength(len) => {
write!(f, "invalid value length {len}")
}
TlvLvError::SecondNameMissing => {
write!(f, "second name missing for filestore request or response")
}
TlvLvError::InvalidFilestoreActionCode(raw) => {
write!(f, "invalid filestore action code with raw value {raw}")
}
}
}
}
#[cfg(feature = "std")]
impl Error for TlvLvError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
TlvLvError::ByteConversion(e) => Some(e),
_ => None,
}
}
}
#[cfg(test)]
mod tests {
    use super::*;

View File

@@ -15,7 +15,6 @@ use serde::{Deserialize, Serialize};
 /// For more information, refer to CFDP chapter 5.2.4.
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "defmt", derive(defmt::Format))]
 pub struct AckPdu {
     pdu_header: PduHeader,
     directive_code_of_acked_pdu: FileDirectiveType,

View File

@@ -15,7 +15,6 @@ use super::{CfdpPdu, WritablePduPacket};
 /// For more information, refer to CFDP chapter 5.2.2.
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "defmt", derive(defmt::Format))]
 pub struct EofPdu {
     pdu_header: PduHeader,
     condition_code: ConditionCode,
@@ -25,36 +24,20 @@ pub struct EofPdu {
 }
 
 impl EofPdu {
-    pub fn new(
-        mut pdu_header: PduHeader,
-        condition_code: ConditionCode,
-        file_checksum: u32,
-        file_size: u64,
-        fault_location: Option<EntityIdTlv>,
-    ) -> Self {
+    pub fn new_no_error(mut pdu_header: PduHeader, file_checksum: u32, file_size: u64) -> Self {
         // Force correct direction flag.
         pdu_header.pdu_conf.direction = Direction::TowardsReceiver;
         let mut eof_pdu = Self {
             pdu_header,
-            condition_code,
+            condition_code: ConditionCode::NoError,
             file_checksum,
             file_size,
-            fault_location,
+            fault_location: None,
         };
         eof_pdu.pdu_header.pdu_datafield_len = eof_pdu.calc_pdu_datafield_len() as u16;
         eof_pdu
     }
 
-    pub fn new_no_error(pdu_header: PduHeader, file_checksum: u32, file_size: u64) -> Self {
-        Self::new(
-            pdu_header,
-            ConditionCode::NoError,
-            file_checksum,
-            file_size,
-            None,
-        )
-    }
-
     pub fn pdu_header(&self) -> &PduHeader {
         &self.pdu_header
     }
@@ -164,7 +147,7 @@ impl WritablePduPacket for EofPdu {
             &mut buf[current_idx..],
         )?;
         if let Some(fault_location) = self.fault_location {
-            current_idx += fault_location.write_to_bytes(&mut buf[current_idx..])?;
+            current_idx += fault_location.write_to_bytes(buf)?;
         }
         if self.crc_flag() == CrcFlag::WithCrc {
             current_idx = add_pdu_crc(buf, current_idx);
@@ -187,23 +170,13 @@ mod tests {
     use crate::cfdp::{ConditionCode, CrcFlag, LargeFileFlag, PduType, TransmissionMode};
     #[cfg(feature = "serde")]
     use crate::tests::generic_serde_test;
-    use crate::util::{UnsignedByteFieldU16, UnsignedEnum};
 
-    fn verify_state_no_error_no_crc(eof_pdu: &EofPdu, file_flag: LargeFileFlag) {
-        verify_state(eof_pdu, CrcFlag::NoCrc, file_flag, ConditionCode::NoError);
-    }
-
-    fn verify_state(
-        eof_pdu: &EofPdu,
-        crc_flag: CrcFlag,
-        file_flag: LargeFileFlag,
-        cond_code: ConditionCode,
-    ) {
+    fn verify_state(&eof_pdu: &EofPdu, file_flag: LargeFileFlag) {
         assert_eq!(eof_pdu.file_checksum(), 0x01020304);
         assert_eq!(eof_pdu.file_size(), 12);
-        assert_eq!(eof_pdu.condition_code(), cond_code);
-        assert_eq!(eof_pdu.crc_flag(), crc_flag);
+        assert_eq!(eof_pdu.condition_code(), ConditionCode::NoError);
+        assert_eq!(eof_pdu.crc_flag(), CrcFlag::NoCrc);
         assert_eq!(eof_pdu.file_flag(), file_flag);
         assert_eq!(eof_pdu.pdu_type(), PduType::FileDirective);
         assert_eq!(
@@ -223,7 +196,7 @@ mod tests {
         let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0);
         let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12);
         assert_eq!(eof_pdu.len_written(), pdu_header.header_len() + 2 + 4 + 4);
-        verify_state_no_error_no_crc(&eof_pdu, LargeFileFlag::Normal);
+        verify_state(&eof_pdu, LargeFileFlag::Normal);
     }
     #[test]
@@ -297,7 +270,7 @@ mod tests {
         buf[written - 1] -= 1;
         let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16;
         let error = EofPdu::from_bytes(&buf).unwrap_err();
-        if let PduError::Checksum(e) = error {
+        if let PduError::ChecksumError(e) = error {
             assert_eq!(e, crc);
         } else {
             panic!("expected crc error");
@@ -309,7 +282,7 @@ mod tests {
         let pdu_conf = common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Large);
         let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0);
         let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12);
-        verify_state_no_error_no_crc(&eof_pdu, LargeFileFlag::Large);
+        verify_state(&eof_pdu, LargeFileFlag::Large);
         assert_eq!(eof_pdu.len_written(), pdu_header.header_len() + 2 + 8 + 4);
     }
@@ -321,48 +294,4 @@ mod tests {
         let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12);
         generic_serde_test(eof_pdu);
     }
fn generic_test_with_fault_location_and_error(crc: CrcFlag) {
let pdu_conf = common_pdu_conf(crc, LargeFileFlag::Normal);
let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0);
let eof_pdu = EofPdu::new(
pdu_header,
ConditionCode::FileChecksumFailure,
0x01020304,
12,
Some(EntityIdTlv::new(UnsignedByteFieldU16::new(5).into())),
);
let mut expected_len = pdu_header.header_len() + 2 + 4 + 4 + 4;
if crc == CrcFlag::WithCrc {
expected_len += 2;
}
// Entity ID TLV increases length by 4.
assert_eq!(eof_pdu.len_written(), expected_len);
verify_state(
&eof_pdu,
crc,
LargeFileFlag::Normal,
ConditionCode::FileChecksumFailure,
);
let eof_vec = eof_pdu.to_vec().unwrap();
let eof_read_back = EofPdu::from_bytes(&eof_vec);
if let Err(e) = eof_read_back {
panic!("deserialization failed with: {e}")
}
let eof_read_back = eof_read_back.unwrap();
assert_eq!(eof_read_back, eof_pdu);
assert!(eof_read_back.fault_location.is_some());
assert_eq!(eof_read_back.fault_location.unwrap().entity_id().value(), 5);
assert_eq!(eof_read_back.fault_location.unwrap().entity_id().size(), 2);
}
#[test]
fn test_with_fault_location_and_error() {
generic_test_with_fault_location_and_error(CrcFlag::NoCrc);
}
#[test]
fn test_with_fault_location_and_error_and_crc() {
generic_test_with_fault_location_and_error(CrcFlag::WithCrc);
}
}

View File

@@ -92,67 +92,16 @@ impl<'seg_meta> SegmentMetadata<'seg_meta> {
     }
 }
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
struct FdPduBase<'seg_meta> {
pdu_header: PduHeader,
#[cfg_attr(feature = "serde", serde(borrow))]
segment_metadata: Option<SegmentMetadata<'seg_meta>>,
offset: u64,
}
impl CfdpPdu for FdPduBase<'_> {
fn pdu_header(&self) -> &PduHeader {
&self.pdu_header
}
fn file_directive_type(&self) -> Option<FileDirectiveType> {
None
}
}
impl FdPduBase<'_> {
fn calc_pdu_datafield_len(&self, file_data_len: u64) -> usize {
let mut len = core::mem::size_of::<u32>();
if self.pdu_header.pdu_conf.file_flag == LargeFileFlag::Large {
len += 4;
}
if self.segment_metadata.is_some() {
len += self.segment_metadata.as_ref().unwrap().written_len()
}
len += file_data_len as usize;
if self.crc_flag() == CrcFlag::WithCrc {
len += 2;
}
len
}
fn write_common_fields_to_bytes(&self, buf: &mut [u8]) -> Result<usize, PduError> {
let mut current_idx = self.pdu_header.write_to_bytes(buf)?;
if self.segment_metadata.is_some() {
current_idx += self
.segment_metadata
.as_ref()
.unwrap()
.write_to_bytes(&mut buf[current_idx..])?;
}
current_idx += write_fss_field(
self.pdu_header.common_pdu_conf().file_flag,
self.offset,
&mut buf[current_idx..],
)?;
Ok(current_idx)
}
}
 /// File Data PDU abstraction.
 ///
 /// For more information, refer to CFDP chapter 5.3.
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 pub struct FileDataPdu<'seg_meta, 'file_data> {
+    pdu_header: PduHeader,
     #[cfg_attr(feature = "serde", serde(borrow))]
-    common: FdPduBase<'seg_meta>,
+    segment_metadata: Option<SegmentMetadata<'seg_meta>>,
+    offset: u64,
     file_data: &'file_data [u8],
 }
@@ -185,34 +134,42 @@ impl<'seg_meta, 'file_data> FileDataPdu<'seg_meta, 'file_data> {
             pdu_header.seg_metadata_flag = SegmentMetadataFlag::Present;
         }
         let mut pdu = Self {
-            common: FdPduBase {
-                pdu_header,
-                segment_metadata,
-                offset,
-            },
+            pdu_header,
+            segment_metadata,
+            offset,
             file_data,
         };
-        pdu.common.pdu_header.pdu_datafield_len = pdu.calc_pdu_datafield_len() as u16;
+        pdu.pdu_header.pdu_datafield_len = pdu.calc_pdu_datafield_len() as u16;
         pdu
     }
 
     fn calc_pdu_datafield_len(&self) -> usize {
-        self.common
-            .calc_pdu_datafield_len(self.file_data.len() as u64)
-    }
-
-    pub fn segment_metadata(&self) -> Option<&SegmentMetadata> {
-        self.common.segment_metadata.as_ref()
+        let mut len = core::mem::size_of::<u32>();
+        if self.pdu_header.pdu_conf.file_flag == LargeFileFlag::Large {
+            len += 4;
+        }
+        if self.segment_metadata.is_some() {
+            len += self.segment_metadata.as_ref().unwrap().written_len()
+        }
+        len += self.file_data.len();
+        if self.crc_flag() == CrcFlag::WithCrc {
+            len += 2;
+        }
+        len
     }
 
     pub fn offset(&self) -> u64 {
-        self.common.offset
+        self.offset
     }
 
     pub fn file_data(&self) -> &'file_data [u8] {
         self.file_data
     }
 
+    pub fn segment_metadata(&self) -> Option<&SegmentMetadata> {
+        self.segment_metadata.as_ref()
+    }
     pub fn from_bytes<'buf: 'seg_meta + 'file_data>(buf: &'buf [u8]) -> Result<Self, PduError> {
         let (pdu_header, mut current_idx) = PduHeader::from_bytes(buf)?;
         let full_len_without_crc = pdu_header.verify_length_and_checksum(buf)?;
@@ -233,18 +190,16 @@ impl<'seg_meta, 'file_data> FileDataPdu<'seg_meta, 'file_data> {
             .into());
         }
         Ok(Self {
-            common: FdPduBase {
-                pdu_header,
-                segment_metadata,
-                offset,
-            },
+            pdu_header,
+            segment_metadata,
+            offset,
             file_data: &buf[current_idx..full_len_without_crc],
         })
     }
 }
 
 impl CfdpPdu for FileDataPdu<'_, '_> {
     fn pdu_header(&self) -> &PduHeader {
-        &self.common.pdu_header
+        &self.pdu_header
     }
     fn file_directive_type(&self) -> Option<FileDirectiveType> {
@@ -261,8 +216,19 @@ impl WritablePduPacket for FileDataPdu<'_, '_> {
             }
             .into());
         }
-        let mut current_idx = self.common.write_common_fields_to_bytes(buf)?;
+        let mut current_idx = self.pdu_header.write_to_bytes(buf)?;
+        if self.segment_metadata.is_some() {
+            current_idx += self
+                .segment_metadata
+                .as_ref()
+                .unwrap()
+                .write_to_bytes(&mut buf[current_idx..])?;
+        }
+        current_idx += write_fss_field(
+            self.pdu_header.common_pdu_conf().file_flag,
+            self.offset,
+            &mut buf[current_idx..],
+        )?;
         buf[current_idx..current_idx + self.file_data.len()].copy_from_slice(self.file_data);
         current_idx += self.file_data.len();
         if self.crc_flag() == CrcFlag::WithCrc {
@@ -272,167 +238,10 @@ impl WritablePduPacket for FileDataPdu<'_, '_> {
     }
 
     fn len_written(&self) -> usize {
-        self.common.pdu_header.header_len() + self.calc_pdu_datafield_len()
+        self.pdu_header.header_len() + self.calc_pdu_datafield_len()
     }
 }
/// File Data PDU creator abstraction.
///
/// This special creator object allows to read into the file data buffer directly. This avoids
/// the need of an additional buffer to create a file data PDU. This structure therefore
/// does not implement the regular [WritablePduPacket] trait.
///
/// For more information, refer to CFDP chapter 5.3.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct FileDataPduCreatorWithReservedDatafield<'seg_meta> {
#[cfg_attr(feature = "serde", serde(borrow))]
common: FdPduBase<'seg_meta>,
file_data_len: u64,
}
impl<'seg_meta> FileDataPduCreatorWithReservedDatafield<'seg_meta> {
pub fn new_with_seg_metadata(
pdu_header: PduHeader,
segment_metadata: SegmentMetadata<'seg_meta>,
offset: u64,
file_data_len: u64,
) -> Self {
Self::new_generic(pdu_header, Some(segment_metadata), offset, file_data_len)
}
pub fn new_no_seg_metadata(pdu_header: PduHeader, offset: u64, file_data_len: u64) -> Self {
Self::new_generic(pdu_header, None, offset, file_data_len)
}
pub fn new_generic(
mut pdu_header: PduHeader,
segment_metadata: Option<SegmentMetadata<'seg_meta>>,
offset: u64,
file_data_len: u64,
) -> Self {
pdu_header.pdu_type = PduType::FileData;
if segment_metadata.is_some() {
pdu_header.seg_metadata_flag = SegmentMetadataFlag::Present;
}
let mut pdu = Self {
common: FdPduBase {
pdu_header,
segment_metadata,
offset,
},
file_data_len,
};
pdu.common.pdu_header.pdu_datafield_len = pdu.calc_pdu_datafield_len() as u16;
pdu
}
fn calc_pdu_datafield_len(&self) -> usize {
self.common.calc_pdu_datafield_len(self.file_data_len)
}
pub fn len_written(&self) -> usize {
self.common.pdu_header.header_len() + self.calc_pdu_datafield_len()
}
/// This function performs a partial write by writing all data except the file data
/// and the CRC.
///
/// It returns a [FileDataPduCreatorWithUnwrittenData] which provides a mutable slice to
/// the reserved file data field. The user can read file data into this field directly and
/// then finish the PDU creation using the [FileDataPduCreatorWithUnwrittenData::finish] call.
pub fn write_to_bytes_partially<'buf>(
&self,
buf: &'buf mut [u8],
) -> Result<FileDataPduCreatorWithUnwrittenData<'buf>, PduError> {
if buf.len() < self.len_written() {
return Err(ByteConversionError::ToSliceTooSmall {
found: buf.len(),
expected: self.len_written(),
}
.into());
}
let mut current_idx = self.common.write_common_fields_to_bytes(buf)?;
let file_data_offset = current_idx as u64;
current_idx += self.file_data_len as usize;
if self.crc_flag() == CrcFlag::WithCrc {
current_idx += 2;
}
Ok(FileDataPduCreatorWithUnwrittenData {
write_buf: &mut buf[0..current_idx],
file_data_offset,
file_data_len: self.file_data_len,
needs_crc: self.crc_flag() == CrcFlag::WithCrc,
})
}
}
impl CfdpPdu for FileDataPduCreatorWithReservedDatafield<'_> {
fn pdu_header(&self) -> &PduHeader {
&self.common.pdu_header
}
fn file_directive_type(&self) -> Option<FileDirectiveType> {
None
}
}
/// This structure is created with [FileDataPduCreatorWithReservedDatafield::write_to_bytes_partially]
/// and provides an API to read file data from the virtual filesystem into the file data PDU buffer
/// directly.
///
/// This structure provides a mutable slice to the reserved file data field. The user can read
/// file data into this field directly and then finish the PDU creation using the
/// [FileDataPduCreatorWithUnwrittenData::finish] call.
pub struct FileDataPduCreatorWithUnwrittenData<'buf> {
write_buf: &'buf mut [u8],
file_data_offset: u64,
file_data_len: u64,
needs_crc: bool,
}
impl FileDataPduCreatorWithUnwrittenData<'_> {
pub fn file_data_field_mut(&mut self) -> &mut [u8] {
&mut self.write_buf[self.file_data_offset as usize
..self.file_data_offset as usize + self.file_data_len as usize]
}
/// This function needs to be called to add a CRC to the file data PDU where applicable.
///
/// It returns the full written size of the PDU.
pub fn finish(self) -> usize {
if self.needs_crc {
add_pdu_crc(
self.write_buf,
self.file_data_offset as usize + self.file_data_len as usize,
);
}
self.write_buf.len()
}
}
/// This function can be used to calculate the maximum allowed file segment size for
/// a given maximum packet length and the segment metadata if there is any.
pub fn calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(
pdu_header: &PduHeader,
max_packet_len: usize,
segment_metadata: Option<&SegmentMetadata>,
) -> usize {
let mut subtract = pdu_header.header_len();
if segment_metadata.is_some() {
subtract += 1 + segment_metadata.as_ref().unwrap().metadata().unwrap().len();
}
if pdu_header.common_pdu_conf().file_flag == LargeFileFlag::Large {
subtract += 8;
} else {
subtract += 4;
}
if pdu_header.common_pdu_conf().crc_flag == CrcFlag::WithCrc {
subtract += 2;
}
max_packet_len.saturating_sub(subtract)
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@ -454,7 +263,7 @@ mod tests {
assert!(fd_pdu.segment_metadata().is_none()); assert!(fd_pdu.segment_metadata().is_none());
assert_eq!( assert_eq!(
fd_pdu.len_written(), fd_pdu.len_written(),
fd_pdu.pdu_header().header_len() + core::mem::size_of::<u32>() + 4 fd_pdu.pdu_header.header_len() + core::mem::size_of::<u32>() + 4
); );
assert_eq!(fd_pdu.crc_flag(), CrcFlag::NoCrc); assert_eq!(fd_pdu.crc_flag(), CrcFlag::NoCrc);
@ -481,11 +290,11 @@ mod tests {
let written = res.unwrap(); let written = res.unwrap();
assert_eq!( assert_eq!(
written, written,
fd_pdu.pdu_header().header_len() + core::mem::size_of::<u32>() + 4 fd_pdu.pdu_header.header_len() + core::mem::size_of::<u32>() + 4
); );
let mut current_idx = fd_pdu.pdu_header().header_len(); let mut current_idx = fd_pdu.pdu_header.header_len();
let file_size = u32::from_be_bytes( let file_size = u32::from_be_bytes(
buf[fd_pdu.pdu_header().header_len()..fd_pdu.pdu_header().header_len() + 4] buf[fd_pdu.pdu_header.header_len()..fd_pdu.pdu_header.header_len() + 4]
.try_into() .try_into()
.unwrap(), .unwrap(),
); );
@ -544,7 +353,7 @@ mod tests {
buf[written - 1] -= 1; buf[written - 1] -= 1;
let crc: u16 = ((buf[written - 2] as u16) << 8) | buf[written - 1] as u16; let crc: u16 = ((buf[written - 2] as u16) << 8) | buf[written - 1] as u16;
let error = FileDataPdu::from_bytes(&buf).unwrap_err(); let error = FileDataPdu::from_bytes(&buf).unwrap_err();
if let PduError::Checksum(e) = error { if let PduError::ChecksumError(e) = error {
assert_eq!(e, crc); assert_eq!(e, crc);
} else { } else {
panic!("expected crc error"); panic!("expected crc error");
@ -571,7 +380,7 @@ mod tests {
assert_eq!(*fd_pdu.segment_metadata().unwrap(), segment_meta); assert_eq!(*fd_pdu.segment_metadata().unwrap(), segment_meta);
assert_eq!( assert_eq!(
fd_pdu.len_written(), fd_pdu.len_written(),
fd_pdu.pdu_header().header_len() fd_pdu.pdu_header.header_len()
+ 1 + 1
+ seg_metadata.len() + seg_metadata.len()
+ core::mem::size_of::<u32>() + core::mem::size_of::<u32>()
@ -581,7 +390,7 @@ mod tests {
fd_pdu fd_pdu
.write_to_bytes(&mut buf) .write_to_bytes(&mut buf)
.expect("writing FD PDU failed"); .expect("writing FD PDU failed");
let mut current_idx = fd_pdu.pdu_header().header_len(); let mut current_idx = fd_pdu.pdu_header.header_len();
assert_eq!( assert_eq!(
RecordContinuationState::try_from((buf[current_idx] >> 6) & 0b11).unwrap(), RecordContinuationState::try_from((buf[current_idx] >> 6) & 0b11).unwrap(),
RecordContinuationState::StartAndEnd RecordContinuationState::StartAndEnd
@ -673,142 +482,4 @@ mod tests {
let output_converted_back: FileDataPdu = from_bytes(&output).unwrap(); let output_converted_back: FileDataPdu = from_bytes(&output).unwrap();
assert_eq!(output_converted_back, fd_pdu); assert_eq!(output_converted_back, fd_pdu);
} }
#[test]
fn test_fd_pdu_creator_with_reserved_field_no_crc() {
let common_conf =
CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap();
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
let test_str = "hello world!";
let fd_pdu = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
pdu_header,
10,
test_str.len() as u64,
);
let mut write_buf: [u8; 64] = [0; 64];
let mut pdu_unwritten = fd_pdu
.write_to_bytes_partially(&mut write_buf)
.expect("partial write failed");
pdu_unwritten
.file_data_field_mut()
.copy_from_slice(test_str.as_bytes());
pdu_unwritten.finish();
let pdu_reader = FileDataPdu::from_bytes(&write_buf).expect("reading file data PDU failed");
assert_eq!(
core::str::from_utf8(pdu_reader.file_data()).expect("reading utf8 string failed"),
"hello world!"
);
}
#[test]
fn test_fd_pdu_creator_with_reserved_field_with_crc() {
let mut common_conf =
CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap();
common_conf.crc_flag = true.into();
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
let test_str = "hello world!";
let fd_pdu = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
pdu_header,
10,
test_str.len() as u64,
);
let mut write_buf: [u8; 64] = [0; 64];
let mut pdu_unwritten = fd_pdu
.write_to_bytes_partially(&mut write_buf)
.expect("partial write failed");
pdu_unwritten
.file_data_field_mut()
.copy_from_slice(test_str.as_bytes());
pdu_unwritten.finish();
let pdu_reader = FileDataPdu::from_bytes(&write_buf).expect("reading file data PDU failed");
assert_eq!(
core::str::from_utf8(pdu_reader.file_data()).expect("reading utf8 string failed"),
"hello world!"
);
}
#[test]
fn test_fd_pdu_creator_with_reserved_field_with_crc_without_finish_fails() {
let mut common_conf =
CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap();
common_conf.crc_flag = true.into();
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
let test_str = "hello world!";
let fd_pdu = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
pdu_header,
10,
test_str.len() as u64,
);
let mut write_buf: [u8; 64] = [0; 64];
let mut pdu_unwritten = fd_pdu
.write_to_bytes_partially(&mut write_buf)
.expect("partial write failed");
pdu_unwritten
.file_data_field_mut()
.copy_from_slice(test_str.as_bytes());
let pdu_reader_error = FileDataPdu::from_bytes(&write_buf);
assert!(pdu_reader_error.is_err());
let error = pdu_reader_error.unwrap_err();
match error {
PduError::Checksum(_) => (),
_ => {
panic!("unexpected PDU error {}", error)
}
}
}
#[test]
fn test_max_file_seg_calculator_0() {
let pdu_header = PduHeader::new_for_file_data_default(CommonPduConfig::default(), 0);
assert_eq!(
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 64, None),
53
);
}
#[test]
fn test_max_file_seg_calculator_1() {
let common_conf = CommonPduConfig {
crc_flag: CrcFlag::WithCrc,
..Default::default()
};
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
assert_eq!(
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 64, None),
51
);
}
#[test]
fn test_max_file_seg_calculator_2() {
let common_conf = CommonPduConfig {
file_flag: LargeFileFlag::Large,
..Default::default()
};
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
assert_eq!(
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 64, None),
49
);
}
#[test]
fn test_max_file_seg_calculator_saturating_sub() {
let common_conf = CommonPduConfig {
file_flag: LargeFileFlag::Large,
..Default::default()
};
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
assert_eq!(
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 15, None),
0
);
assert_eq!(
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 14, None),
0
);
}
} }

View File

@ -4,18 +4,16 @@ use crate::cfdp::pdu::{
use crate::cfdp::tlv::{ use crate::cfdp::tlv::{
EntityIdTlv, FilestoreResponseTlv, GenericTlv, Tlv, TlvType, TlvTypeField, WritableTlv, EntityIdTlv, FilestoreResponseTlv, GenericTlv, Tlv, TlvType, TlvTypeField, WritableTlv,
}; };
use crate::cfdp::{ConditionCode, CrcFlag, Direction, PduType}; use crate::cfdp::{ConditionCode, CrcFlag, Direction, PduType, TlvLvError};
use crate::ByteConversionError; use crate::ByteConversionError;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::tlv::ReadableTlv; use super::{CfdpPdu, WritablePduPacket};
use super::{CfdpPdu, InvalidTlvTypeFieldError, WritablePduPacket};
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum DeliveryCode { pub enum DeliveryCode {
Complete = 0, Complete = 0,
@ -24,7 +22,6 @@ pub enum DeliveryCode {
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum FileStatus { pub enum FileStatus {
DiscardDeliberately = 0b00, DiscardDeliberately = 0b00,
@ -37,7 +34,6 @@ pub enum FileStatus {
/// ///
/// For more information, refer to CFDP chapter 5.2.3. /// For more information, refer to CFDP chapter 5.2.3.
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct FinishedPduCreator<'fs_responses> { pub struct FinishedPduCreator<'fs_responses> {
pdu_header: PduHeader, pdu_header: PduHeader,
condition_code: ConditionCode, condition_code: ConditionCode,
@ -223,7 +219,6 @@ impl<'buf> Iterator for FilestoreResponseIterator<'buf> {
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct FinishedPduReader<'buf> { pub struct FinishedPduReader<'buf> {
pdu_header: PduHeader, pdu_header: PduHeader,
condition_code: ConditionCode, condition_code: ConditionCode,
@ -332,26 +327,22 @@ impl<'buf> FinishedPduReader<'buf> {
// last TLV, everything else would break the whole handling of the packet // last TLV, everything else would break the whole handling of the packet
// TLVs. // TLVs.
if current_idx != full_len_without_crc { if current_idx != full_len_without_crc {
return Err(PduError::Format); return Err(PduError::FormatError);
} }
} else { } else {
return Err(PduError::TlvLv( return Err(TlvLvError::InvalidTlvTypeField {
InvalidTlvTypeFieldError {
found: tlv_type.into(), found: tlv_type.into(),
expected: Some(TlvType::FilestoreResponse.into()), expected: Some(TlvType::FilestoreResponse.into()),
} }
.into(), .into());
));
} }
} }
TlvTypeField::Custom(raw) => { TlvTypeField::Custom(raw) => {
return Err(PduError::TlvLv( return Err(TlvLvError::InvalidTlvTypeField {
InvalidTlvTypeFieldError {
found: raw, found: raw,
expected: None, expected: None,
} }
.into(), .into());
));
} }
} }
} }
@ -568,7 +559,7 @@ mod tests {
buf[written - 1] -= 1; buf[written - 1] -= 1;
let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16; let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16;
let error = FinishedPduReader::new(&buf).unwrap_err(); let error = FinishedPduReader::new(&buf).unwrap_err();
if let PduError::Checksum(e) = error { if let PduError::ChecksumError(e) = error {
assert_eq!(e, crc); assert_eq!(e, crc);
} else { } else {
panic!("expected crc error"); panic!("expected crc error");

View File

@ -1,5 +1,3 @@
#[cfg(feature = "alloc")]
use super::tlv::TlvOwned;
use crate::cfdp::lv::Lv; use crate::cfdp::lv::Lv;
use crate::cfdp::pdu::{ use crate::cfdp::pdu::{
add_pdu_crc, generic_length_checks_pdu_deserialization, read_fss_field, write_fss_field, add_pdu_crc, generic_length_checks_pdu_deserialization, read_fss_field, write_fss_field,
@ -13,12 +11,10 @@ use alloc::vec::Vec;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::tlv::ReadableTlv;
use super::{CfdpPdu, WritablePduPacket}; use super::{CfdpPdu, WritablePduPacket};
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq)] #[derive(Default, Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct MetadataGenericParams { pub struct MetadataGenericParams {
pub closure_requested: bool, pub closure_requested: bool,
pub checksum_type: ChecksumType, pub checksum_type: ChecksumType,
@ -54,15 +50,6 @@ pub fn build_metadata_opts_from_vec(
build_metadata_opts_from_slice(buf, tlvs.as_slice()) build_metadata_opts_from_slice(buf, tlvs.as_slice())
} }
#[cfg(feature = "alloc")]
pub fn build_metadata_opts_from_owned_slice(tlvs: &[TlvOwned]) -> Vec<u8> {
let mut sum_vec = Vec::new();
for tlv in tlvs {
sum_vec.extend(tlv.to_vec());
}
sum_vec
}
/// Metadata PDU creator abstraction. /// Metadata PDU creator abstraction.
/// ///
/// This abstraction exposes a specialized API for creating metadata PDUs as specified in /// This abstraction exposes a specialized API for creating metadata PDUs as specified in
@ -73,7 +60,7 @@ pub struct MetadataPduCreator<'src_name, 'dest_name, 'opts> {
metadata_params: MetadataGenericParams, metadata_params: MetadataGenericParams,
src_file_name: Lv<'src_name>, src_file_name: Lv<'src_name>,
dest_file_name: Lv<'dest_name>, dest_file_name: Lv<'dest_name>,
options: &'opts [u8], options: &'opts [Tlv<'opts>],
} }
impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'opts> { impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'opts> {
@ -97,7 +84,7 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
metadata_params: MetadataGenericParams, metadata_params: MetadataGenericParams,
src_file_name: Lv<'src_name>, src_file_name: Lv<'src_name>,
dest_file_name: Lv<'dest_name>, dest_file_name: Lv<'dest_name>,
options: &'opts [u8], options: &'opts [Tlv<'opts>],
) -> Self { ) -> Self {
Self::new( Self::new(
pdu_header, pdu_header,
@ -113,7 +100,7 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
metadata_params: MetadataGenericParams, metadata_params: MetadataGenericParams,
src_file_name: Lv<'src_name>, src_file_name: Lv<'src_name>,
dest_file_name: Lv<'dest_name>, dest_file_name: Lv<'dest_name>,
options: &'opts [u8], options: &'opts [Tlv<'opts>],
) -> Self { ) -> Self {
pdu_header.pdu_type = PduType::FileDirective; pdu_header.pdu_type = PduType::FileDirective;
pdu_header.pdu_conf.direction = Direction::TowardsReceiver; pdu_header.pdu_conf.direction = Direction::TowardsReceiver;
@ -140,19 +127,10 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
self.dest_file_name self.dest_file_name
} }
pub fn options(&self) -> &'opts [u8] { pub fn options(&self) -> &'opts [Tlv<'opts>] {
self.options self.options
} }
/// Yield an iterator which can be used to loop through all options. Returns [None] if the
/// options field is empty.
pub fn options_iter(&self) -> OptionsIter<'_> {
OptionsIter {
opt_buf: self.options,
current_idx: 0,
}
}
fn calc_pdu_datafield_len(&self) -> usize { fn calc_pdu_datafield_len(&self) -> usize {
// One directive type octet and one byte of the directive parameter field. // One directive type octet and one byte of the directive parameter field.
let mut len = 2; let mut len = 2;
@ -163,7 +141,9 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
} }
len += self.src_file_name.len_full(); len += self.src_file_name.len_full();
len += self.dest_file_name.len_full(); len += self.dest_file_name.len_full();
len += self.options().len(); for tlv in self.options() {
len += tlv.len_full()
}
if self.crc_flag() == CrcFlag::WithCrc { if self.crc_flag() == CrcFlag::WithCrc {
len += 2; len += 2;
} }
@ -209,8 +189,10 @@ impl WritablePduPacket for MetadataPduCreator<'_, '_, '_> {
current_idx += self current_idx += self
.dest_file_name .dest_file_name
.write_to_be_bytes(&mut buf[current_idx..])?; .write_to_be_bytes(&mut buf[current_idx..])?;
buf[current_idx..current_idx + self.options.len()].copy_from_slice(self.options); for opt in self.options() {
current_idx += self.options.len(); opt.write_to_bytes(&mut buf[current_idx..current_idx + opt.len_full()])?;
current_idx += opt.len_full();
}
if self.crc_flag() == CrcFlag::WithCrc { if self.crc_flag() == CrcFlag::WithCrc {
current_idx = add_pdu_crc(buf, current_idx); current_idx = add_pdu_crc(buf, current_idx);
} }
@ -259,7 +241,6 @@ impl<'opts> Iterator for OptionsIter<'opts> {
/// involved. /// involved.
#[derive(Debug)] #[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct MetadataPduReader<'buf> { pub struct MetadataPduReader<'buf> {
pdu_header: PduHeader, pdu_header: PduHeader,
metadata_params: MetadataGenericParams, metadata_params: MetadataGenericParams,
@ -371,7 +352,7 @@ pub mod tests {
}; };
use crate::cfdp::pdu::{CfdpPdu, PduError, WritablePduPacket}; use crate::cfdp::pdu::{CfdpPdu, PduError, WritablePduPacket};
use crate::cfdp::pdu::{FileDirectiveType, PduHeader}; use crate::cfdp::pdu::{FileDirectiveType, PduHeader};
use crate::cfdp::tlv::{ReadableTlv, Tlv, TlvOwned, TlvType, WritableTlv}; use crate::cfdp::tlv::{Tlv, TlvType};
use crate::cfdp::{ use crate::cfdp::{
ChecksumType, CrcFlag, Direction, LargeFileFlag, PduType, SegmentMetadataFlag, ChecksumType, CrcFlag, Direction, LargeFileFlag, PduType, SegmentMetadataFlag,
SegmentationControl, TransmissionMode, SegmentationControl, TransmissionMode,
@ -381,16 +362,16 @@ pub mod tests {
const SRC_FILENAME: &str = "hello-world.txt"; const SRC_FILENAME: &str = "hello-world.txt";
const DEST_FILENAME: &str = "hello-world2.txt"; const DEST_FILENAME: &str = "hello-world2.txt";
fn generic_metadata_pdu( fn generic_metadata_pdu<'opts>(
crc_flag: CrcFlag, crc_flag: CrcFlag,
checksum_type: ChecksumType, checksum_type: ChecksumType,
closure_requested: bool, closure_requested: bool,
fss: LargeFileFlag, fss: LargeFileFlag,
opts: &[u8], opts: &'opts [Tlv],
) -> ( ) -> (
Lv<'static>, Lv<'static>,
Lv<'static>, Lv<'static>,
MetadataPduCreator<'static, 'static, '_>, MetadataPduCreator<'static, 'static, 'opts>,
) { ) {
let pdu_header = PduHeader::new_no_file_data(common_pdu_conf(crc_flag, fss), 0); let pdu_header = PduHeader::new_no_file_data(common_pdu_conf(crc_flag, fss), 0);
let metadata_params = MetadataGenericParams::new(closure_requested, checksum_type, 0x1010); let metadata_params = MetadataGenericParams::new(closure_requested, checksum_type, 0x1010);
@ -560,9 +541,9 @@ pub mod tests {
assert_eq!(written.metadata_params(), read.metadata_params()); assert_eq!(written.metadata_params(), read.metadata_params());
assert_eq!(written.src_file_name(), read.src_file_name()); assert_eq!(written.src_file_name(), read.src_file_name());
assert_eq!(written.dest_file_name(), read.dest_file_name()); assert_eq!(written.dest_file_name(), read.dest_file_name());
let opts = written.options_iter(); let opts = written.options();
for (tlv_written, tlv_read) in opts.zip(read.options_iter().unwrap()) { for (tlv_written, tlv_read) in opts.iter().zip(read.options_iter().unwrap()) {
assert_eq!(&tlv_written, &tlv_read); assert_eq!(tlv_written, &tlv_read);
} }
} }
@ -677,14 +658,14 @@ pub mod tests {
let tlv1 = Tlv::new_empty(TlvType::FlowLabel); let tlv1 = Tlv::new_empty(TlvType::FlowLabel);
let msg_to_user: [u8; 4] = [1, 2, 3, 4]; let msg_to_user: [u8; 4] = [1, 2, 3, 4];
let tlv2 = Tlv::new(TlvType::MsgToUser, &msg_to_user).unwrap(); let tlv2 = Tlv::new(TlvType::MsgToUser, &msg_to_user).unwrap();
let mut tlv_buf: [u8; 64] = [0; 64]; let tlv_vec = vec![tlv1, tlv2];
let opts_len = build_metadata_opts_from_slice(&mut tlv_buf, &[tlv1, tlv2]).unwrap(); let opts_len = tlv1.len_full() + tlv2.len_full();
let (src_filename, dest_filename, metadata_pdu) = generic_metadata_pdu( let (src_filename, dest_filename, metadata_pdu) = generic_metadata_pdu(
CrcFlag::NoCrc, CrcFlag::NoCrc,
ChecksumType::Crc32, ChecksumType::Crc32,
false, false,
LargeFileFlag::Normal, LargeFileFlag::Normal,
&tlv_buf[0..opts_len], &tlv_vec,
); );
let mut buf: [u8; 128] = [0; 128]; let mut buf: [u8; 128] = [0; 128];
let write_res = metadata_pdu.write_to_bytes(&mut buf); let write_res = metadata_pdu.write_to_bytes(&mut buf);
@ -707,55 +688,7 @@ pub mod tests {
let opts_iter = opts_iter.unwrap(); let opts_iter = opts_iter.unwrap();
let mut accumulated_len = 0; let mut accumulated_len = 0;
for (idx, opt) in opts_iter.enumerate() { for (idx, opt) in opts_iter.enumerate() {
if idx == 0 { assert_eq!(tlv_vec[idx], opt);
assert_eq!(tlv1, opt);
} else if idx == 1 {
assert_eq!(tlv2, opt);
}
accumulated_len += opt.len_full();
}
assert_eq!(accumulated_len, pdu_read_back.options().len());
}
#[test]
fn test_with_owned_opts() {
let tlv1 = TlvOwned::new_empty(TlvType::FlowLabel);
let msg_to_user: [u8; 4] = [1, 2, 3, 4];
let tlv2 = TlvOwned::new(TlvType::MsgToUser, &msg_to_user);
let mut all_tlvs = tlv1.to_vec();
all_tlvs.extend(tlv2.to_vec());
let (src_filename, dest_filename, metadata_pdu) = generic_metadata_pdu(
CrcFlag::NoCrc,
ChecksumType::Crc32,
false,
LargeFileFlag::Normal,
&all_tlvs,
);
let mut buf: [u8; 128] = [0; 128];
let write_res = metadata_pdu.write_to_bytes(&mut buf);
assert!(write_res.is_ok());
let written = write_res.unwrap();
assert_eq!(
written,
metadata_pdu.pdu_header.header_len()
+ 1
+ 1
+ 4
+ src_filename.len_full()
+ dest_filename.len_full()
+ all_tlvs.len()
);
let pdu_read_back = MetadataPduReader::from_bytes(&buf).unwrap();
compare_read_pdu_to_written_pdu(&metadata_pdu, &pdu_read_back);
let opts_iter = pdu_read_back.options_iter();
assert!(opts_iter.is_some());
let opts_iter = opts_iter.unwrap();
let mut accumulated_len = 0;
for (idx, opt) in opts_iter.enumerate() {
if idx == 0 {
assert_eq!(tlv1, opt);
} else if idx == 1 {
assert_eq!(tlv2, opt);
}
accumulated_len += opt.len_full(); accumulated_len += opt.len_full();
} }
assert_eq!(accumulated_len, pdu_read_back.options().len()); assert_eq!(accumulated_len, pdu_read_back.options().len());
@ -780,7 +713,7 @@ pub mod tests {
assert_eq!(expected, Some(FileDirectiveType::MetadataPdu)); assert_eq!(expected, Some(FileDirectiveType::MetadataPdu));
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"invalid directive type, found 255, expected Some(MetadataPdu)" "invalid directive type value 255, expected Some(MetadataPdu)"
); );
} else { } else {
panic!("Expected InvalidDirectiveType error, got {:?}", error); panic!("Expected InvalidDirectiveType error, got {:?}", error);
@ -806,7 +739,7 @@ pub mod tests {
assert_eq!(expected, FileDirectiveType::MetadataPdu); assert_eq!(expected, FileDirectiveType::MetadataPdu);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"wrong directive type, found EofPdu, expected MetadataPdu" "found directive type EofPdu, expected MetadataPdu"
); );
} else { } else {
panic!("Expected InvalidDirectiveType error, got {:?}", error); panic!("Expected InvalidDirectiveType error, got {:?}", error);

View File

@ -5,6 +5,9 @@ use crate::ByteConversionError;
use crate::CRC_CCITT_FALSE; use crate::CRC_CCITT_FALSE;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
use core::fmt::{Display, Formatter};
#[cfg(feature = "std")]
use std::error::Error;
pub mod ack; pub mod ack;
pub mod eof; pub mod eof;
@ -15,7 +18,6 @@ pub mod nak;
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum FileDirectiveType { pub enum FileDirectiveType {
EofPdu = 0x04, EofPdu = 0x04,
@ -27,62 +29,135 @@ pub enum FileDirectiveType {
KeepAlivePdu = 0x0c, KeepAlivePdu = 0x0c,
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum PduError { pub enum PduError {
#[error("byte conversion error: {0}")] ByteConversion(ByteConversionError),
ByteConversion(#[from] ByteConversionError), /// Found version ID invalid, not equal to [CFDP_VERSION_2].
/// Found version ID invalid, not equal to [super::CFDP_VERSION_2].
#[error("CFDP version missmatch, found {0}, expected {ver}", ver = super::CFDP_VERSION_2)]
CfdpVersionMissmatch(u8), CfdpVersionMissmatch(u8),
/// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported. /// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported.
#[error("invalid PDU entity ID length {0}, only [1, 2, 4, 8] are allowed")]
InvalidEntityLen(u8), InvalidEntityLen(u8),
/// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported. /// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported.
#[error("invalid transaction ID length {0}")]
InvalidTransactionSeqNumLen(u8), InvalidTransactionSeqNumLen(u8),
#[error(
"missmatch of PDU source ID length {src_id_len} and destination ID length {dest_id_len}"
)]
SourceDestIdLenMissmatch { SourceDestIdLenMissmatch {
src_id_len: usize, src_id_len: usize,
dest_id_len: usize, dest_id_len: usize,
}, },
/// Wrong directive type, for example when parsing the directive field for a file directive /// Wrong directive type, for example when parsing the directive field for a file directive
/// PDU. /// PDU.
#[error("wrong directive type, found {found:?}, expected {expected:?}")]
WrongDirectiveType { WrongDirectiveType {
found: FileDirectiveType, found: FileDirectiveType,
expected: FileDirectiveType, expected: FileDirectiveType,
}, },
/// The directive type field contained a value not in the range of permitted values. This can /// The directive type field contained a value not in the range of permitted values. This can
/// also happen if an invalid value is passed to the ACK PDU constructor. /// also happen if an invalid value is passed to the ACK PDU constructor.
#[error("invalid directive type, found {found:?}, expected {expected:?}")]
InvalidDirectiveType { InvalidDirectiveType {
found: u8, found: u8,
expected: Option<FileDirectiveType>, expected: Option<FileDirectiveType>,
}, },
#[error("invalid start or end of scope value for NAK PDU")]
InvalidStartOrEndOfScopeValue, InvalidStartOrEndOfScopeValue,
/// Invalid condition code. Contains the raw detected value. /// Invalid condition code. Contains the raw detected value.
#[error("invalid condition code {0}")]
InvalidConditionCode(u8), InvalidConditionCode(u8),
/// Invalid checksum type which is not part of the checksums listed in the /// Invalid checksum type which is not part of the checksums listed in the
/// [SANA Checksum Types registry](https://sanaregistry.org/r/checksum_identifiers/). /// [SANA Checksum Types registry](https://sanaregistry.org/r/checksum_identifiers/).
#[error("invalid checksum type {0}")]
InvalidChecksumType(u8), InvalidChecksumType(u8),
#[error("file size {0} too large")]
FileSizeTooLarge(u64), FileSizeTooLarge(u64),
/// If the CRC flag for a PDU is enabled and the checksum check fails. Contains raw 16-bit CRC. /// If the CRC flag for a PDU is enabled and the checksum check fails. Contains raw 16-bit CRC.
#[error("checksum error for checksum {0}")] ChecksumError(u16),
Checksum(u16),
/// Generic error for invalid PDU formats. /// Generic error for invalid PDU formats.
#[error("generic PDU format error")] FormatError,
Format,
/// Error handling a TLV field. /// Error handling a TLV field.
#[error("PDU error: {0}")] TlvLvError(TlvLvError),
TlvLv(#[from] TlvLvError), }
impl Display for PduError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
PduError::InvalidEntityLen(raw_id) => {
write!(
f,
"invalid PDU entity ID length {raw_id}, only [1, 2, 4, 8] are allowed"
)
}
PduError::InvalidStartOrEndOfScopeValue => {
write!(f, "invalid start or end of scope for NAK PDU")
}
PduError::InvalidTransactionSeqNumLen(raw_id) => {
write!(
f,
"invalid PDUtransaction seq num length {raw_id}, only [1, 2, 4, 8] are allowed"
)
}
PduError::CfdpVersionMissmatch(raw) => {
write!(
f,
"cfdp version missmatch, found {raw}, expected {CFDP_VERSION_2}"
)
}
PduError::SourceDestIdLenMissmatch {
src_id_len,
dest_id_len,
} => {
write!(
f,
"missmatch of PDU source length {src_id_len} and destination length {dest_id_len}"
)
}
PduError::ByteConversion(e) => {
write!(f, "{}", e)
}
PduError::FileSizeTooLarge(value) => {
write!(f, "file size value {value} exceeds allowed 32 bit width")
}
PduError::WrongDirectiveType { found, expected } => {
write!(f, "found directive type {found:?}, expected {expected:?}")
}
PduError::InvalidConditionCode(raw_code) => {
write!(f, "found invalid condition code with raw value {raw_code}")
}
PduError::InvalidDirectiveType { found, expected } => {
write!(
f,
"invalid directive type value {found}, expected {expected:?}"
)
}
PduError::InvalidChecksumType(checksum_type) => {
write!(f, "invalid checksum type {checksum_type}")
}
PduError::ChecksumError(checksum) => {
write!(f, "checksum error for CRC {checksum:#04x}")
}
PduError::TlvLvError(error) => {
write!(f, "pdu tlv error: {error}")
}
PduError::FormatError => {
write!(f, "generic PDU format error")
}
}
}
}
#[cfg(feature = "std")]
impl Error for PduError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
PduError::ByteConversion(e) => Some(e),
PduError::TlvLvError(e) => Some(e),
_ => None,
}
}
}
impl From<ByteConversionError> for PduError {
fn from(value: ByteConversionError) -> Self {
Self::ByteConversion(value)
}
}
impl From<TlvLvError> for PduError {
fn from(e: TlvLvError) -> Self {
Self::TlvLvError(e)
}
} }
pub trait WritablePduPacket { pub trait WritablePduPacket {
@ -103,42 +178,33 @@ pub trait WritablePduPacket {
pub trait CfdpPdu { pub trait CfdpPdu {
fn pdu_header(&self) -> &PduHeader; fn pdu_header(&self) -> &PduHeader;
#[inline]
fn source_id(&self) -> UnsignedByteField { fn source_id(&self) -> UnsignedByteField {
self.pdu_header().common_pdu_conf().source_entity_id self.pdu_header().common_pdu_conf().source_entity_id
} }
#[inline]
fn dest_id(&self) -> UnsignedByteField { fn dest_id(&self) -> UnsignedByteField {
self.pdu_header().common_pdu_conf().dest_entity_id self.pdu_header().common_pdu_conf().dest_entity_id
} }
#[inline]
fn transaction_seq_num(&self) -> UnsignedByteField { fn transaction_seq_num(&self) -> UnsignedByteField {
self.pdu_header().common_pdu_conf().transaction_seq_num self.pdu_header().common_pdu_conf().transaction_seq_num
} }
#[inline]
fn transmission_mode(&self) -> TransmissionMode { fn transmission_mode(&self) -> TransmissionMode {
self.pdu_header().common_pdu_conf().trans_mode self.pdu_header().common_pdu_conf().trans_mode
} }
#[inline]
fn direction(&self) -> Direction { fn direction(&self) -> Direction {
self.pdu_header().common_pdu_conf().direction self.pdu_header().common_pdu_conf().direction
} }
#[inline]
fn crc_flag(&self) -> CrcFlag { fn crc_flag(&self) -> CrcFlag {
self.pdu_header().common_pdu_conf().crc_flag self.pdu_header().common_pdu_conf().crc_flag
} }
#[inline]
fn file_flag(&self) -> LargeFileFlag { fn file_flag(&self) -> LargeFileFlag {
self.pdu_header().common_pdu_conf().file_flag self.pdu_header().common_pdu_conf().file_flag
} }
#[inline]
fn pdu_type(&self) -> PduType { fn pdu_type(&self) -> PduType {
self.pdu_header().pdu_type() self.pdu_header().pdu_type()
} }
@ -154,7 +220,6 @@ pub trait CfdpPdu {
/// same. /// same.
#[derive(Debug, Copy, Clone, Eq)] #[derive(Debug, Copy, Clone, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct CommonPduConfig { pub struct CommonPduConfig {
source_entity_id: UnsignedByteField, source_entity_id: UnsignedByteField,
dest_entity_id: UnsignedByteField, dest_entity_id: UnsignedByteField,
@ -167,7 +232,6 @@ pub struct CommonPduConfig {
// TODO: Builder pattern might be applicable here.. // TODO: Builder pattern might be applicable here..
impl CommonPduConfig { impl CommonPduConfig {
#[inline]
pub fn new( pub fn new(
source_id: impl Into<UnsignedByteField>, source_id: impl Into<UnsignedByteField>,
dest_id: impl Into<UnsignedByteField>, dest_id: impl Into<UnsignedByteField>,
@ -199,7 +263,6 @@ impl CommonPduConfig {
}) })
} }
#[inline]
pub fn new_with_byte_fields( pub fn new_with_byte_fields(
source_id: impl Into<UnsignedByteField>, source_id: impl Into<UnsignedByteField>,
dest_id: impl Into<UnsignedByteField>, dest_id: impl Into<UnsignedByteField>,
@ -216,12 +279,10 @@ impl CommonPduConfig {
) )
} }
#[inline]
pub fn source_id(&self) -> UnsignedByteField { pub fn source_id(&self) -> UnsignedByteField {
self.source_entity_id self.source_entity_id
} }
#[inline]
fn source_dest_id_check( fn source_dest_id_check(
source_id: impl Into<UnsignedByteField>, source_id: impl Into<UnsignedByteField>,
dest_id: impl Into<UnsignedByteField>, dest_id: impl Into<UnsignedByteField>,
@ -244,7 +305,6 @@ impl CommonPduConfig {
Ok((source_id, dest_id)) Ok((source_id, dest_id))
} }
#[inline]
pub fn set_source_and_dest_id( pub fn set_source_and_dest_id(
&mut self, &mut self,
source_id: impl Into<UnsignedByteField>, source_id: impl Into<UnsignedByteField>,
@ -256,7 +316,6 @@ impl CommonPduConfig {
Ok(()) Ok(())
} }
#[inline]
pub fn dest_id(&self) -> UnsignedByteField { pub fn dest_id(&self) -> UnsignedByteField {
self.dest_entity_id self.dest_entity_id
} }
@ -265,7 +324,6 @@ impl CommonPduConfig {
impl Default for CommonPduConfig { impl Default for CommonPduConfig {
/// The defaults for the source ID, destination ID and the transaction sequence number are the /// The defaults for the source ID, destination ID and the transaction sequence number are the
/// [UnsignedByteFieldU8] with an initial value of 0 /// [UnsignedByteFieldU8] with an initial value of 0
#[inline]
fn default() -> Self { fn default() -> Self {
// The new function can not fail for these input parameters. // The new function can not fail for these input parameters.
Self::new( Self::new(
@ -282,7 +340,6 @@ impl Default for CommonPduConfig {
} }
impl PartialEq for CommonPduConfig { impl PartialEq for CommonPduConfig {
#[inline]
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.source_entity_id.value() == other.source_entity_id.value() self.source_entity_id.value() == other.source_entity_id.value()
&& self.dest_entity_id.value() == other.dest_entity_id.value() && self.dest_entity_id.value() == other.dest_entity_id.value()
@ -301,7 +358,6 @@ pub const FIXED_HEADER_LEN: usize = 4;
/// For detailed information, refer to chapter 5.1 of the CFDP standard. /// For detailed information, refer to chapter 5.1 of the CFDP standard.
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct PduHeader { pub struct PduHeader {
pdu_type: PduType, pdu_type: PduType,
pdu_conf: CommonPduConfig, pdu_conf: CommonPduConfig,
@ -311,7 +367,6 @@ pub struct PduHeader {
} }
impl PduHeader { impl PduHeader {
#[inline]
pub fn new_for_file_data( pub fn new_for_file_data(
pdu_conf: CommonPduConfig, pdu_conf: CommonPduConfig,
pdu_datafield_len: u16, pdu_datafield_len: u16,
@ -327,7 +382,6 @@ impl PduHeader {
) )
} }
#[inline]
pub fn new_for_file_data_default(pdu_conf: CommonPduConfig, pdu_datafield_len: u16) -> Self { pub fn new_for_file_data_default(pdu_conf: CommonPduConfig, pdu_datafield_len: u16) -> Self {
Self::new_generic( Self::new_generic(
PduType::FileData, PduType::FileData,
@ -337,7 +391,6 @@ impl PduHeader {
SegmentationControl::NoRecordBoundaryPreservation, SegmentationControl::NoRecordBoundaryPreservation,
) )
} }
#[inline]
pub fn new_no_file_data(pdu_conf: CommonPduConfig, pdu_datafield_len: u16) -> Self { pub fn new_no_file_data(pdu_conf: CommonPduConfig, pdu_datafield_len: u16) -> Self {
Self::new_generic( Self::new_generic(
PduType::FileDirective, PduType::FileDirective,
@ -348,7 +401,6 @@ impl PduHeader {
) )
} }
#[inline]
pub fn new_generic( pub fn new_generic(
pdu_type: PduType, pdu_type: PduType,
pdu_conf: CommonPduConfig, pdu_conf: CommonPduConfig,
@ -366,7 +418,6 @@ impl PduHeader {
} }
/// Returns only the length of the PDU header when written to a raw buffer. /// Returns only the length of the PDU header when written to a raw buffer.
#[inline]
pub fn header_len(&self) -> usize { pub fn header_len(&self) -> usize {
FIXED_HEADER_LEN FIXED_HEADER_LEN
+ self.pdu_conf.source_entity_id.size() + self.pdu_conf.source_entity_id.size()
@ -374,14 +425,12 @@ impl PduHeader {
+ self.pdu_conf.dest_entity_id.size() + self.pdu_conf.dest_entity_id.size()
} }
#[inline]
pub fn pdu_datafield_len(&self) -> usize { pub fn pdu_datafield_len(&self) -> usize {
self.pdu_datafield_len.into() self.pdu_datafield_len.into()
} }
/// Returns the full length of the PDU when written to a raw buffer, which is the header length /// Returns the full length of the PDU when written to a raw buffer, which is the header length
/// plus the PDU datafield length. /// plus the PDU datafield length.
#[inline]
pub fn pdu_len(&self) -> usize { pub fn pdu_len(&self) -> usize {
self.header_len() + self.pdu_datafield_len as usize self.header_len() + self.pdu_datafield_len as usize
} }
@ -454,7 +503,7 @@ impl PduHeader {
let mut digest = CRC_CCITT_FALSE.digest(); let mut digest = CRC_CCITT_FALSE.digest();
digest.update(&buf[..self.pdu_len()]); digest.update(&buf[..self.pdu_len()]);
if digest.finalize() != 0 { if digest.finalize() != 0 {
return Err(PduError::Checksum(u16::from_be_bytes( return Err(PduError::ChecksumError(u16::from_be_bytes(
buf[self.pdu_len() - 2..self.pdu_len()].try_into().unwrap(), buf[self.pdu_len() - 2..self.pdu_len()].try_into().unwrap(),
))); )));
} }
@ -554,13 +603,10 @@ impl PduHeader {
current_idx, current_idx,
)) ))
} }
#[inline]
pub fn pdu_type(&self) -> PduType { pub fn pdu_type(&self) -> PduType {
self.pdu_type self.pdu_type
} }
#[inline]
pub fn common_pdu_conf(&self) -> &CommonPduConfig { pub fn common_pdu_conf(&self) -> &CommonPduConfig {
&self.pdu_conf &self.pdu_conf
} }
@ -568,8 +614,6 @@ impl PduHeader {
pub fn seg_metadata_flag(&self) -> SegmentMetadataFlag { pub fn seg_metadata_flag(&self) -> SegmentMetadataFlag {
self.seg_metadata_flag self.seg_metadata_flag
} }
#[inline]
pub fn seg_ctrl(&self) -> SegmentationControl { pub fn seg_ctrl(&self) -> SegmentationControl {
self.seg_ctrl self.seg_ctrl
} }
@ -903,7 +947,7 @@ mod tests {
assert_eq!(raw_version, CFDP_VERSION_2 + 1); assert_eq!(raw_version, CFDP_VERSION_2 + 1);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"CFDP version missmatch, found 2, expected 1" "cfdp version missmatch, found 2, expected 1"
); );
} else { } else {
panic!("invalid exception: {}", error); panic!("invalid exception: {}", error);
@ -951,7 +995,7 @@ mod tests {
assert_eq!(expected, 7); assert_eq!(expected, 7);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"byte conversion error: source slice with size 6 too small, expected at least 7 bytes" "source slice with size 6 too small, expected at least 7 bytes"
); );
} }
} }
@ -1006,7 +1050,7 @@ mod tests {
assert_eq!(dest_id_len, 2); assert_eq!(dest_id_len, 2);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"missmatch of PDU source ID length 1 and destination ID length 2" "missmatch of PDU source length 1 and destination length 2"
); );
} }
} }

View File

@ -12,7 +12,6 @@ use super::{
/// Helper type to encapsulate both normal file size segment requests and large file size segment /// Helper type to encapsulate both normal file size segment requests and large file size segment
/// requests. /// requests.
#[derive(Debug, PartialEq, Eq, Clone)] #[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum SegmentRequests<'a> { pub enum SegmentRequests<'a> {
U32Pairs(&'a [(u32, u32)]), U32Pairs(&'a [(u32, u32)]),
U64Pairs(&'a [(u64, u64)]), U64Pairs(&'a [(u64, u64)]),
@ -32,7 +31,6 @@ impl SegmentRequests<'_> {
/// It exposes a specialized API which simplifies to generate these NAK PDUs with the /// It exposes a specialized API which simplifies to generate these NAK PDUs with the
/// format according to CFDP chapter 5.2.6. /// format according to CFDP chapter 5.2.6.
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct NakPduCreator<'seg_reqs> { pub struct NakPduCreator<'seg_reqs> {
pdu_header: PduHeader, pdu_header: PduHeader,
start_of_scope: u64, start_of_scope: u64,
@ -61,7 +59,7 @@ impl<'seg_reqs> NakPduCreator<'seg_reqs> {
start_of_scope: u32, start_of_scope: u32,
end_of_scope: u32, end_of_scope: u32,
segment_requests: &'seg_reqs [(u32, u32)], segment_requests: &'seg_reqs [(u32, u32)],
) -> Result<NakPduCreator<'seg_reqs>, PduError> { ) -> Result<NakPduCreator, PduError> {
let mut passed_segment_requests = None; let mut passed_segment_requests = None;
if !segment_requests.is_empty() { if !segment_requests.is_empty() {
passed_segment_requests = Some(SegmentRequests::U32Pairs(segment_requests)); passed_segment_requests = Some(SegmentRequests::U32Pairs(segment_requests));
@ -79,7 +77,7 @@ impl<'seg_reqs> NakPduCreator<'seg_reqs> {
start_of_scope: u64, start_of_scope: u64,
end_of_scope: u64, end_of_scope: u64,
segment_requests: &'seg_reqs [(u64, u64)], segment_requests: &'seg_reqs [(u64, u64)],
) -> Result<NakPduCreator<'seg_reqs>, PduError> { ) -> Result<NakPduCreator, PduError> {
let mut passed_segment_requests = None; let mut passed_segment_requests = None;
if !segment_requests.is_empty() { if !segment_requests.is_empty() {
passed_segment_requests = Some(SegmentRequests::U64Pairs(segment_requests)); passed_segment_requests = Some(SegmentRequests::U64Pairs(segment_requests));
@ -97,7 +95,7 @@ impl<'seg_reqs> NakPduCreator<'seg_reqs> {
start_of_scope: u64, start_of_scope: u64,
end_of_scope: u64, end_of_scope: u64,
segment_requests: Option<SegmentRequests<'seg_reqs>>, segment_requests: Option<SegmentRequests<'seg_reqs>>,
) -> Result<NakPduCreator<'seg_reqs>, PduError> { ) -> Result<NakPduCreator, PduError> {
// Force correct direction flag. // Force correct direction flag.
pdu_header.pdu_conf.direction = Direction::TowardsSender; pdu_header.pdu_conf.direction = Direction::TowardsSender;
if let Some(ref segment_requests) = segment_requests { if let Some(ref segment_requests) = segment_requests {
@ -269,7 +267,7 @@ impl SegReqFromBytes for u64 {
} }
} }
impl<T> Iterator for SegmentRequestIter<'_, T> impl<'a, T> Iterator for SegmentRequestIter<'a, T>
where where
T: SegReqFromBytes, T: SegReqFromBytes,
{ {
@ -282,8 +280,8 @@ where
} }
} }
impl<'a> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'_, u32> { impl<'a, 'b> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'b, u32> {
fn eq(&self, other: &SegmentRequests<'a>) -> bool { fn eq(&self, other: &SegmentRequests) -> bool {
match other { match other {
SegmentRequests::U32Pairs(pairs) => self.compare_pairs(pairs), SegmentRequests::U32Pairs(pairs) => self.compare_pairs(pairs),
SegmentRequests::U64Pairs(pairs) => { SegmentRequests::U64Pairs(pairs) => {
@ -296,8 +294,8 @@ impl<'a> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'_, u32> {
} }
} }
impl<'a> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'_, u64> { impl<'a, 'b> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'b, u64> {
fn eq(&self, other: &SegmentRequests<'a>) -> bool { fn eq(&self, other: &SegmentRequests) -> bool {
match other { match other {
SegmentRequests::U32Pairs(pairs) => { SegmentRequests::U32Pairs(pairs) => {
if pairs.is_empty() && self.seq_req_raw.is_empty() { if pairs.is_empty() && self.seq_req_raw.is_empty() {
@ -310,7 +308,7 @@ impl<'a> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'_, u64> {
} }
} }
impl<T> SegmentRequestIter<'_, T> impl<'a, T> SegmentRequestIter<'a, T>
where where
T: SegReqFromBytes + PartialEq, T: SegReqFromBytes + PartialEq,
{ {
@ -355,7 +353,6 @@ impl<T: SegReqFromBytes> SegmentRequestIter<'_, T> {
/// ///
/// The NAK format is expected to be conforming to CFDP chapter 5.2.6. /// The NAK format is expected to be conforming to CFDP chapter 5.2.6.
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct NakPduReader<'seg_reqs> { pub struct NakPduReader<'seg_reqs> {
pdu_header: PduHeader, pdu_header: PduHeader,
start_of_scope: u64, start_of_scope: u64,
@ -374,11 +371,11 @@ impl CfdpPdu for NakPduReader<'_> {
} }
impl<'seg_reqs> NakPduReader<'seg_reqs> { impl<'seg_reqs> NakPduReader<'seg_reqs> {
pub fn new(buf: &'seg_reqs [u8]) -> Result<NakPduReader<'seg_reqs>, PduError> { pub fn new(buf: &'seg_reqs [u8]) -> Result<NakPduReader, PduError> {
Self::from_bytes(buf) Self::from_bytes(buf)
} }
pub fn from_bytes(buf: &'seg_reqs [u8]) -> Result<NakPduReader<'seg_reqs>, PduError> { pub fn from_bytes(buf: &'seg_reqs [u8]) -> Result<NakPduReader, PduError> {
let (pdu_header, mut current_idx) = PduHeader::from_bytes(buf)?; let (pdu_header, mut current_idx) = PduHeader::from_bytes(buf)?;
let full_len_without_crc = pdu_header.verify_length_and_checksum(buf)?; let full_len_without_crc = pdu_header.verify_length_and_checksum(buf)?;
// Minimum length of 9: 1 byte directive field and start and end of scope for normal file // Minimum length of 9: 1 byte directive field and start and end of scope for normal file
@ -474,7 +471,7 @@ impl<'seg_reqs> NakPduReader<'seg_reqs> {
} }
} }
impl<'a> PartialEq<NakPduCreator<'a>> for NakPduReader<'_> { impl<'a, 'b> PartialEq<NakPduCreator<'a>> for NakPduReader<'b> {
fn eq(&self, other: &NakPduCreator<'a>) -> bool { fn eq(&self, other: &NakPduCreator<'a>) -> bool {
if self.pdu_header() != other.pdu_header() if self.pdu_header() != other.pdu_header()
|| self.end_of_scope() != other.end_of_scope() || self.end_of_scope() != other.end_of_scope()
@ -751,7 +748,7 @@ mod tests {
if let PduError::InvalidStartOrEndOfScopeValue = error { if let PduError::InvalidStartOrEndOfScopeValue = error {
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"invalid start or end of scope value for NAK PDU" "invalid start or end of scope for NAK PDU"
); );
} else { } else {
panic!("unexpected error {error}"); panic!("unexpected error {error}");
@ -796,7 +793,7 @@ mod tests {
nak_vec[nak_pdu.len_written() - 1] -= 1; nak_vec[nak_pdu.len_written() - 1] -= 1;
let nak_pdu_deser = NakPduReader::new(&nak_vec); let nak_pdu_deser = NakPduReader::new(&nak_vec);
assert!(nak_pdu_deser.is_err()); assert!(nak_pdu_deser.is_err());
if let Err(PduError::Checksum(raw)) = nak_pdu_deser { if let Err(PduError::ChecksumError(raw)) = nak_pdu_deser {
assert_eq!( assert_eq!(
raw, raw,
u16::from_be_bytes(nak_vec[nak_pdu.len_written() - 2..].try_into().unwrap()) u16::from_be_bytes(nak_vec[nak_pdu.len_written() - 2..].try_into().unwrap())

View File

@ -9,14 +9,10 @@ use crate::ByteConversionError;
use alloc::vec; use alloc::vec;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
#[cfg(feature = "alloc")]
pub use alloc_mod::*;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::{InvalidTlvTypeFieldError, TlvLvDataTooLargeError};
pub mod msg_to_user; pub mod msg_to_user;
pub const MIN_TLV_LEN: usize = 2; pub const MIN_TLV_LEN: usize = 2;
@ -43,26 +39,6 @@ pub trait GenericTlv {
} }
} }
pub trait ReadableTlv {
fn value(&self) -> &[u8];
/// Checks whether the value field is empty.
fn is_empty(&self) -> bool {
self.value().is_empty()
}
/// Helper method to retrieve the length of the value. Simply calls the [slice::len] method of
/// [Self::value]
fn len_value(&self) -> usize {
self.value().len()
}
/// Returns the full raw length, including the length byte.
fn len_full(&self) -> usize {
self.len_value() + 2
}
}
pub trait WritableTlv { pub trait WritableTlv {
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError>; fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError>;
fn len_written(&self) -> usize; fn len_written(&self) -> usize;
@ -76,7 +52,6 @@ pub trait WritableTlv {
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum TlvType { pub enum TlvType {
FilestoreRequest = 0x00, FilestoreRequest = 0x00,
@ -89,7 +64,6 @@ pub enum TlvType {
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum TlvTypeField { pub enum TlvTypeField {
Standard(TlvType), Standard(TlvType),
Custom(u8), Custom(u8),
@ -97,7 +71,6 @@ pub enum TlvTypeField {
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum FilestoreActionCode { pub enum FilestoreActionCode {
CreateFile = 0b0000, CreateFile = 0b0000,
@ -145,7 +118,6 @@ impl From<TlvTypeField> for u8 {
/// this will be the lifetime of that data reference. /// this will be the lifetime of that data reference.
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct Tlv<'data> { pub struct Tlv<'data> {
tlv_type_field: TlvTypeField, tlv_type_field: TlvTypeField,
#[cfg_attr(feature = "serde", serde(borrow))] #[cfg_attr(feature = "serde", serde(borrow))]
@ -153,14 +125,14 @@ pub struct Tlv<'data> {
} }
impl<'data> Tlv<'data> { impl<'data> Tlv<'data> {
pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Tlv, TlvLvDataTooLargeError> { pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Tlv, TlvLvError> {
Ok(Tlv { Ok(Tlv {
tlv_type_field: TlvTypeField::Standard(tlv_type), tlv_type_field: TlvTypeField::Standard(tlv_type),
lv: Lv::new(data)?, lv: Lv::new(data)?,
}) })
} }
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Tlv, TlvLvDataTooLargeError> { pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Tlv, TlvLvError> {
Ok(Tlv { Ok(Tlv {
tlv_type_field: TlvTypeField::Custom(tlv_type), tlv_type_field: TlvTypeField::Custom(tlv_type),
lv: Lv::new(data)?, lv: Lv::new(data)?,
@ -175,11 +147,31 @@ impl<'data> Tlv<'data> {
} }
} }
pub fn value(&self) -> &[u8] {
self.lv.value()
}
/// Checks whether the value field is empty.
pub fn is_empty(&self) -> bool {
self.value().is_empty()
}
/// Helper method to retrieve the length of the value. Simply calls the [slice::len] method of
/// [Self::value]
pub fn len_value(&self) -> usize {
self.value().len()
}
/// Returns the full raw length, including the length byte.
pub fn len_full(&self) -> usize {
self.len_value() + 2
}
/// Creates a TLV given a raw bytestream. Please note that it is not necessary to pass the /// Creates a TLV given a raw bytestream. Please note that it is not necessary to pass the
/// bytestream with the exact size of the expected TLV. This function will take care /// bytestream with the exact size of the expected TLV. This function will take care
/// of parsing the length byte, and the length of the parsed TLV can be retrieved using /// of parsing the length byte, and the length of the parsed TLV can be retrieved using
/// [Self::len_full]. /// [Self::len_full].
pub fn from_bytes(buf: &'data [u8]) -> Result<Tlv<'data>, ByteConversionError> { pub fn from_bytes(buf: &'data [u8]) -> Result<Tlv<'data>, TlvLvError> {
generic_len_check_deserialization(buf, MIN_TLV_LEN)?; generic_len_check_deserialization(buf, MIN_TLV_LEN)?;
let mut tlv = Self { let mut tlv = Self {
tlv_type_field: TlvTypeField::from(buf[0]), tlv_type_field: TlvTypeField::from(buf[0]),
@ -196,27 +188,6 @@ impl<'data> Tlv<'data> {
pub fn raw_data(&self) -> Option<&[u8]> { pub fn raw_data(&self) -> Option<&[u8]> {
self.lv.raw_data() self.lv.raw_data()
} }
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
TlvOwned {
tlv_type_field: self.tlv_type_field,
data: self.value().to_vec(),
}
}
}
#[cfg(feature = "alloc")]
impl PartialEq<TlvOwned> for Tlv<'_> {
fn eq(&self, other: &TlvOwned) -> bool {
self.tlv_type_field == other.tlv_type_field && self.value() == other.value()
}
}
impl ReadableTlv for Tlv<'_> {
fn value(&self) -> &[u8] {
self.lv.value()
}
} }
impl WritableTlv for Tlv<'_> { impl WritableTlv for Tlv<'_> {
@ -237,95 +208,22 @@ impl GenericTlv for Tlv<'_> {
} }
} }
#[cfg(feature = "alloc")] pub(crate) fn verify_tlv_type(raw_type: u8, expected_tlv_type: TlvType) -> Result<(), TlvLvError> {
pub mod alloc_mod { let tlv_type = TlvType::try_from(raw_type).map_err(|_| TlvLvError::InvalidTlvTypeField {
use super::*; found: raw_type,
expected: Some(expected_tlv_type.into()),
/// Owned variant of [Tlv] which is consequently [Clone]able and does not have a lifetime })?;
/// associated to a data slice. if tlv_type != expected_tlv_type {
#[derive(Debug, Clone, PartialEq, Eq)] return Err(TlvLvError::InvalidTlvTypeField {
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] found: tlv_type as u8,
#[cfg_attr(feature = "defmt", derive(defmt::Format))] expected: Some(expected_tlv_type as u8),
pub struct TlvOwned { });
pub(crate) tlv_type_field: TlvTypeField,
pub(crate) data: Vec<u8>,
}
impl TlvOwned {
pub fn new(tlv_type: TlvType, data: &[u8]) -> Self {
Self {
tlv_type_field: TlvTypeField::Standard(tlv_type),
data: data.to_vec(),
}
}
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Self {
Self {
tlv_type_field: TlvTypeField::Custom(tlv_type),
data: data.to_vec(),
}
}
/// Creates a TLV with an empty value field.
pub fn new_empty(tlv_type: TlvType) -> Self {
Self {
tlv_type_field: TlvTypeField::Standard(tlv_type),
data: Vec::new(),
}
}
pub fn as_tlv(&self) -> Tlv<'_> {
Tlv {
tlv_type_field: self.tlv_type_field,
// The API should ensure that the data length is never too large, so the unwrap for the
// LV creation should never be an issue.
lv: Lv::new(&self.data).expect("lv creation failed unexpectedly"),
}
}
}
impl ReadableTlv for TlvOwned {
fn value(&self) -> &[u8] {
&self.data
}
}
impl WritableTlv for TlvOwned {
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
generic_len_check_data_serialization(buf, self.data.len(), MIN_TLV_LEN)?;
buf[0] = self.tlv_type_field.into();
buf[1] = self.data.len() as u8;
buf[2..2 + self.data.len()].copy_from_slice(&self.data);
Ok(self.len_written())
}
fn len_written(&self) -> usize {
self.data.len() + 2
}
}
impl GenericTlv for TlvOwned {
fn tlv_type_field(&self) -> TlvTypeField {
self.tlv_type_field
}
}
impl From<Tlv<'_>> for TlvOwned {
fn from(value: Tlv<'_>) -> Self {
value.to_owned()
}
}
impl PartialEq<Tlv<'_>> for TlvOwned {
fn eq(&self, other: &Tlv) -> bool {
self.tlv_type_field == other.tlv_type_field && self.data == other.value()
}
} }
Ok(())
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct EntityIdTlv { pub struct EntityIdTlv {
entity_id: UnsignedByteField, entity_id: UnsignedByteField,
} }
@ -374,8 +272,11 @@ impl EntityIdTlv {
Self::len_check(buf)?; Self::len_check(buf)?;
self.entity_id self.entity_id
.write_to_be_bytes(&mut buf[2..2 + self.entity_id.size()])?; .write_to_be_bytes(&mut buf[2..2 + self.entity_id.size()])?;
// Can't fail. Tlv::new(TlvType::EntityId, &buf[2..2 + self.entity_id.size()]).map_err(|e| match e {
Ok(Tlv::new(TlvType::EntityId, &buf[2..2 + self.entity_id.size()]).unwrap()) TlvLvError::ByteConversion(e) => e,
// All other errors are impossible.
_ => panic!("unexpected TLV error"),
})
} }
} }
@ -398,26 +299,24 @@ impl GenericTlv for EntityIdTlv {
} }
} }
impl TryFrom<Tlv<'_>> for EntityIdTlv { impl<'data> TryFrom<Tlv<'data>> for EntityIdTlv {
type Error = TlvLvError; type Error = TlvLvError;
fn try_from(value: Tlv) -> Result<Self, TlvLvError> { fn try_from(value: Tlv) -> Result<Self, Self::Error> {
match value.tlv_type_field { match value.tlv_type_field {
TlvTypeField::Standard(tlv_type) => { TlvTypeField::Standard(tlv_type) => {
if tlv_type != TlvType::EntityId { if tlv_type != TlvType::EntityId {
return Err(InvalidTlvTypeFieldError { return Err(TlvLvError::InvalidTlvTypeField {
found: tlv_type as u8, found: tlv_type as u8,
expected: Some(TlvType::EntityId as u8), expected: Some(TlvType::EntityId as u8),
} });
.into());
} }
} }
TlvTypeField::Custom(val) => { TlvTypeField::Custom(val) => {
return Err(InvalidTlvTypeFieldError { return Err(TlvLvError::InvalidTlvTypeField {
found: val, found: val,
expected: Some(TlvType::EntityId as u8), expected: Some(TlvType::EntityId as u8),
} });
.into());
} }
} }
let len_value = value.value().len(); let len_value = value.value().len();
@ -449,7 +348,6 @@ pub fn fs_request_has_second_filename(action_code: FilestoreActionCode) -> bool
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
struct FilestoreTlvBase<'first_name, 'second_name> { struct FilestoreTlvBase<'first_name, 'second_name> {
pub action_code: FilestoreActionCode, pub action_code: FilestoreActionCode,
#[cfg_attr(feature = "serde", serde(borrow))] #[cfg_attr(feature = "serde", serde(borrow))]
@ -663,7 +561,6 @@ impl GenericTlv for FilestoreRequestTlv<'_, '_> {
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct FilestoreResponseTlv<'first_name, 'second_name, 'fs_msg> { pub struct FilestoreResponseTlv<'first_name, 'second_name, 'fs_msg> {
#[cfg_attr(feature = "serde", serde(borrow))] #[cfg_attr(feature = "serde", serde(borrow))]
base: FilestoreTlvBase<'first_name, 'second_name>, base: FilestoreTlvBase<'first_name, 'second_name>,
@ -848,23 +745,6 @@ impl GenericTlv for FilestoreResponseTlv<'_, '_, '_> {
} }
} }
pub(crate) fn verify_tlv_type(
raw_type: u8,
expected_tlv_type: TlvType,
) -> Result<(), InvalidTlvTypeFieldError> {
let tlv_type = TlvType::try_from(raw_type).map_err(|_| InvalidTlvTypeFieldError {
found: raw_type,
expected: Some(expected_tlv_type.into()),
})?;
if tlv_type != expected_tlv_type {
return Err(InvalidTlvTypeFieldError {
found: tlv_type as u8,
expected: Some(expected_tlv_type as u8),
});
}
Ok(())
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@ -1052,14 +932,14 @@ mod tests {
let tlv_res = Tlv::new(TlvType::MsgToUser, &buf_too_large); let tlv_res = Tlv::new(TlvType::MsgToUser, &buf_too_large);
assert!(tlv_res.is_err()); assert!(tlv_res.is_err());
let error = tlv_res.unwrap_err(); let error = tlv_res.unwrap_err();
match error { if let TlvLvError::DataTooLarge(size) = error {
TlvLvDataTooLargeError(size) => {
assert_eq!(size, u8::MAX as usize + 1); assert_eq!(size, u8::MAX as usize + 1);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"data with size 256 larger than allowed 255 bytes" "data with size 256 larger than allowed 255 bytes"
); );
} } else {
panic!("unexpected error {:?}", error);
} }
} }
@ -1369,8 +1249,7 @@ mod tests {
let error = EntityIdTlv::try_from(msg_to_user_tlv); let error = EntityIdTlv::try_from(msg_to_user_tlv);
assert!(error.is_err()); assert!(error.is_err());
let error = error.unwrap_err(); let error = error.unwrap_err();
if let TlvLvError::InvalidTlvTypeField(InvalidTlvTypeFieldError { found, expected }) = error if let TlvLvError::InvalidTlvTypeField { found, expected } = error {
{
assert_eq!(found, TlvType::MsgToUser as u8); assert_eq!(found, TlvType::MsgToUser as u8);
assert_eq!(expected, Some(TlvType::EntityId as u8)); assert_eq!(expected, Some(TlvType::EntityId as u8));
assert_eq!( assert_eq!(
@ -1414,71 +1293,4 @@ mod tests {
assert_eq!(tlv_as_vec[0], 20); assert_eq!(tlv_as_vec[0], 20);
assert_eq!(tlv_as_vec[1], 0); assert_eq!(tlv_as_vec[1], 0);
} }
#[test]
fn test_tlv_to_owned() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = Tlv::new(TlvType::EntityId, &buf[0..1]);
assert!(tlv_res.is_ok());
let tlv_res = tlv_res.unwrap();
let tlv_owned = tlv_res.to_owned();
assert_eq!(tlv_res, tlv_owned);
let tlv_owned_from_conversion: TlvOwned = tlv_res.into();
assert_eq!(tlv_owned_from_conversion, tlv_owned);
assert_eq!(tlv_owned_from_conversion, tlv_res);
}
#[test]
fn test_owned_tlv() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = TlvOwned::new(TlvType::EntityId, &buf[0..1]);
assert_eq!(
tlv_res.tlv_type_field(),
TlvTypeField::Standard(TlvType::EntityId)
);
assert_eq!(tlv_res.len_full(), 3);
assert_eq!(tlv_res.value().len(), 1);
assert_eq!(tlv_res.len_value(), 1);
assert!(!tlv_res.is_empty());
assert_eq!(tlv_res.value()[0], 5);
}
#[test]
fn test_owned_tlv_empty() {
let tlv_res = TlvOwned::new_empty(TlvType::FlowLabel);
assert_eq!(
tlv_res.tlv_type_field(),
TlvTypeField::Standard(TlvType::FlowLabel)
);
assert_eq!(tlv_res.len_full(), 2);
assert_eq!(tlv_res.value().len(), 0);
assert_eq!(tlv_res.len_value(), 0);
assert!(tlv_res.is_empty());
}
#[test]
fn test_owned_tlv_custom_type() {
let tlv_res = TlvOwned::new_with_custom_type(32, &[]);
assert_eq!(tlv_res.tlv_type_field(), TlvTypeField::Custom(32));
assert_eq!(tlv_res.len_full(), 2);
assert_eq!(tlv_res.value().len(), 0);
assert_eq!(tlv_res.len_value(), 0);
assert!(tlv_res.is_empty());
}
#[test]
fn test_owned_tlv_conversion_to_bytes() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = Tlv::new(TlvType::EntityId, &buf[0..1]);
assert!(tlv_res.is_ok());
let tlv_res = tlv_res.unwrap();
let tlv_owned_from_conversion: TlvOwned = tlv_res.into();
assert_eq!(tlv_res.to_vec(), tlv_owned_from_conversion.to_vec());
}
} }
View File
@ -1,11 +1,6 @@
//! Abstractions for the Message to User CFDP TLV subtype. //! Abstractions for the Message to User CFDP TLV subtype.
#[cfg(feature = "alloc")] use super::{GenericTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv};
use super::TlvOwned; use crate::ByteConversionError;
use super::{GenericTlv, ReadableTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv};
use crate::{
cfdp::{InvalidTlvTypeFieldError, TlvLvDataTooLargeError},
ByteConversionError,
};
use delegate::delegate; use delegate::delegate;
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
@ -15,7 +10,7 @@ pub struct MsgToUserTlv<'data> {
impl<'data> MsgToUserTlv<'data> { impl<'data> MsgToUserTlv<'data> {
/// Create a new message to user TLV where the type field is set correctly. /// Create a new message to user TLV where the type field is set correctly.
pub fn new(value: &'data [u8]) -> Result<MsgToUserTlv<'data>, TlvLvDataTooLargeError> { pub fn new(value: &'data [u8]) -> Result<MsgToUserTlv<'data>, TlvLvError> {
Ok(Self { Ok(Self {
tlv: Tlv::new(TlvType::MsgToUser, value)?, tlv: Tlv::new(TlvType::MsgToUser, value)?,
}) })
@ -65,38 +60,21 @@ impl<'data> MsgToUserTlv<'data> {
match msg_to_user.tlv.tlv_type_field() { match msg_to_user.tlv.tlv_type_field() {
TlvTypeField::Standard(tlv_type) => { TlvTypeField::Standard(tlv_type) => {
if tlv_type != TlvType::MsgToUser { if tlv_type != TlvType::MsgToUser {
return Err(InvalidTlvTypeFieldError { return Err(TlvLvError::InvalidTlvTypeField {
found: tlv_type as u8, found: tlv_type as u8,
expected: Some(TlvType::MsgToUser as u8), expected: Some(TlvType::MsgToUser as u8),
} });
.into());
} }
} }
TlvTypeField::Custom(raw) => { TlvTypeField::Custom(raw) => {
return Err(InvalidTlvTypeFieldError { return Err(TlvLvError::InvalidTlvTypeField {
found: raw, found: raw,
expected: Some(TlvType::MsgToUser as u8), expected: Some(TlvType::MsgToUser as u8),
} });
.into());
} }
} }
Ok(msg_to_user) Ok(msg_to_user)
} }
pub fn to_tlv(&self) -> Tlv<'data> {
self.tlv
}
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
self.tlv.to_owned()
}
}
impl<'a> From<MsgToUserTlv<'a>> for Tlv<'a> {
fn from(value: MsgToUserTlv<'a>) -> Tlv<'a> {
value.to_tlv()
}
} }
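A minimal sketch of how the message-to-user wrapper above can be built and reduced to a plain TLV; the `msg_to_user` module path is a guess from the diff context, the error type differs between the two versions being compared, and the trait imports may need adjusting:

```rust
// Minimal sketch, assuming the MsgToUserTlv API shown in this diff.
use spacepackets::cfdp::tlv::msg_to_user::MsgToUserTlv;
use spacepackets::cfdp::tlv::{GenericTlv, ReadableTlv, TlvType, TlvTypeField};

fn build_msg_to_user() {
    let payload: [u8; 4] = [1, 2, 3, 4];
    // Construction only fails if the payload exceeds the 255 byte TLV value limit.
    let msg_to_user = MsgToUserTlv::new(&payload).expect("payload too large for a TLV");
    // Reduce the wrapper to a generic TLV, e.g. for collecting mixed TLVs in a list.
    let tlv = msg_to_user.to_tlv();
    assert_eq!(tlv.tlv_type_field(), TlvTypeField::Standard(TlvType::MsgToUser));
    assert_eq!(tlv.value(), payload);
}
```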
impl WritableTlv for MsgToUserTlv<'_> { impl WritableTlv for MsgToUserTlv<'_> {
@ -161,40 +139,6 @@ mod tests {
); );
} }
#[test]
fn test_msg_to_user_type_reduction() {
let custom_value: [u8; 4] = [1, 2, 3, 4];
let msg_to_user = MsgToUserTlv::new(&custom_value).unwrap();
let tlv = msg_to_user.to_tlv();
assert_eq!(
tlv.tlv_type_field(),
TlvTypeField::Standard(TlvType::MsgToUser)
);
assert_eq!(tlv.value(), custom_value);
}
#[test]
fn test_msg_to_user_to_tlv() {
let custom_value: [u8; 4] = [1, 2, 3, 4];
let msg_to_user = MsgToUserTlv::new(&custom_value).unwrap();
let tlv: Tlv = msg_to_user.into();
assert_eq!(msg_to_user.to_tlv(), tlv);
}
#[test]
fn test_msg_to_user_owner_converter() {
let custom_value: [u8; 4] = [1, 2, 3, 4];
let msg_to_user = MsgToUserTlv::new(&custom_value).unwrap();
let tlv = msg_to_user.to_owned();
assert_eq!(
tlv.tlv_type_field(),
TlvTypeField::Standard(TlvType::MsgToUser)
);
assert_eq!(tlv.value(), custom_value);
}
#[test] #[test]
fn test_reserved_msg_deserialization() { fn test_reserved_msg_deserialization() {
let custom_value: [u8; 3] = [1, 2, 3]; let custom_value: [u8; 3] = [1, 2, 3];
@ -210,9 +154,9 @@ mod tests {
fn test_reserved_msg_deserialization_invalid_type() { fn test_reserved_msg_deserialization_invalid_type() {
let trash: [u8; 5] = [TlvType::FlowLabel as u8, 3, 1, 2, 3]; let trash: [u8; 5] = [TlvType::FlowLabel as u8, 3, 1, 2, 3];
let error = MsgToUserTlv::from_bytes(&trash).unwrap_err(); let error = MsgToUserTlv::from_bytes(&trash).unwrap_err();
if let TlvLvError::InvalidTlvTypeField(inner) = error { if let TlvLvError::InvalidTlvTypeField { found, expected } = error {
assert_eq!(inner.found, TlvType::FlowLabel as u8); assert_eq!(found, TlvType::FlowLabel as u8);
assert_eq!(inner.expected, Some(TlvType::MsgToUser as u8)); assert_eq!(expected, Some(TlvType::MsgToUser as u8));
} else { } else {
panic!("Wrong error type returned: {:?}", error); panic!("Wrong error type returned: {:?}", error);
} }
View File
@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Eq, PartialEq, Copy, Clone, IntoPrimitive, TryFromPrimitive)] #[derive(Debug, Eq, PartialEq, Copy, Clone, IntoPrimitive, TryFromPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
pub enum Subservice { pub enum Subservice {
// Regular HK // Regular HK
View File
@ -1,16 +1,18 @@
//! Common definitions and helpers required to create PUS TMTC packets according to //! Common definitions and helpers required to create PUS TMTC packets according to
//! [ECSS-E-ST-70-41C](https://ecss.nl/standard/ecss-e-st-70-41c-space-engineering-telemetry-and-telecommand-packet-utilization-15-april-2016/) //! [ECSS-E-ST-70-41C](https://ecss.nl/standard/ecss-e-st-70-41c-space-engineering-telemetry-and-telecommand-packet-utilization-15-april-2016/)
//! //!
//! You can find the PUS telecommand types in the [tc] module and the PUS telemetry //! You can find the PUS telecommand definitions in the [tc] module and the PUS telemetry definitions
//! types inside the [tm] module. //! inside the [tm] module.
use crate::{ByteConversionError, CcsdsPacket, CRC_CCITT_FALSE}; use crate::{ByteConversionError, CcsdsPacket, CRC_CCITT_FALSE};
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
use core::fmt::Debug; use core::fmt::{Debug, Display, Formatter};
use core::mem::size_of; use core::mem::size_of;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::error::Error;
pub mod event; pub mod event;
pub mod hk; pub mod hk;
@ -20,6 +22,7 @@ pub mod tm;
pub mod verification; pub mod verification;
pub type CrcType = u16; pub type CrcType = u16;
pub const CCSDS_HEADER_LEN: usize = size_of::<crate::zc::SpHeader>();
#[derive(Debug, Copy, Clone, Eq, PartialEq, IntoPrimitive, TryFromPrimitive)] #[derive(Debug, Copy, Clone, Eq, PartialEq, IntoPrimitive, TryFromPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
@ -71,7 +74,6 @@ pub enum PusServiceId {
/// All PUS versions. Only PUS C is supported by this library. /// All PUS versions. Only PUS C is supported by this library.
#[derive(PartialEq, Eq, Copy, Clone, Debug)] #[derive(PartialEq, Eq, Copy, Clone, Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[non_exhaustive] #[non_exhaustive]
pub enum PusVersion { pub enum PusVersion {
EsaPus = 0, EsaPus = 0,
@ -146,19 +148,49 @@ pub enum PfcReal {
DoubleMilStd = 4, DoubleMilStd = 4,
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum PusError { pub enum PusError {
#[error("PUS version {0:?} not supported")]
VersionNotSupported(PusVersion), VersionNotSupported(PusVersion),
#[error("checksum verification for crc16 {0:#06x} failed")]
ChecksumFailure(u16), ChecksumFailure(u16),
/// CRC16 needs to be calculated first /// CRC16 needs to be calculated first
//#[error("crc16 was not calculated")] CrcCalculationMissing,
//CrcCalculationMissing, ByteConversion(ByteConversionError),
#[error("pus error: {0}")] }
ByteConversion(#[from] ByteConversionError),
impl Display for PusError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
PusError::VersionNotSupported(v) => {
write!(f, "PUS version {v:?} not supported")
}
PusError::ChecksumFailure(crc) => {
write!(f, "checksum verification for crc16 {crc:#06x} failed")
}
PusError::CrcCalculationMissing => {
write!(f, "crc16 was not calculated")
}
PusError::ByteConversion(e) => {
write!(f, "pus error: {e}")
}
}
}
}
#[cfg(feature = "std")]
impl Error for PusError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
if let PusError::ByteConversion(e) = self {
return Some(e);
}
None
}
}
impl From<ByteConversionError> for PusError {
fn from(e: ByteConversionError) -> Self {
PusError::ByteConversion(e)
}
} }
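A minimal sketch of consuming the `PusError` type defined above; which variants exist (for instance the missing-CRC variant) depends on the crate version shown in each diff column, hence the catch-all arm:

```rust
// Minimal sketch: mapping PusError variants to short diagnostics.
use spacepackets::ecss::PusError;

fn describe_pus_error(error: &PusError) -> &'static str {
    match error {
        PusError::VersionNotSupported(_) => "only PUS C is supported by this library",
        PusError::ChecksumFailure(_) => "CRC16 verification over the raw packet failed",
        PusError::ByteConversion(_) => "source or target buffer had the wrong size",
        // Catch-all for variants only present in some versions of the enum.
        _ => "other PUS error",
    }
}
```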
/// Generic trait to describe common attributes for both PUS Telecommands (TC) and PUS Telemetry /// Generic trait to describe common attributes for both PUS Telecommands (TC) and PUS Telemetry
@ -173,12 +205,13 @@ pub trait PusPacket: CcsdsPacket {
fn crc16(&self) -> Option<u16>; fn crc16(&self) -> Option<u16>;
} }
pub(crate) fn crc_from_raw_data(raw_data: &[u8]) -> Result<u16, ByteConversionError> { pub(crate) fn crc_from_raw_data(raw_data: &[u8]) -> Result<u16, PusError> {
if raw_data.len() < 2 { if raw_data.len() < 2 {
return Err(ByteConversionError::FromSliceTooSmall { return Err(ByteConversionError::FromSliceTooSmall {
found: raw_data.len(), found: raw_data.len(),
expected: 2, expected: 2,
}); }
.into());
} }
Ok(u16::from_be_bytes( Ok(u16::from_be_bytes(
raw_data[raw_data.len() - 2..raw_data.len()] raw_data[raw_data.len() - 2..raw_data.len()]
@ -193,16 +226,35 @@ pub(crate) fn calc_pus_crc16(bytes: &[u8]) -> u16 {
digest.finalize() digest.finalize()
} }
pub(crate) fn crc_procedure(
calc_on_serialization: bool,
cached_crc16: &Option<u16>,
start_idx: usize,
curr_idx: usize,
slice: &[u8],
) -> Result<u16, PusError> {
let crc16;
if calc_on_serialization {
crc16 = calc_pus_crc16(&slice[start_idx..curr_idx])
} else if cached_crc16.is_none() {
return Err(PusError::CrcCalculationMissing);
} else {
crc16 = cached_crc16.unwrap();
}
Ok(crc16)
}
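Both the cached-CRC procedure above and the verification helpers further down rely on the CCITT-FALSE property that the checksum over a frame with its CRC appended evaluates to zero. A standalone sketch using the crate's exported `CRC_CCITT_FALSE` constant (not the crate-private helpers themselves):

```rust
// Minimal sketch: manual CRC handling equivalent to what the helpers above do internally.
use spacepackets::CRC_CCITT_FALSE;

/// Appends the big-endian CRC16 over `frame` to the frame itself.
fn append_crc16(frame: &mut Vec<u8>) {
    let crc16 = CRC_CCITT_FALSE.checksum(frame.as_slice());
    frame.extend_from_slice(&crc16.to_be_bytes());
}

/// For CRC-16/IBM-3740 (CCITT-FALSE), the checksum over data plus its appended
/// CRC is zero, which is how received packets are verified.
fn crc16_is_valid(raw_packet: &[u8]) -> bool {
    raw_packet.len() >= 2 && CRC_CCITT_FALSE.checksum(raw_packet) == 0
}
```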
pub(crate) fn user_data_from_raw( pub(crate) fn user_data_from_raw(
current_idx: usize, current_idx: usize,
total_len: usize, total_len: usize,
slice: &[u8], slice: &[u8],
) -> Result<&[u8], ByteConversionError> { ) -> Result<&[u8], PusError> {
match current_idx { match current_idx {
_ if current_idx > total_len - 2 => Err(ByteConversionError::FromSliceTooSmall { _ if current_idx > total_len - 2 => Err(ByteConversionError::FromSliceTooSmall {
found: total_len - 2, found: total_len - 2,
expected: current_idx, expected: current_idx,
}), }
.into()),
_ => Ok(&slice[current_idx..total_len - 2]), _ => Ok(&slice[current_idx..total_len - 2]),
} }
} }
@ -212,7 +264,7 @@ pub(crate) fn verify_crc16_ccitt_false_from_raw_to_pus_error(
crc16: u16, crc16: u16,
) -> Result<(), PusError> { ) -> Result<(), PusError> {
verify_crc16_ccitt_false_from_raw(raw_data) verify_crc16_ccitt_false_from_raw(raw_data)
.then_some(()) .then(|| ())
.ok_or(PusError::ChecksumFailure(crc16)) .ok_or(PusError::ChecksumFailure(crc16))
} }
@ -228,13 +280,9 @@ pub(crate) fn verify_crc16_ccitt_false_from_raw(raw_data: &[u8]) -> bool {
macro_rules! ccsds_impl { macro_rules! ccsds_impl {
() => { () => {
delegate!(to self.sp_header { delegate!(to self.sp_header {
#[inline]
fn ccsds_version(&self) -> u8; fn ccsds_version(&self) -> u8;
#[inline]
fn packet_id(&self) -> crate::PacketId; fn packet_id(&self) -> crate::PacketId;
#[inline]
fn psc(&self) -> crate::PacketSequenceCtrl; fn psc(&self) -> crate::PacketSequenceCtrl;
#[inline]
fn data_len(&self) -> u16; fn data_len(&self) -> u16;
}); });
} }
@ -243,11 +291,8 @@ macro_rules! ccsds_impl {
macro_rules! sp_header_impls { macro_rules! sp_header_impls {
() => { () => {
delegate!(to self.sp_header { delegate!(to self.sp_header {
#[inline]
pub fn set_apid(&mut self, apid: u16) -> bool; pub fn set_apid(&mut self, apid: u16) -> bool;
#[inline]
pub fn set_seq_count(&mut self, seq_count: u16) -> bool; pub fn set_seq_count(&mut self, seq_count: u16) -> bool;
#[inline]
pub fn set_seq_flags(&mut self, seq_flag: SequenceFlags); pub fn set_seq_flags(&mut self, seq_flag: SequenceFlags);
}); });
} }
View File
@ -54,7 +54,6 @@ pub enum SchedStatus {
} }
impl From<bool> for SchedStatus { impl From<bool> for SchedStatus {
#[inline]
fn from(value: bool) -> Self { fn from(value: bool) -> Self {
if value { if value {
SchedStatus::Enabled SchedStatus::Enabled
View File
@ -9,11 +9,9 @@
//! use spacepackets::ecss::tc::{PusTcCreator, PusTcReader, PusTcSecondaryHeader}; //! use spacepackets::ecss::tc::{PusTcCreator, PusTcReader, PusTcSecondaryHeader};
//! //!
//! // Create a ping telecommand with no user application data //! // Create a ping telecommand with no user application data
//! let pus_tc = PusTcCreator::new_no_app_data( //! let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).unwrap();
//! SpHeader::new_from_apid(0x02), //! let tc_header = PusTcSecondaryHeader::new_simple(17, 1);
//! PusTcSecondaryHeader::new_simple(17, 1), //! let pus_tc = PusTcCreator::new_no_app_data(&mut sph, tc_header, true);
//! true
//! );
//! println!("{:?}", pus_tc); //! println!("{:?}", pus_tc);
//! assert_eq!(pus_tc.service(), 17); //! assert_eq!(pus_tc.service(), 17);
//! assert_eq!(pus_tc.subservice(), 1); //! assert_eq!(pus_tc.subservice(), 1);
@ -45,11 +43,13 @@ use delegate::delegate;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy::{FromBytes, IntoBytes}; use zerocopy::AsBytes;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
pub use legacy_tc::*;
/// PUS C secondary header length is fixed /// PUS C secondary header length is fixed
pub const PUC_TC_SECONDARY_HEADER_LEN: usize = size_of::<zc::PusTcSecondaryHeader>(); pub const PUC_TC_SECONDARY_HEADER_LEN: usize = size_of::<zc::PusTcSecondaryHeader>();
pub const PUS_TC_MIN_LEN_WITHOUT_APP_DATA: usize = pub const PUS_TC_MIN_LEN_WITHOUT_APP_DATA: usize =
@ -61,7 +61,6 @@ pub trait IsPusTelecommand {}
#[derive(Debug, Eq, PartialEq, Copy, Clone, IntoPrimitive, TryFromPrimitive)] #[derive(Debug, Eq, PartialEq, Copy, Clone, IntoPrimitive, TryFromPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)] #[repr(u8)]
enum AckOpts { enum AckOpts {
Acceptance = 0b1000, Acceptance = 0b1000,
@ -86,9 +85,9 @@ pub trait GenericPusTcSecondaryHeader {
pub mod zc { pub mod zc {
use crate::ecss::tc::GenericPusTcSecondaryHeader; use crate::ecss::tc::GenericPusTcSecondaryHeader;
use crate::ecss::{PusError, PusVersion}; use crate::ecss::{PusError, PusVersion};
use zerocopy::{FromBytes, Immutable, IntoBytes, NetworkEndian, Unaligned, U16}; use zerocopy::{AsBytes, FromBytes, FromZeroes, NetworkEndian, Unaligned, U16};
#[derive(FromBytes, IntoBytes, Immutable, Unaligned)] #[derive(FromZeroes, FromBytes, AsBytes, Unaligned)]
#[repr(C)] #[repr(C)]
pub struct PusTcSecondaryHeader { pub struct PusTcSecondaryHeader {
version_ack: u8, version_ack: u8,
@ -113,36 +112,40 @@ pub mod zc {
} }
impl GenericPusTcSecondaryHeader for PusTcSecondaryHeader { impl GenericPusTcSecondaryHeader for PusTcSecondaryHeader {
#[inline]
fn pus_version(&self) -> PusVersion { fn pus_version(&self) -> PusVersion {
PusVersion::try_from((self.version_ack >> 4) & 0b1111).unwrap_or(PusVersion::Invalid) PusVersion::try_from(self.version_ack >> 4 & 0b1111).unwrap_or(PusVersion::Invalid)
} }
#[inline]
fn ack_flags(&self) -> u8 { fn ack_flags(&self) -> u8 {
self.version_ack & 0b1111 self.version_ack & 0b1111
} }
#[inline]
fn service(&self) -> u8 { fn service(&self) -> u8 {
self.service self.service
} }
#[inline]
fn subservice(&self) -> u8 { fn subservice(&self) -> u8 {
self.subservice self.subservice
} }
#[inline]
fn source_id(&self) -> u16 { fn source_id(&self) -> u16 {
self.source_id.get() self.source_id.get()
} }
} }
impl PusTcSecondaryHeader {
pub fn write_to_bytes(&self, slice: &mut [u8]) -> Option<()> {
self.write_to(slice)
}
pub fn from_bytes(slice: &[u8]) -> Option<Self> {
Self::read_from(slice)
}
}
} }
#[derive(PartialEq, Eq, Copy, Clone, Debug)] #[derive(PartialEq, Eq, Copy, Clone, Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct PusTcSecondaryHeader { pub struct PusTcSecondaryHeader {
pub service: u8, pub service: u8,
pub subservice: u8, pub subservice: u8,
@ -152,27 +155,22 @@ pub struct PusTcSecondaryHeader {
} }
impl GenericPusTcSecondaryHeader for PusTcSecondaryHeader { impl GenericPusTcSecondaryHeader for PusTcSecondaryHeader {
#[inline]
fn pus_version(&self) -> PusVersion { fn pus_version(&self) -> PusVersion {
self.version self.version
} }
#[inline]
fn ack_flags(&self) -> u8 { fn ack_flags(&self) -> u8 {
self.ack self.ack
} }
#[inline]
fn service(&self) -> u8 { fn service(&self) -> u8 {
self.service self.service
} }
#[inline]
fn subservice(&self) -> u8 { fn subservice(&self) -> u8 {
self.subservice self.subservice
} }
#[inline]
fn source_id(&self) -> u16 { fn source_id(&self) -> u16 {
self.source_id self.source_id
} }
@ -193,7 +191,6 @@ impl TryFrom<zc::PusTcSecondaryHeader> for PusTcSecondaryHeader {
} }
impl PusTcSecondaryHeader { impl PusTcSecondaryHeader {
#[inline]
pub fn new_simple(service: u8, subservice: u8) -> Self { pub fn new_simple(service: u8, subservice: u8) -> Self {
PusTcSecondaryHeader { PusTcSecondaryHeader {
service, service,
@ -204,7 +201,6 @@ impl PusTcSecondaryHeader {
} }
} }
#[inline]
pub fn new(service: u8, subservice: u8, ack: u8, source_id: u16) -> Self { pub fn new(service: u8, subservice: u8, ack: u8, source_id: u16) -> Self {
PusTcSecondaryHeader { PusTcSecondaryHeader {
service, service,
@ -216,49 +212,89 @@ impl PusTcSecondaryHeader {
} }
} }
/// This class can be used to create a PUS C telecommand packet. It is the primary data structure to pub mod legacy_tc {
/// generate the raw byte representation of a PUS telecommand. use crate::ecss::tc::{
zc, GenericPusTcSecondaryHeader, IsPusTelecommand, PusTcSecondaryHeader, ACK_ALL,
PUC_TC_SECONDARY_HEADER_LEN, PUS_TC_MIN_LEN_WITHOUT_APP_DATA,
};
use crate::ecss::{
ccsds_impl, crc_from_raw_data, crc_procedure, sp_header_impls,
verify_crc16_ccitt_false_from_raw_to_pus_error, PusError, PusPacket, WritablePusPacket,
CCSDS_HEADER_LEN,
};
use crate::ecss::{user_data_from_raw, PusVersion};
use crate::SequenceFlags;
use crate::{ByteConversionError, CcsdsPacket, PacketType, SpHeader, CRC_CCITT_FALSE};
use core::mem::size_of;
use delegate::delegate;
use zerocopy::AsBytes;
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
/// This class models the PUS C telecommand packet. It is the primary data structure to generate the
/// raw byte representation of a PUS telecommand or to deserialize from one from raw bytes.
/// ///
/// This class also derives the [serde::Serialize] and [serde::Deserialize] trait if the /// This class also derives the [serde::Serialize] and [serde::Deserialize] trait if the
/// [serde] feature is used, which allows to send around TC packets in a raw byte format using a /// [serde] feature is used, which allows to send around TC packets in a raw byte format using a
/// serde provider like [postcard](https://docs.rs/postcard/latest/postcard/). /// serde provider like [postcard](https://docs.rs/postcard/latest/postcard/).
/// ///
/// There is no spare bytes support yet. /// There is no spare bytes support yet.
#[derive(Copy, Clone, Debug, PartialEq, Eq)] ///
/// # Lifetimes
///
/// * `'raw_data` - If the TC is not constructed from a raw slice, this will be the life time of
/// a buffer where the user provided application data will be serialized into. If it
/// is, this is the lifetime of the raw byte slice it is constructed from.
#[derive(Eq, Copy, Clone, Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))] pub struct PusTc<'raw_data> {
pub struct PusTcCreator<'app_data> {
sp_header: SpHeader, sp_header: SpHeader,
pub sec_header: PusTcSecondaryHeader, pub sec_header: PusTcSecondaryHeader,
app_data: &'app_data [u8], /// If this is set to false, a manual call to [PusTc::calc_own_crc16] or
/// [PusTc::update_packet_fields] is necessary for the serialized or cached CRC16 to be valid.
pub calc_crc_on_serialization: bool,
#[cfg_attr(feature = "serde", serde(skip))]
raw_data: Option<&'raw_data [u8]>,
app_data: &'raw_data [u8],
crc16: Option<u16>,
} }
impl<'app_data> PusTcCreator<'app_data> { impl<'raw_data> PusTc<'raw_data> {
/// Generates a new struct instance. /// Generates a new struct instance.
/// ///
/// # Arguments /// # Arguments
/// ///
/// * `sp_header` - Space packet header information. The correct packet type and the secondary /// * `sp_header` - Space packet header information. The correct packet type will be set
/// header flag are set correctly by the constructor. /// automatically
/// * `sec_header` - Information contained in the data field header, including the service /// * `sec_header` - Information contained in the data field header, including the service
/// and subservice type /// and subservice type
/// * `app_data` - Custom application data /// * `app_data` - Custom application data
/// * `set_ccsds_len` - Can be used to automatically update the CCSDS space packet data length /// * `set_ccsds_len` - Can be used to automatically update the CCSDS space packet data length
/// field. If this is not set to true, [Self::update_ccsds_data_len] can be called to set /// field. If this is not set to true, [PusTc::update_ccsds_data_len] can be called to set
/// the correct value to this field manually /// the correct value to this field manually
#[inline] #[deprecated(
since = "0.7.0",
note = "Use specialized PusTcCreator or PusTcReader classes instead"
)]
pub fn new( pub fn new(
mut sp_header: SpHeader, sp_header: &mut SpHeader,
sec_header: PusTcSecondaryHeader, sec_header: PusTcSecondaryHeader,
app_data: &'app_data [u8], app_data: Option<&'raw_data [u8]>,
set_ccsds_len: bool, set_ccsds_len: bool,
) -> Self { ) -> Self {
sp_header.set_packet_type(PacketType::Tc); sp_header.set_packet_type(PacketType::Tc);
sp_header.set_sec_header_flag(); sp_header.set_sec_header_flag();
let mut pus_tc = Self { let mut pus_tc = Self {
sp_header, sp_header: *sp_header,
app_data, raw_data: None,
app_data: app_data.unwrap_or(&[]),
sec_header, sec_header,
calc_crc_on_serialization: true,
crc16: None,
}; };
if set_ccsds_len { if set_ccsds_len {
pus_tc.update_ccsds_data_len(); pus_tc.update_ccsds_data_len();
@ -266,16 +302,20 @@ impl<'app_data> PusTcCreator<'app_data> {
pus_tc pus_tc
} }
/// Simplified version of the [Self::new] function which allows to only specify service /// Simplified version of the [PusTc::new] function which allows to only specify service and
/// and subservice instead of the full PUS TC secondary header. /// subservice instead of the full PUS TC secondary header.
#[inline] #[deprecated(
since = "0.7.0",
note = "Use specialized PusTcCreator or PusTcReader classes instead"
)]
pub fn new_simple( pub fn new_simple(
sph: SpHeader, sph: &mut SpHeader,
service: u8, service: u8,
subservice: u8, subservice: u8,
app_data: &'app_data [u8], app_data: Option<&'raw_data [u8]>,
set_ccsds_len: bool, set_ccsds_len: bool,
) -> Self { ) -> Self {
#[allow(deprecated)]
Self::new( Self::new(
sph, sph,
PusTcSecondaryHeader::new(service, subservice, ACK_ALL, 0), PusTcSecondaryHeader::new(service, subservice, ACK_ALL, 0),
@ -284,26 +324,10 @@ impl<'app_data> PusTcCreator<'app_data> {
) )
} }
#[inline]
pub fn new_no_app_data(
sp_header: SpHeader,
sec_header: PusTcSecondaryHeader,
set_ccsds_len: bool,
) -> Self {
Self::new(sp_header, sec_header, &[], set_ccsds_len)
}
#[inline]
pub fn sp_header(&self) -> &SpHeader { pub fn sp_header(&self) -> &SpHeader {
&self.sp_header &self.sp_header
} }
#[inline]
pub fn sp_header_mut(&mut self) -> &mut SpHeader {
&mut self.sp_header
}
#[inline]
pub fn set_ack_field(&mut self, ack: u8) -> bool { pub fn set_ack_field(&mut self, ack: u8) -> bool {
if ack > 0b1111 { if ack > 0b1111 {
return false; return false;
@ -312,7 +336,6 @@ impl<'app_data> PusTcCreator<'app_data> {
true true
} }
#[inline]
pub fn set_source_id(&mut self, source_id: u16) { pub fn set_source_id(&mut self, source_id: u16) {
self.sec_header.source_id = source_id; self.sec_header.source_id = source_id;
} }
@ -320,48 +343,122 @@ impl<'app_data> PusTcCreator<'app_data> {
sp_header_impls!(); sp_header_impls!();
/// Calculate the CCSDS space packet data length field and set it /// Calculate the CCSDS space packet data length field and set it
/// This is called automatically if the `set_ccsds_len` argument in the [Self::new] call was /// This is called automatically if the `set_ccsds_len` argument in the [PusTc::new] call was
/// used. /// used.
/// If this was not done or the application data is set or changed after construction, /// If this was not done or the application data is set or changed after construction,
/// this function needs to be called to ensure that the data length field of the CCSDS header /// this function needs to be called to ensure that the data length field of the CCSDS header
/// is set correctly. /// is set correctly.
#[inline]
pub fn update_ccsds_data_len(&mut self) { pub fn update_ccsds_data_len(&mut self) {
self.sp_header.data_len = self.sp_header.data_len =
self.len_written() as u16 - size_of::<crate::zc::SpHeader>() as u16 - 1; self.len_written() as u16 - size_of::<crate::zc::SpHeader>() as u16 - 1;
} }
/// This function calculates and returns the CRC16 for the current packet. /// This function should be called before the TC packet is serialized if
pub fn calc_own_crc16(&self) -> u16 { /// [PusTc::calc_crc_on_serialization] is set to false. It will calculate and cache the CRC16.
pub fn calc_own_crc16(&mut self) {
let mut digest = CRC_CCITT_FALSE.digest(); let mut digest = CRC_CCITT_FALSE.digest();
let sph_zc = crate::zc::SpHeader::from(self.sp_header); let sph_zc = crate::zc::SpHeader::from(self.sp_header);
digest.update(sph_zc.as_bytes()); digest.update(sph_zc.as_bytes());
let pus_tc_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap(); let pus_tc_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap();
digest.update(pus_tc_header.as_bytes()); digest.update(pus_tc_header.as_bytes());
if !self.app_data.is_empty() {
digest.update(self.app_data); digest.update(self.app_data);
digest.finalize() }
self.crc16 = Some(digest.finalize())
}
/// This helper function calls both [PusTc::update_ccsds_data_len] and [PusTc::calc_own_crc16].
pub fn update_packet_fields(&mut self) {
self.update_ccsds_data_len();
self.calc_own_crc16();
} }
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
pub fn append_to_vec(&self, vec: &mut Vec<u8>) -> usize { #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
pub fn append_to_vec(&self, vec: &mut Vec<u8>) -> Result<usize, PusError> {
let sph_zc = crate::zc::SpHeader::from(self.sp_header); let sph_zc = crate::zc::SpHeader::from(self.sp_header);
let mut appended_len = PUS_TC_MIN_LEN_WITHOUT_APP_DATA; let appended_len = PUS_TC_MIN_LEN_WITHOUT_APP_DATA + self.app_data.len();
appended_len += self.app_data.len();
let start_idx = vec.len(); let start_idx = vec.len();
let mut ser_len = 0;
vec.extend_from_slice(sph_zc.as_bytes()); vec.extend_from_slice(sph_zc.as_bytes());
ser_len += sph_zc.as_bytes().len();
// The PUS version is hardcoded to PUS C // The PUS version is hardcoded to PUS C
let pus_tc_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap(); let pus_tc_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap();
vec.extend_from_slice(pus_tc_header.as_bytes()); vec.extend_from_slice(pus_tc_header.as_bytes());
ser_len += pus_tc_header.as_bytes().len();
vec.extend_from_slice(self.app_data); vec.extend_from_slice(self.app_data);
let mut digest = CRC_CCITT_FALSE.digest(); ser_len += self.app_data.len();
digest.update(&vec[start_idx..start_idx + appended_len - 2]); let crc16 = crc_procedure(
vec.extend_from_slice(&digest.finalize().to_be_bytes()); self.calc_crc_on_serialization,
appended_len &self.crc16,
start_idx,
ser_len,
&vec[start_idx..ser_len],
)?;
vec.extend_from_slice(crc16.to_be_bytes().as_slice());
Ok(appended_len)
}
/// Create a [PusTc] instance from a raw slice. On success, it returns a tuple containing
/// the instance and the found byte length of the packet.
#[deprecated(
since = "0.7.0",
note = "Use specialized PusTcCreator or PusTcReader classes instead"
)]
pub fn from_bytes(slice: &'raw_data [u8]) -> Result<(Self, usize), PusError> {
let raw_data_len = slice.len();
if raw_data_len < PUS_TC_MIN_LEN_WITHOUT_APP_DATA {
return Err(ByteConversionError::FromSliceTooSmall {
found: raw_data_len,
expected: PUS_TC_MIN_LEN_WITHOUT_APP_DATA,
}
.into());
}
let mut current_idx = 0;
let (sp_header, _) = SpHeader::from_be_bytes(&slice[0..CCSDS_HEADER_LEN])?;
current_idx += CCSDS_HEADER_LEN;
let total_len = sp_header.total_len();
if raw_data_len < total_len || total_len < PUS_TC_MIN_LEN_WITHOUT_APP_DATA {
return Err(ByteConversionError::FromSliceTooSmall {
found: raw_data_len,
expected: total_len,
}
.into());
}
let sec_header = zc::PusTcSecondaryHeader::from_bytes(
&slice[current_idx..current_idx + PUC_TC_SECONDARY_HEADER_LEN],
)
.ok_or(ByteConversionError::ZeroCopyFromError)?;
current_idx += PUC_TC_SECONDARY_HEADER_LEN;
let raw_data = &slice[0..total_len];
let pus_tc = Self {
sp_header,
sec_header: PusTcSecondaryHeader::try_from(sec_header).unwrap(),
raw_data: Some(raw_data),
app_data: user_data_from_raw(current_idx, total_len, slice)?,
calc_crc_on_serialization: false,
crc16: Some(crc_from_raw_data(raw_data)?),
};
verify_crc16_ccitt_false_from_raw_to_pus_error(
raw_data,
pus_tc.crc16.expect("CRC16 invalid"),
)?;
Ok((pus_tc, total_len))
}
#[deprecated(since = "0.5.2", note = "use raw_bytes() instead")]
pub fn raw(&self) -> Option<&'raw_data [u8]> {
self.raw_bytes()
}
/// If [Self] was constructed with [Self::from_bytes], this function will return the slice it was
/// constructed from. Otherwise, [None] will be returned.
pub fn raw_bytes(&self) -> Option<&'raw_data [u8]> {
self.raw_data
} }
} }
impl WritablePusPacket for PusTcCreator<'_> { impl WritablePusPacket for PusTc<'_> {
#[inline]
fn len_written(&self) -> usize { fn len_written(&self) -> usize {
PUS_TC_MIN_LEN_WITHOUT_APP_DATA + self.app_data.len() PUS_TC_MIN_LEN_WITHOUT_APP_DATA + self.app_data.len()
} }
@ -382,8 +479,222 @@ impl WritablePusPacket for PusTcCreator<'_> {
curr_idx += CCSDS_HEADER_LEN; curr_idx += CCSDS_HEADER_LEN;
let sec_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap(); let sec_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap();
sec_header sec_header
.write_to(&mut slice[curr_idx..curr_idx + tc_header_len]) .write_to_bytes(&mut slice[curr_idx..curr_idx + tc_header_len])
.map_err(|_| ByteConversionError::ZeroCopyToError)?; .ok_or(ByteConversionError::ZeroCopyToError)?;
curr_idx += tc_header_len;
slice[curr_idx..curr_idx + self.app_data.len()].copy_from_slice(self.app_data);
curr_idx += self.app_data.len();
let crc16 = crc_procedure(
self.calc_crc_on_serialization,
&self.crc16,
0,
curr_idx,
slice,
)?;
slice[curr_idx..curr_idx + 2].copy_from_slice(crc16.to_be_bytes().as_slice());
curr_idx += 2;
Ok(curr_idx)
}
}
impl PartialEq for PusTc<'_> {
fn eq(&self, other: &Self) -> bool {
self.sp_header == other.sp_header
&& self.sec_header == other.sec_header
&& self.app_data == other.app_data
}
}
impl CcsdsPacket for PusTc<'_> {
ccsds_impl!();
}
impl PusPacket for PusTc<'_> {
delegate!(to self.sec_header {
fn pus_version(&self) -> PusVersion;
fn service(&self) -> u8;
fn subservice(&self) -> u8;
});
fn user_data(&self) -> &[u8] {
self.app_data
}
fn crc16(&self) -> Option<u16> {
self.crc16
}
}
impl GenericPusTcSecondaryHeader for PusTc<'_> {
delegate!(to self.sec_header {
fn pus_version(&self) -> PusVersion;
fn service(&self) -> u8;
fn subservice(&self) -> u8;
fn source_id(&self) -> u16;
fn ack_flags(&self) -> u8;
});
}
impl IsPusTelecommand for PusTc<'_> {}
}
/// This class can be used to create a PUS C telecommand packet. It is the primary data structure to
/// generate the raw byte representation of a PUS telecommand.
///
/// This class also derives the [serde::Serialize] and [serde::Deserialize] trait if the
/// [serde] feature is used, which allows to send around TC packets in a raw byte format using a
/// serde provider like [postcard](https://docs.rs/postcard/latest/postcard/).
///
/// There is no spare bytes support yet.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct PusTcCreator<'raw_data> {
sp_header: SpHeader,
pub sec_header: PusTcSecondaryHeader,
app_data: &'raw_data [u8],
}
impl<'raw_data> PusTcCreator<'raw_data> {
/// Generates a new struct instance.
///
/// # Arguments
///
/// * `sp_header` - Space packet header information. The correct packet type will be set
/// automatically
/// * `sec_header` - Information contained in the data field header, including the service
/// and subservice type
/// * `app_data` - Custom application data
/// * `set_ccsds_len` - Can be used to automatically update the CCSDS space packet data length
/// field. If this is not set to true, [PusTc::update_ccsds_data_len] can be called to set
/// the correct value to this field manually
pub fn new(
sp_header: &mut SpHeader,
sec_header: PusTcSecondaryHeader,
app_data: &'raw_data [u8],
set_ccsds_len: bool,
) -> Self {
sp_header.set_packet_type(PacketType::Tc);
sp_header.set_sec_header_flag();
let mut pus_tc = Self {
sp_header: *sp_header,
app_data,
sec_header,
};
if set_ccsds_len {
pus_tc.update_ccsds_data_len();
}
pus_tc
}
/// Simplified version of the [PusTcCreator::new] function which allows to only specify service
/// and subservice instead of the full PUS TC secondary header.
pub fn new_simple(
sph: &mut SpHeader,
service: u8,
subservice: u8,
app_data: Option<&'raw_data [u8]>,
set_ccsds_len: bool,
) -> Self {
Self::new(
sph,
PusTcSecondaryHeader::new(service, subservice, ACK_ALL, 0),
app_data.unwrap_or(&[]),
set_ccsds_len,
)
}
pub fn new_no_app_data(
sp_header: &mut SpHeader,
sec_header: PusTcSecondaryHeader,
set_ccsds_len: bool,
) -> Self {
Self::new(sp_header, sec_header, &[], set_ccsds_len)
}
pub fn sp_header(&self) -> &SpHeader {
&self.sp_header
}
pub fn set_ack_field(&mut self, ack: u8) -> bool {
if ack > 0b1111 {
return false;
}
self.sec_header.ack = ack & 0b1111;
true
}
pub fn set_source_id(&mut self, source_id: u16) {
self.sec_header.source_id = source_id;
}
sp_header_impls!();
/// Calculate the CCSDS space packet data length field and set it
/// This is called automatically if the `set_ccsds_len` argument in the [PusTc::new] call was
/// used.
/// If this was not done or the application data is set or changed after construction,
/// this function needs to be called to ensure that the data length field of the CCSDS header
/// is set correctly.
pub fn update_ccsds_data_len(&mut self) {
self.sp_header.data_len =
self.len_written() as u16 - size_of::<crate::zc::SpHeader>() as u16 - 1;
}
/// This function calculates and returns the CRC16 for the current packet.
pub fn calc_own_crc16(&self) -> u16 {
let mut digest = CRC_CCITT_FALSE.digest();
let sph_zc = crate::zc::SpHeader::from(self.sp_header);
digest.update(sph_zc.as_bytes());
let pus_tc_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap();
digest.update(pus_tc_header.as_bytes());
digest.update(self.app_data);
digest.finalize()
}
#[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
pub fn append_to_vec(&self, vec: &mut Vec<u8>) -> Result<usize, PusError> {
let sph_zc = crate::zc::SpHeader::from(self.sp_header);
let mut appended_len = PUS_TC_MIN_LEN_WITHOUT_APP_DATA;
appended_len += self.app_data.len();
let start_idx = vec.len();
vec.extend_from_slice(sph_zc.as_bytes());
// The PUS version is hardcoded to PUS C
let pus_tc_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap();
vec.extend_from_slice(pus_tc_header.as_bytes());
vec.extend_from_slice(self.app_data);
let mut digest = CRC_CCITT_FALSE.digest();
digest.update(&vec[start_idx..start_idx + appended_len - 2]);
vec.extend_from_slice(&digest.finalize().to_be_bytes());
Ok(appended_len)
}
}
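A minimal sketch of creating and serializing a ping TC with this creator, using the `WritablePusPacket` implementation that follows; the `SpHeader::tc_unseg` constructor and the `&mut SpHeader` parameter are the signatures from this column of the diff, so newer crate versions (which take the header by value) will differ:

```rust
// Minimal sketch, assuming the constructor signatures shown in this diff column.
use spacepackets::ecss::tc::{PusTcCreator, PusTcSecondaryHeader};
use spacepackets::ecss::WritablePusPacket;
use spacepackets::SpHeader;

fn serialize_ping_tc() {
    let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).expect("invalid APID or sequence count");
    let tc_header = PusTcSecondaryHeader::new_simple(17, 1);
    // Passing `true` updates the CCSDS data length field automatically.
    let ping_tc = PusTcCreator::new_no_app_data(&mut sph, tc_header, true);

    let mut buf = [0u8; 32];
    let written = ping_tc.write_to_bytes(&mut buf).expect("target buffer too small");
    // 6 byte CCSDS header + 5 byte PUS C secondary header + 2 byte CRC16.
    assert_eq!(written, 13);
}
```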
impl WritablePusPacket for PusTcCreator<'_> {
fn len_written(&self) -> usize {
PUS_TC_MIN_LEN_WITHOUT_APP_DATA + self.app_data.len()
}
/// Write the raw PUS byte representation to a provided buffer.
fn write_to_bytes(&self, slice: &mut [u8]) -> Result<usize, PusError> {
let mut curr_idx = 0;
let tc_header_len = size_of::<zc::PusTcSecondaryHeader>();
let total_size = self.len_written();
if total_size > slice.len() {
return Err(ByteConversionError::ToSliceTooSmall {
found: slice.len(),
expected: total_size,
}
.into());
}
self.sp_header.write_to_be_bytes(slice)?;
curr_idx += CCSDS_HEADER_LEN;
let sec_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap();
sec_header
.write_to_bytes(&mut slice[curr_idx..curr_idx + tc_header_len])
.ok_or(ByteConversionError::ZeroCopyToError)?;
curr_idx += tc_header_len; curr_idx += tc_header_len;
slice[curr_idx..curr_idx + self.app_data.len()].copy_from_slice(self.app_data); slice[curr_idx..curr_idx + self.app_data.len()].copy_from_slice(self.app_data);
@ -402,20 +713,15 @@ impl CcsdsPacket for PusTcCreator<'_> {
impl PusPacket for PusTcCreator<'_> { impl PusPacket for PusTcCreator<'_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
}); });
#[inline]
fn user_data(&self) -> &[u8] { fn user_data(&self) -> &[u8] {
self.app_data self.app_data
} }
#[inline]
fn crc16(&self) -> Option<u16> { fn crc16(&self) -> Option<u16> {
Some(self.calc_own_crc16()) Some(self.calc_own_crc16())
} }
@ -423,15 +729,10 @@ impl PusPacket for PusTcCreator<'_> {
impl GenericPusTcSecondaryHeader for PusTcCreator<'_> { impl GenericPusTcSecondaryHeader for PusTcCreator<'_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
#[inline]
fn source_id(&self) -> u16; fn source_id(&self) -> u16;
#[inline]
fn ack_flags(&self) -> u8; fn ack_flags(&self) -> u8;
}); });
} }
@ -451,7 +752,6 @@ impl IsPusTelecommand for PusTcCreator<'_> {}
/// * `'raw_data` - Lifetime of the provided raw slice. /// * `'raw_data` - Lifetime of the provided raw slice.
#[derive(Eq, Copy, Clone, Debug)] #[derive(Eq, Copy, Clone, Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct PusTcReader<'raw_data> { pub struct PusTcReader<'raw_data> {
#[cfg_attr(feature = "serde", serde(skip))] #[cfg_attr(feature = "serde", serde(skip))]
raw_data: &'raw_data [u8], raw_data: &'raw_data [u8],
@ -463,8 +763,7 @@ pub struct PusTcReader<'raw_data> {
impl<'raw_data> PusTcReader<'raw_data> { impl<'raw_data> PusTcReader<'raw_data> {
/// Create a [PusTcReader] instance from a raw slice. On success, it returns a tuple containing /// Create a [PusTcReader] instance from a raw slice. On success, it returns a tuple containing
/// the instance and the found byte length of the packet. This function also performs a CRC /// the instance and the found byte length of the packet.
/// check and will return an appropriate [PusError] if the check fails.
pub fn new(slice: &'raw_data [u8]) -> Result<(Self, usize), PusError> { pub fn new(slice: &'raw_data [u8]) -> Result<(Self, usize), PusError> {
let raw_data_len = slice.len(); let raw_data_len = slice.len();
if raw_data_len < PUS_TC_MIN_LEN_WITHOUT_APP_DATA { if raw_data_len < PUS_TC_MIN_LEN_WITHOUT_APP_DATA {
@ -492,10 +791,10 @@ impl<'raw_data> PusTcReader<'raw_data> {
} }
.into()); .into());
} }
let sec_header = zc::PusTcSecondaryHeader::read_from_bytes( let sec_header = zc::PusTcSecondaryHeader::from_bytes(
&slice[current_idx..current_idx + PUC_TC_SECONDARY_HEADER_LEN], &slice[current_idx..current_idx + PUC_TC_SECONDARY_HEADER_LEN],
) )
.map_err(|_| ByteConversionError::ZeroCopyFromError)?; .ok_or(ByteConversionError::ZeroCopyFromError)?;
current_idx += PUC_TC_SECONDARY_HEADER_LEN; current_idx += PUC_TC_SECONDARY_HEADER_LEN;
let raw_data = &slice[0..total_len]; let raw_data = &slice[0..total_len];
let pus_tc = Self { let pus_tc = Self {
@ -509,29 +808,24 @@ impl<'raw_data> PusTcReader<'raw_data> {
Ok((pus_tc, total_len)) Ok((pus_tc, total_len))
} }
#[inline]
pub fn app_data(&self) -> &[u8] { pub fn app_data(&self) -> &[u8] {
self.user_data() self.user_data()
} }
#[inline]
pub fn raw_data(&self) -> &[u8] { pub fn raw_data(&self) -> &[u8] {
self.raw_data self.raw_data
} }
#[inline]
pub fn len_packed(&self) -> usize { pub fn len_packed(&self) -> usize {
self.sp_header.total_len() self.sp_header.total_len()
} }
#[inline]
pub fn sp_header(&self) -> &SpHeader { pub fn sp_header(&self) -> &SpHeader {
&self.sp_header &self.sp_header
} }
} }
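A short sketch of parsing a raw frame with the reader above; whether the constructor also verifies the CRC16 differs between the two versions compared in this diff:

```rust
// Minimal sketch: parsing a raw PUS TC frame with PusTcReader.
use spacepackets::ecss::tc::PusTcReader;
use spacepackets::ecss::PusPacket;

fn handle_raw_tc(raw: &[u8]) {
    let (tc, packet_len) = PusTcReader::new(raw).expect("parsing PUS TC failed");
    // Service 17, subservice 1 is the standard ping telecommand.
    if tc.service() == 17 && tc.subservice() == 1 {
        println!("received ping TC ({packet_len} bytes), app data: {:x?}", tc.app_data());
    }
}
```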
impl PartialEq for PusTcReader<'_> { impl PartialEq for PusTcReader<'_> {
#[inline]
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.raw_data == other.raw_data self.raw_data == other.raw_data
} }
@ -543,20 +837,15 @@ impl CcsdsPacket for PusTcReader<'_> {
impl PusPacket for PusTcReader<'_> { impl PusPacket for PusTcReader<'_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
}); });
#[inline]
fn user_data(&self) -> &[u8] { fn user_data(&self) -> &[u8] {
self.app_data self.app_data
} }
#[inline]
fn crc16(&self) -> Option<u16> { fn crc16(&self) -> Option<u16> {
Some(self.crc16) Some(self.crc16)
} }
@ -564,15 +853,10 @@ impl PusPacket for PusTcReader<'_> {
impl GenericPusTcSecondaryHeader for PusTcReader<'_> { impl GenericPusTcSecondaryHeader for PusTcReader<'_> {
delegate!(to self.sec_header { delegate!(to self.sec_header {
#[inline]
fn pus_version(&self) -> PusVersion; fn pus_version(&self) -> PusVersion;
#[inline]
fn service(&self) -> u8; fn service(&self) -> u8;
#[inline]
fn subservice(&self) -> u8; fn subservice(&self) -> u8;
#[inline]
fn source_id(&self) -> u16; fn source_id(&self) -> u16;
#[inline]
fn ack_flags(&self) -> u8; fn ack_flags(&self) -> u8;
}); });
} }
@ -610,19 +894,19 @@ mod tests {
use postcard::{from_bytes, to_allocvec}; use postcard::{from_bytes, to_allocvec};
fn base_ping_tc_full_ctor() -> PusTcCreator<'static> { fn base_ping_tc_full_ctor() -> PusTcCreator<'static> {
let sph = SpHeader::new_for_unseg_tc_checked(0x02, 0x34, 0).unwrap(); let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).unwrap();
let tc_header = PusTcSecondaryHeader::new_simple(17, 1); let tc_header = PusTcSecondaryHeader::new_simple(17, 1);
PusTcCreator::new_no_app_data(sph, tc_header, true) PusTcCreator::new_no_app_data(&mut sph, tc_header, true)
} }
fn base_ping_tc_simple_ctor() -> PusTcCreator<'static> { fn base_ping_tc_simple_ctor() -> PusTcCreator<'static> {
let sph = SpHeader::new_for_unseg_tc_checked(0x02, 0x34, 0).unwrap(); let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).unwrap();
PusTcCreator::new_simple(sph, 17, 1, &[], true) PusTcCreator::new_simple(&mut sph, 17, 1, None, true)
} }
fn base_ping_tc_simple_ctor_with_app_data(app_data: &'static [u8]) -> PusTcCreator<'static> { fn base_ping_tc_simple_ctor_with_app_data(app_data: &'static [u8]) -> PusTcCreator<'static> {
let sph = SpHeader::new_for_unseg_tc_checked(0x02, 0x34, 0).unwrap(); let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).unwrap();
PusTcCreator::new_simple(sph, 17, 1, app_data, true) PusTcCreator::new_simple(&mut sph, 17, 1, Some(app_data), true)
} }
#[test] #[test]
@ -678,8 +962,8 @@ mod tests {
#[test] #[test]
fn test_update_func() { fn test_update_func() {
let sph = SpHeader::new_for_unseg_tc_checked(0x02, 0x34, 0).unwrap(); let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).unwrap();
let mut tc = PusTcCreator::new_simple(sph, 17, 1, &[], false); let mut tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, false);
assert_eq!(tc.data_len(), 0); assert_eq!(tc.data_len(), 0);
tc.update_ccsds_data_len(); tc.update_ccsds_data_len();
assert_eq!(tc.data_len(), 6); assert_eq!(tc.data_len(), 6);
@ -726,7 +1010,9 @@ mod tests {
fn test_vec_ser_deser() { fn test_vec_ser_deser() {
let pus_tc = base_ping_tc_simple_ctor(); let pus_tc = base_ping_tc_simple_ctor();
let mut test_vec = Vec::new(); let mut test_vec = Vec::new();
let size = pus_tc.append_to_vec(&mut test_vec); let size = pus_tc
.append_to_vec(&mut test_vec)
.expect("Error writing TC to vector");
assert_eq!(size, 13); assert_eq!(size, 13);
verify_test_tc_raw(&test_vec.as_slice()); verify_test_tc_raw(&test_vec.as_slice());
verify_crc_no_app_data(&test_vec.as_slice()); verify_crc_no_app_data(&test_vec.as_slice());
@ -772,7 +1058,9 @@ mod tests {
let pus_tc = base_ping_tc_simple_ctor_with_app_data(&[1, 2, 3]); let pus_tc = base_ping_tc_simple_ctor_with_app_data(&[1, 2, 3]);
verify_test_tc(&pus_tc, true, 16); verify_test_tc(&pus_tc, true, 16);
let mut test_vec = Vec::new(); let mut test_vec = Vec::new();
let size = pus_tc.append_to_vec(&mut test_vec); let size = pus_tc
.append_to_vec(&mut test_vec)
.expect("Error writing TC to vector");
assert_eq!(test_vec[11], 1); assert_eq!(test_vec[11], 1);
assert_eq!(test_vec[12], 2); assert_eq!(test_vec[12], 2);
assert_eq!(test_vec[13], 3); assert_eq!(test_vec[13], 3);
@ -851,8 +1139,7 @@ mod tests {
if !has_user_data { if !has_user_data {
assert!(tc.user_data().is_empty()); assert!(tc.user_data().is_empty());
} }
let mut comp_header = let mut comp_header = SpHeader::tc_unseg(0x02, 0x34, exp_full_len as u16 - 7).unwrap();
SpHeader::new_for_unseg_tc_checked(0x02, 0x34, exp_full_len as u16 - 7).unwrap();
comp_header.set_sec_header_flag(); comp_header.set_sec_header_flag();
assert_eq!(*tc.sp_header(), comp_header); assert_eq!(*tc.sp_header(), comp_header);
} }
@ -863,8 +1150,7 @@ mod tests {
assert!(tc.user_data().is_empty()); assert!(tc.user_data().is_empty());
} }
assert_eq!(tc.len_packed(), exp_full_len); assert_eq!(tc.len_packed(), exp_full_len);
let mut comp_header = let mut comp_header = SpHeader::tc_unseg(0x02, 0x34, exp_full_len as u16 - 7).unwrap();
SpHeader::new_for_unseg_tc_checked(0x02, 0x34, exp_full_len as u16 - 7).unwrap();
comp_header.set_sec_header_flag(); comp_header.set_sec_header_flag();
assert_eq!(*tc.sp_header(), comp_header); assert_eq!(*tc.sp_header(), comp_header);
} }
File diff suppressed because it is too large

View File
@ -22,6 +22,10 @@
//! //!
//! `spacepackets` supports various runtime environments and is also suitable for `no_std` environments. //! `spacepackets` supports various runtime environments and is also suitable for `no_std` environments.
//! //!
//! It also offers optional support for [`serde`](https://serde.rs/). This allows serializing and
//! deserializing them with an appropriate `serde` provider like
//! [`postcard`](https://github.com/jamesmunns/postcard).
//!
//! ### Default features //! ### Default features
//! //!
//! - [`std`](https://doc.rust-lang.org/std/): Enables functionality relying on the standard library. //! - [`std`](https://doc.rust-lang.org/std/): Enables functionality relying on the standard library.
@ -32,11 +36,7 @@
//! ### Optional features //! ### Optional features
//! //!
//! - [`serde`](https://serde.rs/): Adds `serde` support for most types by adding `Serialize` and //! - [`serde`](https://serde.rs/): Adds `serde` support for most types by adding `Serialize` and
//! `Deserialize` `derive`s. //! `Deserialize` `derive`s
//! - [`chrono`](https://crates.io/crates/chrono): Add basic support for the `chrono` time library.
//! - [`timelib`](https://crates.io/crates/time): Add basic support for the `time` time library.
//! - [`defmt`](https://defmt.ferrous-systems.com/): Add support for the `defmt` by adding the
//! [`defmt::Format`](https://defmt.ferrous-systems.com/format) derive on many types.
//! //!
//! ## Module //! ## Module
//! //!
@ -48,30 +48,35 @@
//! //!
//! ```rust //! ```rust
//! use spacepackets::SpHeader; //! use spacepackets::SpHeader;
//! let sp_header = SpHeader::new_for_unseg_tc_checked(0x42, 12, 1).expect("error creating CCSDS TC header"); //! let sp_header = SpHeader::tc_unseg(0x42, 12, 1).expect("Error creating CCSDS TC header");
//! println!("{:?}", sp_header); //! println!("{:?}", sp_header);
//! let mut ccsds_buf: [u8; 32] = [0; 32]; //! let mut ccsds_buf: [u8; 32] = [0; 32];
//! sp_header.write_to_be_bytes(&mut ccsds_buf).expect("Writing CCSDS TC header failed"); //! sp_header.write_to_be_bytes(&mut ccsds_buf).expect("Writing CCSDS TC header failed");
//! println!("{:x?}", &ccsds_buf[0..6]); //! println!("{:x?}", &ccsds_buf[0..6]);
//! ``` //! ```
#![no_std] #![no_std]
#![cfg_attr(docsrs, feature(doc_auto_cfg))] #![cfg_attr(doc_cfg, feature(doc_cfg))]
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
extern crate alloc; extern crate alloc;
#[cfg(any(feature = "std", test))] #[cfg(any(feature = "std", test))]
extern crate std; extern crate std;
use core::{fmt::Debug, hash::Hash}; use crate::ecss::CCSDS_HEADER_LEN;
use core::{
fmt::{Debug, Display, Formatter},
hash::Hash,
};
use crc::{Crc, CRC_16_IBM_3740}; use crc::{Crc, CRC_16_IBM_3740};
use delegate::delegate; use delegate::delegate;
use zerocopy::{FromBytes, IntoBytes};
#[cfg(feature = "std")]
use std::error::Error;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
pub mod cfdp; pub mod cfdp;
pub mod ecss; pub mod ecss;
pub mod seq_count;
pub mod time; pub mod time;
pub mod util; pub mod util;
@ -79,8 +84,6 @@ mod private {
pub trait Sealed {} pub trait Sealed {}
} }
pub const CCSDS_HEADER_LEN: usize = core::mem::size_of::<crate::zc::SpHeader>();
/// CRC algorithm used by the PUS standard, the CCSDS TC standard and the CFDP standard. /// CRC algorithm used by the PUS standard, the CCSDS TC standard and the CFDP standard.
pub const CRC_CCITT_FALSE: Crc<u16> = Crc::<u16>::new(&CRC_16_IBM_3740); pub const CRC_CCITT_FALSE: Crc<u16> = Crc::<u16>::new(&CRC_16_IBM_3740);
@ -88,28 +91,57 @@ pub const MAX_APID: u16 = 2u16.pow(11) - 1;
pub const MAX_SEQ_COUNT: u16 = 2u16.pow(14) - 1; pub const MAX_SEQ_COUNT: u16 = 2u16.pow(14) - 1;
/// Generic error type when converting to and from raw byte slices. /// Generic error type when converting to and from raw byte slices.
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum ByteConversionError { pub enum ByteConversionError {
/// The passed slice is too small. Returns the passed slice length and expected minimum size /// The passed slice is too small. Returns the passed slice length and expected minimum size
#[error("target slice with size {found} is too small, expected size of at least {expected}")] ToSliceTooSmall {
ToSliceTooSmall { found: usize, expected: usize }, found: usize,
expected: usize,
},
/// The provided buffer is too small. Returns the passed slice length and expected minimum size /// The provided buffer is too small. Returns the passed slice length and expected minimum size
#[error("source slice with size {found} too small, expected at least {expected} bytes")] FromSliceTooSmall {
FromSliceTooSmall { found: usize, expected: usize }, found: usize,
expected: usize,
},
/// The [zerocopy] library failed to write to bytes /// The [zerocopy] library failed to write to bytes
#[error("zerocopy serialization error")]
ZeroCopyToError, ZeroCopyToError,
/// The [zerocopy] library failed to read from bytes
#[error("zerocopy deserialization error")]
ZeroCopyFromError, ZeroCopyFromError,
} }
impl Display for ByteConversionError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
ByteConversionError::ToSliceTooSmall { found, expected } => {
write!(
f,
"target slice with size {} is too small, expected size of at least {}",
found, expected
)
}
ByteConversionError::FromSliceTooSmall { found, expected } => {
write!(
f,
"source slice with size {} too small, expected at least {} bytes",
found, expected
)
}
ByteConversionError::ZeroCopyToError => {
write!(f, "zerocopy serialization error")
}
ByteConversionError::ZeroCopyFromError => {
write!(f, "zerocopy deserialization error")
}
}
}
}
#[cfg(feature = "std")]
impl Error for ByteConversionError {}
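Because the variants carry the found and expected sizes, callers can report exactly how much buffer space was missing. A minimal sketch, assuming the renamed constructor and the `write_to_be_bytes` method shown further down in this diff:

```rust
use spacepackets::{ByteConversionError, SpHeader};

fn main() {
    let sp_header = SpHeader::new_for_unseg_tc_checked(0x42, 12, 0)
        .expect("error creating CCSDS TC header");
    // The CCSDS primary header needs 6 bytes; this buffer is deliberately too small.
    let mut too_small: [u8; 4] = [0; 4];
    match sp_header.write_to_be_bytes(&mut too_small) {
        Err(ByteConversionError::ToSliceTooSmall { found, expected }) => {
            // found = 4 (buffer size), expected = 6 (primary header length).
            println!("buffer of {found} bytes too small, need at least {expected}");
        }
        other => panic!("unexpected result: {other:?}"),
    }
}
```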
/// CCSDS packet type enumeration. /// CCSDS packet type enumeration.
#[derive(Debug, PartialEq, Eq, Copy, Clone)] #[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum PacketType { pub enum PacketType {
Tm = 0, Tm = 0,
Tc = 1, Tc = 1,
@ -133,7 +165,6 @@ pub fn packet_type_in_raw_packet_id(packet_id: u16) -> PacketType {
#[derive(Debug, PartialEq, Eq, Copy, Clone)] #[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum SequenceFlags { pub enum SequenceFlags {
ContinuationSegment = 0b00, ContinuationSegment = 0b00,
FirstSegment = 0b01, FirstSegment = 0b01,
@ -161,7 +192,6 @@ impl TryFrom<u8> for SequenceFlags {
/// of the first two bytes in the CCSDS primary header. /// of the first two bytes in the CCSDS primary header.
#[derive(Debug, Eq, Copy, Clone)] #[derive(Debug, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct PacketId { pub struct PacketId {
pub ptype: PacketType, pub ptype: PacketType,
pub sec_header_flag: bool, pub sec_header_flag: bool,
@ -169,21 +199,18 @@ pub struct PacketId {
} }
impl PartialEq for PacketId { impl PartialEq for PacketId {
#[inline]
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.raw().eq(&other.raw()) self.raw().eq(&other.raw())
} }
} }
impl PartialOrd for PacketId { impl PartialOrd for PacketId {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> { fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
Some(self.cmp(other)) Some(self.cmp(other))
} }
} }
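For orientation, the `raw()` value compared by these impls packs the packet type, the secondary header flag and the 11-bit APID into the lower 13 bits of the first two header bytes. A standalone sketch of that layout (the helper below is illustrative and not part of the crate):

```rust
/// Illustrative only: mirrors the bit layout behind PacketId::raw().
/// Bit 12 holds the packet type, bit 11 the secondary header flag and
/// bits 10..0 the APID.
fn packet_id_raw(is_tc: bool, sec_header_flag: bool, apid: u16) -> u16 {
    ((is_tc as u16) << 12) | ((sec_header_flag as u16) << 11) | (apid & 0x7FF)
}

fn main() {
    // Matches the unit test below: TM, no secondary header, APID 0x42 -> 0x0042.
    assert_eq!(packet_id_raw(false, false, 0x42), 0x0042);
    // TC without secondary header and APID 0x7FF -> 0x17FF, as in the zerocopy test.
    assert_eq!(packet_id_raw(true, false, 0x7FF), 0x17FF);
}
```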
impl Ord for PacketId { impl Ord for PacketId {
#[inline]
fn cmp(&self, other: &Self) -> core::cmp::Ordering { fn cmp(&self, other: &Self) -> core::cmp::Ordering {
self.raw().cmp(&other.raw()) self.raw().cmp(&other.raw())
} }
@ -197,7 +224,6 @@ impl Hash for PacketId {
} }
impl Default for PacketId { impl Default for PacketId {
#[inline]
fn default() -> Self { fn default() -> Self {
PacketId { PacketId {
ptype: PacketType::Tm, ptype: PacketType::Tm,
@ -208,34 +234,23 @@ impl Default for PacketId {
} }
impl PacketId { impl PacketId {
/// This constructor will panic if the passed APID exceeds [MAX_APID]. pub const fn const_tc(sec_header: bool, apid: u16) -> Self {
/// Use the checked constructor variants to avoid panics. Self::const_new(PacketType::Tc, sec_header, apid)
#[inline] }
pub const fn new_for_tc(sec_header: bool, apid: u16) -> Self {
pub const fn const_tm(sec_header: bool, apid: u16) -> Self {
Self::const_new(PacketType::Tm, sec_header, apid)
}
pub fn tc(sec_header: bool, apid: u16) -> Option<Self> {
Self::new(PacketType::Tc, sec_header, apid) Self::new(PacketType::Tc, sec_header, apid)
} }
/// This constructor will panic if the passed APID exceeds [MAX_APID]. pub fn tm(sec_header: bool, apid: u16) -> Option<Self> {
/// Use the checked constructor variants to avoid panics.
#[inline]
pub const fn new_for_tm(sec_header: bool, apid: u16) -> Self {
Self::new(PacketType::Tm, sec_header, apid) Self::new(PacketType::Tm, sec_header, apid)
} }
#[inline] pub const fn const_new(ptype: PacketType, sec_header: bool, apid: u16) -> Self {
pub fn new_for_tc_checked(sec_header: bool, apid: u16) -> Option<Self> {
Self::new_checked(PacketType::Tc, sec_header, apid)
}
#[inline]
pub fn new_for_tm_checked(sec_header: bool, apid: u16) -> Option<Self> {
Self::new_checked(PacketType::Tm, sec_header, apid)
}
/// This constructor will panic if the passed APID exceeds [MAX_APID].
/// Use the checked variants to avoid panics.
#[inline]
pub const fn new(ptype: PacketType, sec_header: bool, apid: u16) -> Self {
if apid > MAX_APID { if apid > MAX_APID {
panic!("APID too large"); panic!("APID too large");
} }
@ -246,18 +261,16 @@ impl PacketId {
} }
} }
#[inline] pub fn new(ptype: PacketType, sec_header_flag: bool, apid: u16) -> Option<PacketId> {
pub fn new_checked(ptype: PacketType, sec_header_flag: bool, apid: u16) -> Option<PacketId> {
if apid > MAX_APID { if apid > MAX_APID {
return None; return None;
} }
Some(PacketId::new(ptype, sec_header_flag, apid)) Some(PacketId::const_new(ptype, sec_header_flag, apid))
} }
/// Set a new Application Process ID (APID). If the passed number is invalid, the APID will /// Set a new Application Process ID (APID). If the passed number is invalid, the APID will
/// not be set and false will be returned. The maximum allowed value for the 11-bit field is /// not be set and false will be returned. The maximum allowed value for the 11-bit field is
/// 2047 /// 2047
#[inline]
pub fn set_apid(&mut self, apid: u16) -> bool { pub fn set_apid(&mut self, apid: u16) -> bool {
if apid > MAX_APID { if apid > MAX_APID {
return false; return false;
@ -266,7 +279,6 @@ impl PacketId {
true true
} }
#[inline]
pub fn apid(&self) -> u16 { pub fn apid(&self) -> u16 {
self.apid self.apid
} }
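A small usage sketch for the APID setter and getter documented above, assuming the checked constructor name from the left-hand column of this diff:

```rust
use spacepackets::{PacketId, PacketType};

fn main() {
    let mut packet_id = PacketId::new_checked(PacketType::Tc, true, 0x1FF)
        .expect("packet ID creation failed");
    // 2048 exceeds the 11-bit maximum of 2047, so the APID stays untouched.
    assert!(!packet_id.set_apid(2048));
    assert_eq!(packet_id.apid(), 0x1FF);
    // The largest valid APID is accepted.
    assert!(packet_id.set_apid(2047));
    assert_eq!(packet_id.apid(), 2047);
}
```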
@ -291,17 +303,15 @@ impl From<u16> for PacketId {
/// third and the fourth byte in the CCSDS primary header. /// third and the fourth byte in the CCSDS primary header.
#[derive(Debug, PartialEq, Eq, Copy, Clone)] #[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct PacketSequenceCtrl { pub struct PacketSequenceCtrl {
pub seq_flags: SequenceFlags, pub seq_flags: SequenceFlags,
seq_count: u16, seq_count: u16,
} }
impl PacketSequenceCtrl { impl PacketSequenceCtrl {
/// This constructor panics if the sequence count exceeds [MAX_SEQ_COUNT]. /// const variant of [PacketSequenceCtrl::new], but panics if the sequence count exceeds
/// Use [Self::new_checked] to avoid panics. /// [MAX_SEQ_COUNT].
#[inline] pub const fn const_new(seq_flags: SequenceFlags, seq_count: u16) -> PacketSequenceCtrl {
pub const fn new(seq_flags: SequenceFlags, seq_count: u16) -> PacketSequenceCtrl {
if seq_count > MAX_SEQ_COUNT { if seq_count > MAX_SEQ_COUNT {
panic!("Sequence count too large"); panic!("Sequence count too large");
} }
@ -312,22 +322,18 @@ impl PacketSequenceCtrl {
} }
/// Returns [None] if the passed sequence count exceeds [MAX_SEQ_COUNT]. /// Returns [None] if the passed sequence count exceeds [MAX_SEQ_COUNT].
#[inline] pub fn new(seq_flags: SequenceFlags, seq_count: u16) -> Option<PacketSequenceCtrl> {
pub fn new_checked(seq_flags: SequenceFlags, seq_count: u16) -> Option<PacketSequenceCtrl> {
if seq_count > MAX_SEQ_COUNT { if seq_count > MAX_SEQ_COUNT {
return None; return None;
} }
Some(PacketSequenceCtrl::new(seq_flags, seq_count)) Some(PacketSequenceCtrl::const_new(seq_flags, seq_count))
} }
#[inline]
pub fn raw(&self) -> u16 { pub fn raw(&self) -> u16 {
((self.seq_flags as u16) << 14) | self.seq_count ((self.seq_flags as u16) << 14) | self.seq_count
} }
/// Set a new sequence count. If the passed number is invalid, the sequence count will not be /// Set a new sequence count. If the passed number is invalid, the sequence count will not be
/// set and false will be returned. The maximum allowed value for the 14-bit field is 16383. /// set and false will be returned. The maximum allowed value for the 14-bit field is 16383.
#[inline]
pub fn set_seq_count(&mut self, ssc: u16) -> bool { pub fn set_seq_count(&mut self, ssc: u16) -> bool {
if ssc > MAX_SEQ_COUNT { if ssc > MAX_SEQ_COUNT {
return false; return false;
@ -336,7 +342,6 @@ impl PacketSequenceCtrl {
true true
} }
#[inline]
pub fn seq_count(&self) -> u16 { pub fn seq_count(&self) -> u16 {
self.seq_count self.seq_count
} }
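The same pattern applies to the sequence count setter documented above; a brief sketch using the checked constructor:

```rust
use spacepackets::{PacketSequenceCtrl, SequenceFlags};

fn main() {
    let mut psc = PacketSequenceCtrl::new_checked(SequenceFlags::Unsegmented, 77)
        .expect("PSC creation failed");
    // 16383 is the largest value representable in the 14-bit sequence count field.
    assert!(psc.set_seq_count(16383));
    assert_eq!(psc.seq_count(), 16383);
    // Larger values are rejected and the previous count is kept.
    assert!(!psc.set_seq_count(16384));
    assert_eq!(psc.seq_count(), 16383);
}
```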
@ -378,7 +383,6 @@ pub trait CcsdsPacket {
/// Retrieve data length field /// Retrieve data length field
fn data_len(&self) -> u16; fn data_len(&self) -> u16;
/// Retrieve the total packet size based on the data length field /// Retrieve the total packet size based on the data length field
#[inline]
fn total_len(&self) -> usize { fn total_len(&self) -> usize {
usize::from(self.data_len()) + CCSDS_HEADER_LEN + 1 usize::from(self.data_len()) + CCSDS_HEADER_LEN + 1
} }
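The `+ 1` term reflects the CCSDS convention that the data length field stores the number of bytes in the packet data field minus one. A small worked sketch of the arithmetic (the constant mirrors the crate, the free function is illustrative):

```rust
// The CCSDS primary header is 6 bytes long.
const CCSDS_HEADER_LEN: usize = 6;

// Mirrors CcsdsPacket::total_len(): data_len is (packet data field length - 1).
fn total_len(data_len: u16) -> usize {
    usize::from(data_len) + CCSDS_HEADER_LEN + 1
}

fn main() {
    // data_len = 9 means 10 bytes of packet data: 6 + 10 = 16 bytes in total.
    assert_eq!(total_len(9), 16);
    // data_len = 0 corresponds to a single byte of packet data, 7 bytes in total.
    assert_eq!(total_len(0), 7);
}
```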
@ -459,7 +463,6 @@ pub trait CcsdsPrimaryHeader {
/// * `data_len` - Data length field occupies the fifth and the sixth byte of the raw header /// * `data_len` - Data length field occupies the fifth and the sixth byte of the raw header
#[derive(Debug, PartialEq, Eq, Copy, Clone)] #[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct SpHeader { pub struct SpHeader {
pub version: u8, pub version: u8,
pub packet_id: PacketId, pub packet_id: PacketId,
@ -467,12 +470,9 @@ pub struct SpHeader {
pub data_len: u16, pub data_len: u16,
} }
pub type SpacePacketHeader = SpHeader;
impl Default for SpHeader { impl Default for SpHeader {
/// The default function sets the sequence flag field to [SequenceFlags::Unsegmented] and the /// The default function sets the sequence flag field to [SequenceFlags::Unsegmented]. The data
/// data length to 0. /// length field is set to 1, which denotes an empty space packet.
#[inline]
fn default() -> Self { fn default() -> Self {
SpHeader { SpHeader {
version: 0, version: 0,
@ -481,13 +481,12 @@ impl Default for SpHeader {
seq_flags: SequenceFlags::Unsegmented, seq_flags: SequenceFlags::Unsegmented,
seq_count: 0, seq_count: 0,
}, },
data_len: 0, data_len: 1,
} }
} }
} }
impl SpHeader { impl SpHeader {
#[inline]
pub const fn new(packet_id: PacketId, psc: PacketSequenceCtrl, data_len: u16) -> Self { pub const fn new(packet_id: PacketId, psc: PacketSequenceCtrl, data_len: u16) -> Self {
Self { Self {
version: 0, version: 0,
@ -497,43 +496,9 @@ impl SpHeader {
} }
} }
/// This constructor sets the sequence flag field to [SequenceFlags::Unsegmented] and the data /// const variant of the [SpHeader::new_from_single_fields] function. Panics if the passed
/// length to 0. /// APID exceeds [MAX_APID] or the passed packet sequence count exceeds [MAX_SEQ_COUNT].
/// const fn const_new_from_single_fields(
/// This constructor will panic if the APID exceeds [MAX_APID].
#[inline]
pub const fn new_from_apid(apid: u16) -> Self {
SpHeader {
version: 0,
packet_id: PacketId::new(PacketType::Tm, false, apid),
psc: PacketSequenceCtrl {
seq_flags: SequenceFlags::Unsegmented,
seq_count: 0,
},
data_len: 0,
}
}
/// Checked variant of [Self::new_from_apid].
#[inline]
pub fn new_from_apid_checked(apid: u16) -> Option<Self> {
Some(SpHeader {
version: 0,
packet_id: PacketId::new_checked(PacketType::Tm, false, apid)?,
psc: PacketSequenceCtrl {
seq_flags: SequenceFlags::Unsegmented,
seq_count: 0,
},
data_len: 0,
})
}
/// This constructor panics if the passed APID exceeds [MAX_APID] or the passed packet sequence
/// count exceeds [MAX_SEQ_COUNT].
///
/// The checked constructor variants can be used to avoid panics.
#[inline]
pub const fn new_from_fields(
ptype: PacketType, ptype: PacketType,
sec_header: bool, sec_header: bool,
apid: u16, apid: u16,
@ -548,20 +513,17 @@ impl SpHeader {
panic!("APID is too large"); panic!("APID is too large");
} }
Self { Self {
psc: PacketSequenceCtrl::new(seq_flags, seq_count), psc: PacketSequenceCtrl::const_new(seq_flags, seq_count),
packet_id: PacketId::new(ptype, sec_header, apid), packet_id: PacketId::const_new(ptype, sec_header, apid),
data_len, data_len,
version: 0, version: 0,
} }
} }
/// Create a new Space Packet Header instance which can be used to create generic /// Create a new Space Packet Header instance which can be used to create generic
/// Space Packets. /// Space Packets. This will return [None] if the APID or sequence count arguments
///
/// This will return [None] if the APID or sequence count arguments
/// exceed [MAX_APID] or [MAX_SEQ_COUNT] respectively. The version field is set to 0b000. /// exceed [MAX_APID] or [MAX_SEQ_COUNT] respectively. The version field is set to 0b000.
#[inline] pub fn new_from_single_fields(
pub fn new_from_fields_checked(
ptype: PacketType, ptype: PacketType,
sec_header: bool, sec_header: bool,
apid: u16, apid: u16,
@ -572,117 +534,56 @@ impl SpHeader {
if seq_count > MAX_SEQ_COUNT || apid > MAX_APID { if seq_count > MAX_SEQ_COUNT || apid > MAX_APID {
return None; return None;
} }
Some(SpHeader::new_from_fields( Some(SpHeader::const_new_from_single_fields(
ptype, sec_header, apid, seq_flags, seq_count, data_len, ptype, sec_header, apid, seq_flags, seq_count, data_len,
)) ))
} }
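To make the split between the checked and the panicking constructor families concrete, a short sketch using the constructor names from the left-hand column (the const variant is also usable in `const` contexts):

```rust
use spacepackets::{PacketType, SequenceFlags, SpHeader};

// The panicking const constructor can be evaluated at compile time.
const TM_HEADER: SpHeader = SpHeader::new_from_fields(
    PacketType::Tm,
    false,
    0x42,
    SequenceFlags::Unsegmented,
    25,
    0,
);

fn main() {
    // The checked variant returns None instead of panicking for out-of-range fields.
    let invalid = SpHeader::new_from_fields_checked(
        PacketType::Tc,
        false,
        0x800, // exceeds MAX_APID (2047)
        SequenceFlags::Unsegmented,
        0,
        0,
    );
    assert!(invalid.is_none());
    assert_eq!(TM_HEADER.packet_id.apid(), 0x42);
}
```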
/// Helper function for telemetry space packet headers. The packet type field will be /// Helper function for telemetry space packet headers. The packet type field will be
/// set accordingly. The secondary header flag field is set to false. /// set accordingly. The secondary header flag field is set to false.
#[inline] pub fn tm(apid: u16, seq_flags: SequenceFlags, seq_count: u16, data_len: u16) -> Option<Self> {
pub fn new_for_tm_checked( Self::new_from_single_fields(PacketType::Tm, false, apid, seq_flags, seq_count, data_len)
apid: u16,
seq_flags: SequenceFlags,
seq_count: u16,
data_len: u16,
) -> Option<Self> {
Self::new_from_fields_checked(PacketType::Tm, false, apid, seq_flags, seq_count, data_len)
} }
/// Helper function for telemetry space packet headers. The packet type field will be /// Helper function for telemetry space packet headers. The packet type field will be
/// set accordingly. The secondary header flag field is set to false. /// set accordingly. The secondary header flag field is set to false.
#[inline] pub fn tc(apid: u16, seq_flags: SequenceFlags, seq_count: u16, data_len: u16) -> Option<Self> {
pub fn new_for_tc_checked( Self::new_from_single_fields(PacketType::Tc, false, apid, seq_flags, seq_count, data_len)
apid: u16,
seq_flags: SequenceFlags,
seq_count: u16,
data_len: u16,
) -> Option<Self> {
Self::new_from_fields_checked(PacketType::Tc, false, apid, seq_flags, seq_count, data_len)
} }
/// This is an unchecked constructor which can panic on invalid input. /// Variant of [SpHeader::tm] which sets the sequence flag field to [SequenceFlags::Unsegmented]
#[inline] pub fn tm_unseg(apid: u16, seq_count: u16, data_len: u16) -> Option<Self> {
pub const fn new_for_tm( Self::tm(apid, SequenceFlags::Unsegmented, seq_count, data_len)
apid: u16,
seq_flags: SequenceFlags,
seq_count: u16,
data_len: u16,
) -> Self {
Self::new_from_fields(PacketType::Tm, false, apid, seq_flags, seq_count, data_len)
} }
/// This is an unchecked constructor which can panic on invalid input. /// Variant of [SpHeader::tc] which sets the sequence flag field to [SequenceFlags::Unsegmented]
#[inline] pub fn tc_unseg(apid: u16, seq_count: u16, data_len: u16) -> Option<Self> {
pub const fn new_for_tc( Self::tc(apid, SequenceFlags::Unsegmented, seq_count, data_len)
apid: u16,
seq_flags: SequenceFlags,
seq_count: u16,
data_len: u16,
) -> Self {
Self::new_from_fields(PacketType::Tc, false, apid, seq_flags, seq_count, data_len)
} }
/// Variant of [SpHeader::new_for_tm_checked] which sets the sequence flag field to [SequenceFlags::Unsegmented] //noinspection RsTraitImplementation
#[inline] delegate!(to self.packet_id {
pub fn new_for_unseg_tm_checked(apid: u16, seq_count: u16, data_len: u16) -> Option<Self> {
Self::new_for_tm_checked(apid, SequenceFlags::Unsegmented, seq_count, data_len)
}
/// Variant of [SpHeader::new_for_tc_checked] which sets the sequence flag field to [SequenceFlags::Unsegmented]
#[inline]
pub fn new_for_unseg_tc_checked(apid: u16, seq_count: u16, data_len: u16) -> Option<Self> {
Self::new_for_tc_checked(apid, SequenceFlags::Unsegmented, seq_count, data_len)
}
/// Variant of [SpHeader::new_for_tc] which sets the sequence flag field to [SequenceFlags::Unsegmented].
///
/// This is an unchecked constructor which can panic on invalid input.
#[inline]
pub const fn new_for_unseg_tc(apid: u16, seq_count: u16, data_len: u16) -> Self {
Self::new_for_tc(apid, SequenceFlags::Unsegmented, seq_count, data_len)
}
/// Variant of [SpHeader::new_for_tm] which sets the sequence flag field to [SequenceFlags::Unsegmented].
///
/// This is an unchecked constructor which can panic on invalid input.
#[inline]
pub const fn new_for_unseg_tm(apid: u16, seq_count: u16, data_len: u16) -> Self {
Self::new_for_tm(apid, SequenceFlags::Unsegmented, seq_count, data_len)
}
delegate! {
to self.packet_id {
/// Returns [false] and fails if the APID exceeds [MAX_APID] /// Returns [false] and fails if the APID exceeds [MAX_APID]
#[inline]
pub fn set_apid(&mut self, apid: u16) -> bool; pub fn set_apid(&mut self, apid: u16) -> bool;
} });
}
delegate! { delegate!(to self.psc {
to self.psc {
/// Returns [false] and fails if the sequence count exceeds [MAX_SEQ_COUNT] /// Returns [false] and fails if the sequence count exceeds [MAX_SEQ_COUNT]
#[inline]
pub fn set_seq_count(&mut self, seq_count: u16) -> bool; pub fn set_seq_count(&mut self, seq_count: u16) -> bool;
} });
}
#[inline]
pub fn set_seq_flags(&mut self, seq_flags: SequenceFlags) { pub fn set_seq_flags(&mut self, seq_flags: SequenceFlags) {
self.psc.seq_flags = seq_flags; self.psc.seq_flags = seq_flags;
} }
#[inline]
pub fn set_sec_header_flag(&mut self) { pub fn set_sec_header_flag(&mut self) {
self.packet_id.sec_header_flag = true; self.packet_id.sec_header_flag = true;
} }
#[inline]
pub fn clear_sec_header_flag(&mut self) { pub fn clear_sec_header_flag(&mut self) {
self.packet_id.sec_header_flag = false; self.packet_id.sec_header_flag = false;
} }
#[inline]
pub fn set_packet_type(&mut self, packet_type: PacketType) { pub fn set_packet_type(&mut self, packet_type: PacketType) {
self.packet_id.ptype = packet_type; self.packet_id.ptype = packet_type;
} }
@ -697,8 +598,8 @@ impl SpHeader {
expected: CCSDS_HEADER_LEN, expected: CCSDS_HEADER_LEN,
}); });
} }
let zc_header = zc::SpHeader::read_from_bytes(&buf[0..CCSDS_HEADER_LEN]) let zc_header = zc::SpHeader::from_bytes(&buf[0..CCSDS_HEADER_LEN])
.map_err(|_| ByteConversionError::ZeroCopyFromError)?; .ok_or(ByteConversionError::ZeroCopyFromError)?;
Ok((Self::from(zc_header), &buf[CCSDS_HEADER_LEN..])) Ok((Self::from(zc_header), &buf[CCSDS_HEADER_LEN..]))
} }
@ -716,19 +617,10 @@ impl SpHeader {
} }
let zc_header: zc::SpHeader = zc::SpHeader::from(*self); let zc_header: zc::SpHeader = zc::SpHeader::from(*self);
zc_header zc_header
.write_to(&mut buf[0..CCSDS_HEADER_LEN]) .to_bytes(&mut buf[0..CCSDS_HEADER_LEN])
.map_err(|_| ByteConversionError::ZeroCopyToError)?; .ok_or(ByteConversionError::ZeroCopyToError)?;
Ok(&mut buf[CCSDS_HEADER_LEN..]) Ok(&mut buf[CCSDS_HEADER_LEN..])
} }
/// Create a vector containing the CCSDS header.
#[cfg(feature = "alloc")]
pub fn to_vec(&self) -> alloc::vec::Vec<u8> {
let mut vec = alloc::vec![0; CCSDS_HEADER_LEN];
// This can not fail.
self.write_to_be_bytes(&mut vec[..]).unwrap();
vec
}
} }
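A round-trip sketch tying `write_to_be_bytes` and `from_be_bytes` together; note that `from_be_bytes` also hands back the slice remaining after the six header bytes:

```rust
use spacepackets::SpHeader;

fn main() {
    let sp_header = SpHeader::new_for_unseg_tc_checked(0x42, 25, 1)
        .expect("error creating CCSDS TC header");
    let mut buf: [u8; 16] = [0; 16];
    sp_header
        .write_to_be_bytes(&mut buf)
        .expect("writing CCSDS header failed");

    // Reading back yields the header plus the remainder of the buffer after the
    // 6 header bytes, which would hold the packet data field of a full packet.
    let (read_back, remainder) =
        SpHeader::from_be_bytes(&buf).expect("reading CCSDS header failed");
    assert_eq!(read_back, sp_header);
    assert_eq!(remainder.len(), 10);
}
```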
impl CcsdsPacket for SpHeader { impl CcsdsPacket for SpHeader {
@ -754,7 +646,6 @@ impl CcsdsPacket for SpHeader {
} }
impl CcsdsPrimaryHeader for SpHeader { impl CcsdsPrimaryHeader for SpHeader {
#[inline]
fn from_composite_fields( fn from_composite_fields(
packet_id: PacketId, packet_id: PacketId,
psc: PacketSequenceCtrl, psc: PacketSequenceCtrl,
@ -779,9 +670,9 @@ sph_from_other!(SpHeader, crate::zc::SpHeader);
pub mod zc { pub mod zc {
use crate::{CcsdsPacket, CcsdsPrimaryHeader, PacketId, PacketSequenceCtrl, VERSION_MASK}; use crate::{CcsdsPacket, CcsdsPrimaryHeader, PacketId, PacketSequenceCtrl, VERSION_MASK};
use zerocopy::byteorder::NetworkEndian; use zerocopy::byteorder::NetworkEndian;
use zerocopy::{FromBytes, Immutable, IntoBytes, Unaligned, U16}; use zerocopy::{AsBytes, FromBytes, FromZeroes, Unaligned, U16};
#[derive(FromBytes, IntoBytes, Immutable, Unaligned, Debug)] #[derive(FromBytes, FromZeroes, AsBytes, Unaligned, Debug)]
#[repr(C)] #[repr(C)]
pub struct SpHeader { pub struct SpHeader {
version_packet_id: U16<NetworkEndian>, version_packet_id: U16<NetworkEndian>,
@ -806,6 +697,14 @@ pub mod zc {
data_len: U16::from(data_len), data_len: U16::from(data_len),
} }
} }
pub fn from_bytes(slice: &[u8]) -> Option<Self> {
SpHeader::read_from(slice)
}
pub fn to_bytes(&self, slice: &mut [u8]) -> Option<()> {
self.write_to(slice)
}
} }
impl CcsdsPacket for SpHeader { impl CcsdsPacket for SpHeader {
@ -814,12 +713,9 @@ pub mod zc {
((self.version_packet_id.get() >> 13) as u8) & 0b111 ((self.version_packet_id.get() >> 13) as u8) & 0b111
} }
#[inline]
fn packet_id(&self) -> PacketId { fn packet_id(&self) -> PacketId {
PacketId::from(self.packet_id_raw()) PacketId::from(self.packet_id_raw())
} }
#[inline]
fn psc(&self) -> PacketSequenceCtrl { fn psc(&self) -> PacketSequenceCtrl {
PacketSequenceCtrl::from(self.psc_raw()) PacketSequenceCtrl::from(self.psc_raw())
} }
@ -829,12 +725,10 @@ pub mod zc {
self.data_len.get() self.data_len.get()
} }
#[inline]
fn packet_id_raw(&self) -> u16 { fn packet_id_raw(&self) -> u16 {
self.version_packet_id.get() & (!VERSION_MASK) self.version_packet_id.get() & (!VERSION_MASK)
} }
#[inline]
fn psc_raw(&self) -> u16 { fn psc_raw(&self) -> u16 {
self.psc.get() self.psc.get()
} }
@ -858,8 +752,6 @@ pub mod zc {
pub(crate) mod tests { pub(crate) mod tests {
use std::collections::HashSet; use std::collections::HashSet;
#[allow(unused_imports)]
use crate::ByteConversionError;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use crate::CcsdsPrimaryHeader; use crate::CcsdsPrimaryHeader;
use crate::{ use crate::{
@ -874,15 +766,14 @@ pub(crate) mod tests {
use postcard::{from_bytes, to_allocvec}; use postcard::{from_bytes, to_allocvec};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{de::DeserializeOwned, Serialize}; use serde::{de::DeserializeOwned, Serialize};
use zerocopy::FromBytes;
const CONST_SP: SpHeader = SpHeader::new( const CONST_SP: SpHeader = SpHeader::new(
PacketId::new_for_tc(true, 0x36), PacketId::const_tc(true, 0x36),
PacketSequenceCtrl::new(SequenceFlags::ContinuationSegment, 0x88), PacketSequenceCtrl::const_new(SequenceFlags::ContinuationSegment, 0x88),
0x90, 0x90,
); );
const PACKET_ID_TM: PacketId = PacketId::new_for_tm(true, 0x22); const PACKET_ID_TM: PacketId = PacketId::const_tm(true, 0x22);
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
pub(crate) fn generic_serde_test<T: Serialize + DeserializeOwned + PartialEq + Debug>( pub(crate) fn generic_serde_test<T: Serialize + DeserializeOwned + PartialEq + Debug>(
@ -894,12 +785,11 @@ pub(crate) mod tests {
} }
#[test] #[test]
#[allow(clippy::assertions_on_constants)]
fn verify_const_packet_id() { fn verify_const_packet_id() {
assert_eq!(PACKET_ID_TM.apid(), 0x22); assert_eq!(PACKET_ID_TM.apid(), 0x22);
assert!(PACKET_ID_TM.sec_header_flag); assert!(PACKET_ID_TM.sec_header_flag);
assert_eq!(PACKET_ID_TM.ptype, PacketType::Tm); assert_eq!(PACKET_ID_TM.ptype, PacketType::Tm);
let const_tc_id = PacketId::new_for_tc(true, 0x23); let const_tc_id = PacketId::const_tc(true, 0x23);
assert_eq!(const_tc_id.ptype, PacketType::Tc); assert_eq!(const_tc_id.ptype, PacketType::Tc);
} }
@ -908,32 +798,32 @@ pub(crate) mod tests {
let id_default = PacketId::default(); let id_default = PacketId::default();
assert_eq!(id_default.ptype, PacketType::Tm); assert_eq!(id_default.ptype, PacketType::Tm);
assert_eq!(id_default.apid, 0x000); assert_eq!(id_default.apid, 0x000);
assert!(!id_default.sec_header_flag); assert_eq!(id_default.sec_header_flag, false);
} }
#[test] #[test]
fn test_packet_id_ctors() { fn test_packet_id_ctors() {
let packet_id = PacketId::new_checked(PacketType::Tc, true, 0x1ff); let packet_id = PacketId::new(PacketType::Tc, true, 0x1ff);
assert!(packet_id.is_some()); assert!(packet_id.is_some());
let packet_id = packet_id.unwrap(); let packet_id = packet_id.unwrap();
assert_eq!(packet_id.apid(), 0x1ff); assert_eq!(packet_id.apid(), 0x1ff);
assert_eq!(packet_id.ptype, PacketType::Tc); assert_eq!(packet_id.ptype, PacketType::Tc);
assert!(packet_id.sec_header_flag); assert_eq!(packet_id.sec_header_flag, true);
let packet_id_tc = PacketId::new_for_tc_checked(true, 0x1ff); let packet_id_tc = PacketId::tc(true, 0x1ff);
assert!(packet_id_tc.is_some()); assert!(packet_id_tc.is_some());
let packet_id_tc = packet_id_tc.unwrap(); let packet_id_tc = packet_id_tc.unwrap();
assert_eq!(packet_id_tc, packet_id); assert_eq!(packet_id_tc, packet_id);
let packet_id_tm = PacketId::new_for_tm_checked(true, 0x2ff); let packet_id_tm = PacketId::tm(true, 0x2ff);
assert!(packet_id_tm.is_some()); assert!(packet_id_tm.is_some());
let packet_id_tm = packet_id_tm.unwrap(); let packet_id_tm = packet_id_tm.unwrap();
assert!(packet_id_tm.sec_header_flag); assert_eq!(packet_id_tm.sec_header_flag, true);
assert_eq!(packet_id_tm.ptype, PacketType::Tm); assert_eq!(packet_id_tm.ptype, PacketType::Tm);
assert_eq!(packet_id_tm.apid, 0x2ff); assert_eq!(packet_id_tm.apid, 0x2ff);
} }
#[test] #[test]
fn verify_const_sp_header() { fn verify_const_sp_header() {
assert!(CONST_SP.sec_header_flag()); assert_eq!(CONST_SP.sec_header_flag(), true);
assert_eq!(CONST_SP.apid(), 0x36); assert_eq!(CONST_SP.apid(), 0x36);
assert_eq!( assert_eq!(
CONST_SP.sequence_flags(), CONST_SP.sequence_flags(),
@ -974,7 +864,7 @@ pub(crate) mod tests {
#[test] #[test]
fn test_packet_id() { fn test_packet_id() {
let packet_id = let packet_id =
PacketId::new_checked(PacketType::Tm, false, 0x42).expect("Packet ID creation failed"); PacketId::new(PacketType::Tm, false, 0x42).expect("Packet ID creation failed");
assert_eq!(packet_id.raw(), 0x0042); assert_eq!(packet_id.raw(), 0x0042);
let packet_id_from_raw = PacketId::from(packet_id.raw()); let packet_id_from_raw = PacketId::from(packet_id.raw());
assert_eq!( assert_eq!(
@ -982,26 +872,26 @@ pub(crate) mod tests {
PacketType::Tm PacketType::Tm
); );
assert_eq!(packet_id_from_raw, packet_id); assert_eq!(packet_id_from_raw, packet_id);
let packet_id_from_new = PacketId::new_checked(PacketType::Tm, false, 0x42).unwrap(); let packet_id_from_new = PacketId::new(PacketType::Tm, false, 0x42).unwrap();
assert_eq!(packet_id_from_new, packet_id); assert_eq!(packet_id_from_new, packet_id);
} }
#[test] #[test]
fn test_invalid_packet_id() { fn test_invalid_packet_id() {
let packet_id_invalid = PacketId::new_checked(PacketType::Tc, true, 0xFFFF); let packet_id_invalid = PacketId::new(PacketType::Tc, true, 0xFFFF);
assert!(packet_id_invalid.is_none()); assert!(packet_id_invalid.is_none());
} }
#[test] #[test]
fn test_invalid_apid_setter() { fn test_invalid_apid_setter() {
let mut packet_id = let mut packet_id =
PacketId::new_checked(PacketType::Tm, false, 0x42).expect("Packet ID creation failed"); PacketId::new(PacketType::Tm, false, 0x42).expect("Packet ID creation failed");
assert!(!packet_id.set_apid(0xffff)); assert!(!packet_id.set_apid(0xffff));
} }
#[test] #[test]
fn test_invalid_seq_count() { fn test_invalid_seq_count() {
let mut psc = PacketSequenceCtrl::new_checked(SequenceFlags::ContinuationSegment, 77) let mut psc = PacketSequenceCtrl::new(SequenceFlags::ContinuationSegment, 77)
.expect("PSC creation failed"); .expect("PSC creation failed");
assert_eq!(psc.seq_count(), 77); assert_eq!(psc.seq_count(), 77);
assert!(!psc.set_seq_count(0xffff)); assert!(!psc.set_seq_count(0xffff));
@ -1009,7 +899,7 @@ pub(crate) mod tests {
#[test] #[test]
fn test_packet_seq_ctrl() { fn test_packet_seq_ctrl() {
let mut psc = PacketSequenceCtrl::new_checked(SequenceFlags::ContinuationSegment, 77) let mut psc = PacketSequenceCtrl::new(SequenceFlags::ContinuationSegment, 77)
.expect("PSC creation failed"); .expect("PSC creation failed");
assert_eq!(psc.raw(), 77); assert_eq!(psc.raw(), 77);
let psc_from_raw = PacketSequenceCtrl::from(psc.raw()); let psc_from_raw = PacketSequenceCtrl::from(psc.raw());
@ -1018,18 +908,16 @@ pub(crate) mod tests {
assert!(!psc.set_seq_count(2u16.pow(15))); assert!(!psc.set_seq_count(2u16.pow(15)));
assert_eq!(psc.raw(), 77); assert_eq!(psc.raw(), 77);
let psc_invalid = PacketSequenceCtrl::new_checked(SequenceFlags::FirstSegment, 0xFFFF); let psc_invalid = PacketSequenceCtrl::new(SequenceFlags::FirstSegment, 0xFFFF);
assert!(psc_invalid.is_none()); assert!(psc_invalid.is_none());
let psc_from_new = let psc_from_new = PacketSequenceCtrl::new(SequenceFlags::ContinuationSegment, 77).unwrap();
PacketSequenceCtrl::new_checked(SequenceFlags::ContinuationSegment, 77).unwrap();
assert_eq!(psc_from_new, psc); assert_eq!(psc_from_new, psc);
} }
#[test] #[test]
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
fn test_serde_sph() { fn test_serde_sph() {
let sp_header = let sp_header = SpHeader::tc_unseg(0x42, 12, 0).expect("Error creating SP header");
SpHeader::new_for_unseg_tc_checked(0x42, 12, 0).expect("Error creating SP header");
assert_eq!(sp_header.ccsds_version(), 0b000); assert_eq!(sp_header.ccsds_version(), 0b000);
assert!(sp_header.is_tc()); assert!(sp_header.is_tc());
assert!(!sp_header.sec_header_flag()); assert!(!sp_header.sec_header_flag());
@ -1051,8 +939,7 @@ pub(crate) mod tests {
assert_eq!(sp_header.ccsds_version(), 0b000); assert_eq!(sp_header.ccsds_version(), 0b000);
assert_eq!(sp_header.data_len, 0); assert_eq!(sp_header.data_len, 0);
let sp_header = let sp_header = SpHeader::tm_unseg(0x7, 22, 36).expect("Error creating SP header");
SpHeader::new_for_unseg_tm_checked(0x7, 22, 36).expect("Error creating SP header");
assert_eq!(sp_header.ccsds_version(), 0b000); assert_eq!(sp_header.ccsds_version(), 0b000);
assert!(sp_header.is_tm()); assert!(sp_header.is_tm());
assert!(!sp_header.sec_header_flag()); assert!(!sp_header.sec_header_flag());
@ -1066,8 +953,8 @@ pub(crate) mod tests {
assert_eq!(sp_header.ccsds_version(), 0b000); assert_eq!(sp_header.ccsds_version(), 0b000);
let from_comp_fields = SpHeader::from_composite_fields( let from_comp_fields = SpHeader::from_composite_fields(
PacketId::new(PacketType::Tc, true, 0x42), PacketId::new(PacketType::Tc, true, 0x42).unwrap(),
PacketSequenceCtrl::new(SequenceFlags::Unsegmented, 0x7), PacketSequenceCtrl::new(SequenceFlags::Unsegmented, 0x7).unwrap(),
0, 0,
None, None,
); );
@ -1084,7 +971,7 @@ pub(crate) mod tests {
#[test] #[test]
fn test_setters() { fn test_setters() {
let sp_header = SpHeader::new_for_tc_checked(0x42, SequenceFlags::Unsegmented, 25, 0); let sp_header = SpHeader::tc(0x42, SequenceFlags::Unsegmented, 25, 0);
assert!(sp_header.is_some()); assert!(sp_header.is_some());
let mut sp_header = sp_header.unwrap(); let mut sp_header = sp_header.unwrap();
sp_header.set_apid(0x12); sp_header.set_apid(0x12);
@ -1102,7 +989,7 @@ pub(crate) mod tests {
#[test] #[test]
fn test_tc_ctor() { fn test_tc_ctor() {
let sp_header = SpHeader::new_for_tc_checked(0x42, SequenceFlags::Unsegmented, 25, 0); let sp_header = SpHeader::tc(0x42, SequenceFlags::Unsegmented, 25, 0);
assert!(sp_header.is_some()); assert!(sp_header.is_some());
let sp_header = sp_header.unwrap(); let sp_header = sp_header.unwrap();
verify_sp_fields(PacketType::Tc, &sp_header); verify_sp_fields(PacketType::Tc, &sp_header);
@ -1110,35 +997,23 @@ pub(crate) mod tests {
#[test] #[test]
fn test_tc_ctor_unseg() { fn test_tc_ctor_unseg() {
let sp_header = SpHeader::new_for_unseg_tc_checked(0x42, 25, 0); let sp_header = SpHeader::tc_unseg(0x42, 25, 0);
assert!(sp_header.is_some()); assert!(sp_header.is_some());
let sp_header = sp_header.unwrap(); let sp_header = sp_header.unwrap();
verify_sp_fields(PacketType::Tc, &sp_header); verify_sp_fields(PacketType::Tc, &sp_header);
} }
#[test]
fn test_tc_ctor_unseg_const() {
let sp_header = SpHeader::new_for_unseg_tc(0x42, 25, 0);
verify_sp_fields(PacketType::Tc, &sp_header);
}
#[test] #[test]
fn test_tm_ctor() { fn test_tm_ctor() {
let sp_header = SpHeader::new_for_tm_checked(0x42, SequenceFlags::Unsegmented, 25, 0); let sp_header = SpHeader::tm(0x42, SequenceFlags::Unsegmented, 25, 0);
assert!(sp_header.is_some()); assert!(sp_header.is_some());
let sp_header = sp_header.unwrap(); let sp_header = sp_header.unwrap();
verify_sp_fields(PacketType::Tm, &sp_header); verify_sp_fields(PacketType::Tm, &sp_header);
} }
#[test]
fn test_tm_ctor_const() {
let sp_header = SpHeader::new_for_tm(0x42, SequenceFlags::Unsegmented, 25, 0);
verify_sp_fields(PacketType::Tm, &sp_header);
}
#[test] #[test]
fn test_tm_ctor_unseg() { fn test_tm_ctor_unseg() {
let sp_header = SpHeader::new_for_unseg_tm_checked(0x42, 25, 0); let sp_header = SpHeader::tm_unseg(0x42, 25, 0);
assert!(sp_header.is_some()); assert!(sp_header.is_some());
let sp_header = sp_header.unwrap(); let sp_header = sp_header.unwrap();
verify_sp_fields(PacketType::Tm, &sp_header); verify_sp_fields(PacketType::Tm, &sp_header);
@ -1154,10 +1029,10 @@ pub(crate) mod tests {
#[test] #[test]
fn test_zc_sph() { fn test_zc_sph() {
use zerocopy::IntoBytes; use zerocopy::AsBytes;
let sp_header = SpHeader::new_for_unseg_tc_checked(0x7FF, pow(2, 14) - 1, 0) let sp_header =
.expect("Error creating SP header"); SpHeader::tc_unseg(0x7FF, pow(2, 14) - 1, 0).expect("Error creating SP header");
assert_eq!(sp_header.ptype(), PacketType::Tc); assert_eq!(sp_header.ptype(), PacketType::Tc);
assert_eq!(sp_header.apid(), 0x7FF); assert_eq!(sp_header.apid(), 0x7FF);
assert_eq!(sp_header.data_len(), 0); assert_eq!(sp_header.data_len(), 0);
@ -1174,7 +1049,7 @@ pub(crate) mod tests {
assert_eq!(slice[5], 0x00); assert_eq!(slice[5], 0x00);
let mut slice = [0; 6]; let mut slice = [0; 6];
sp_header_zc.write_to(slice.as_mut_slice()).unwrap(); sp_header_zc.write_to(slice.as_mut_slice());
assert_eq!(slice.len(), 6); assert_eq!(slice.len(), 6);
assert_eq!(slice[0], 0x17); assert_eq!(slice[0], 0x17);
assert_eq!(slice[1], 0xFF); assert_eq!(slice[1], 0xFF);
@ -1185,7 +1060,7 @@ pub(crate) mod tests {
let mut test_vec = vec![0_u8; 6]; let mut test_vec = vec![0_u8; 6];
let slice = test_vec.as_mut_slice(); let slice = test_vec.as_mut_slice();
sp_header_zc.write_to(slice).unwrap(); sp_header_zc.write_to(slice);
let slice = test_vec.as_slice(); let slice = test_vec.as_slice();
assert_eq!(slice.len(), 6); assert_eq!(slice.len(), 6);
assert_eq!(slice[0], 0x17); assert_eq!(slice[0], 0x17);
@ -1195,8 +1070,8 @@ pub(crate) mod tests {
assert_eq!(slice[4], 0x00); assert_eq!(slice[4], 0x00);
assert_eq!(slice[5], 0x00); assert_eq!(slice[5], 0x00);
let sp_header = zc::SpHeader::read_from_bytes(slice); let sp_header = zc::SpHeader::from_bytes(slice);
assert!(sp_header.is_ok()); assert!(sp_header.is_some());
let sp_header = sp_header.unwrap(); let sp_header = sp_header.unwrap();
assert_eq!(sp_header.ccsds_version(), 0b000); assert_eq!(sp_header.ccsds_version(), 0b000);
assert_eq!(sp_header.packet_id_raw(), 0x17FF); assert_eq!(sp_header.packet_id_raw(), 0x17FF);
@ -1222,40 +1097,4 @@ pub(crate) mod tests {
let mut id_set = HashSet::new(); let mut id_set = HashSet::new();
id_set.insert(PacketId::from(1_u16)); id_set.insert(PacketId::from(1_u16));
} }
#[test]
fn sp_header_from_apid() {
let sp_header = SpHeader::new_from_apid(0x03);
assert_eq!(sp_header.apid(), 0x03);
assert_eq!(sp_header.data_len(), 0);
}
#[test]
fn sp_header_from_apid_checked() {
let sp_header = SpHeader::new_from_apid_checked(0x03).unwrap();
assert_eq!(sp_header.apid(), 0x03);
assert_eq!(sp_header.data_len(), 0);
}
#[cfg(feature = "defmt")]
fn is_defmt_format<T: defmt::Format>(_t: T) {}
#[test]
#[cfg(feature = "defmt")]
fn test_defmt_format() {
is_defmt_format(ByteConversionError::ToSliceTooSmall {
found: 1,
expected: 2,
});
}
#[test]
fn test_sp_header_as_vec() {
let sp_header = SpHeader::new_for_unseg_tc(0x42, 25, 1);
let sp_header_as_vec = sp_header.to_vec();
let sp_header_read_back = SpHeader::from_be_bytes(&sp_header_as_vec)
.expect("Error reading back SP header")
.0;
assert_eq!(sp_header, sp_header_read_back);
}
} }


@ -1,250 +0,0 @@
use crate::MAX_SEQ_COUNT;
use core::cell::Cell;
use paste::paste;
#[cfg(feature = "std")]
pub use stdmod::*;
/// Core trait for objects which can provide a sequence count.
///
/// The core functions are not mutable on purpose to allow easier usage with
/// static structs when using the interior mutability pattern. This can be achieved by using
/// [Cell], [core::cell::RefCell] or atomic types.
pub trait SequenceCountProvider {
type Raw: Into<u64>;
const MAX_BIT_WIDTH: usize;
fn get(&self) -> Self::Raw;
fn increment(&self);
fn get_and_increment(&self) -> Self::Raw {
let val = self.get();
self.increment();
val
}
}
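The interior mutability mentioned above means a counter can be advanced through a shared reference. A usage sketch with the `SeqCountProviderSimple` type defined below (this module is removed in the left-hand column of the diff, so the import path assumes the previous crate layout):

```rust
use spacepackets::seq_count::{SeqCountProviderSimple, SequenceCountProvider};

// The provider is taken by shared reference: increment() works through &self
// because the counter state lives in a Cell.
fn next_seq_count(provider: &impl SequenceCountProvider<Raw = u16>) -> u16 {
    provider.get_and_increment()
}

fn main() {
    let provider = SeqCountProviderSimple::<u16>::default();
    assert_eq!(next_seq_count(&provider), 0);
    assert_eq!(next_seq_count(&provider), 1);
    assert_eq!(provider.get(), 2);
}
```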
#[derive(Clone)]
pub struct SeqCountProviderSimple<T: Copy> {
seq_count: Cell<T>,
max_val: T,
}
macro_rules! impl_for_primitives {
($($ty: ident,)+) => {
$(
paste! {
impl SeqCountProviderSimple<$ty> {
pub fn [<new_custom_max_val_ $ty>](max_val: $ty) -> Self {
Self {
seq_count: Cell::new(0),
max_val,
}
}
pub fn [<new_ $ty>]() -> Self {
Self {
seq_count: Cell::new(0),
max_val: $ty::MAX
}
}
}
impl Default for SeqCountProviderSimple<$ty> {
fn default() -> Self {
Self::[<new_ $ty>]()
}
}
impl SequenceCountProvider for SeqCountProviderSimple<$ty> {
type Raw = $ty;
const MAX_BIT_WIDTH: usize = core::mem::size_of::<Self::Raw>() * 8;
fn get(&self) -> Self::Raw {
self.seq_count.get()
}
fn increment(&self) {
self.get_and_increment();
}
fn get_and_increment(&self) -> Self::Raw {
let curr_count = self.seq_count.get();
if curr_count == self.max_val {
self.seq_count.set(0);
} else {
self.seq_count.set(curr_count + 1);
}
curr_count
}
}
}
)+
}
}
impl_for_primitives!(u8, u16, u32, u64,);
/// This is a sequence count provider which wraps around at [MAX_SEQ_COUNT].
#[derive(Clone)]
pub struct CcsdsSimpleSeqCountProvider {
provider: SeqCountProviderSimple<u16>,
}
impl Default for CcsdsSimpleSeqCountProvider {
fn default() -> Self {
Self {
provider: SeqCountProviderSimple::new_custom_max_val_u16(MAX_SEQ_COUNT),
}
}
}
impl SequenceCountProvider for CcsdsSimpleSeqCountProvider {
type Raw = u16;
const MAX_BIT_WIDTH: usize = core::mem::size_of::<Self::Raw>() * 8;
delegate::delegate! {
to self.provider {
fn get(&self) -> u16;
fn increment(&self);
fn get_and_increment(&self) -> u16;
}
}
}
#[cfg(feature = "std")]
pub mod stdmod {
use super::*;
use std::sync::{Arc, Mutex};
macro_rules! sync_clonable_seq_counter_impl {
($($ty: ident,)+) => {
$(paste! {
/// These sequence counters can be shared between threads and can also be
/// configured to wrap around at specified maximum values. Please note that
/// the API provided by this struct will not panic on [Mutex] lock errors,
/// but it will yield 0 for the getter functions.
#[derive(Clone, Default)]
pub struct [<SeqCountProviderSync $ty:upper>] {
seq_count: Arc<Mutex<$ty>>,
max_val: $ty
}
impl [<SeqCountProviderSync $ty:upper>] {
pub fn new() -> Self {
Self::new_with_max_val($ty::MAX)
}
pub fn new_with_max_val(max_val: $ty) -> Self {
Self {
seq_count: Arc::default(),
max_val
}
}
}
impl SequenceCountProvider for [<SeqCountProviderSync $ty:upper>] {
type Raw = $ty;
const MAX_BIT_WIDTH: usize = core::mem::size_of::<Self::Raw>() * 8;
fn get(&self) -> $ty {
match self.seq_count.lock() {
Ok(counter) => *counter,
Err(_) => 0
}
}
fn increment(&self) {
self.get_and_increment();
}
fn get_and_increment(&self) -> $ty {
match self.seq_count.lock() {
Ok(mut counter) => {
let val = *counter;
if val == self.max_val {
*counter = 0;
} else {
*counter += 1;
}
val
}
Err(_) => 0,
}
}
}
})+
}
}
sync_clonable_seq_counter_impl!(u8, u16, u32, u64,);
}
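A sketch of the thread-sharing use case described in the doc comment above, again assuming the pre-removal spacepackets::seq_count path and the generated SeqCountProviderSyncU16 type:

```rust
use spacepackets::seq_count::{SeqCountProviderSyncU16, SequenceCountProvider};
use std::thread;

fn main() {
    // The counter counts 0..=999; the 1000th increment wraps it back to 0.
    let counter = SeqCountProviderSyncU16::new_with_max_val(999);
    let mut handles = Vec::new();
    for _ in 0..4 {
        // Clones share the same Arc<Mutex<u16>> underneath.
        let counter = counter.clone();
        handles.push(thread::spawn(move || {
            for _ in 0..250 {
                counter.increment();
            }
        }));
    }
    for handle in handles {
        handle.join().unwrap();
    }
    // 4 * 250 = 1000 increments on a counter wrapping after 999 -> back to 0.
    assert_eq!(counter.get(), 0);
}
```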
#[cfg(test)]
mod tests {
use crate::seq_count::{
CcsdsSimpleSeqCountProvider, SeqCountProviderSimple, SeqCountProviderSyncU8,
SequenceCountProvider,
};
use crate::MAX_SEQ_COUNT;
#[test]
fn test_u8_counter() {
let u8_counter = SeqCountProviderSimple::<u8>::default();
assert_eq!(u8_counter.get(), 0);
assert_eq!(u8_counter.get_and_increment(), 0);
assert_eq!(u8_counter.get_and_increment(), 1);
assert_eq!(u8_counter.get(), 2);
}
#[test]
fn test_u8_counter_overflow() {
let u8_counter = SeqCountProviderSimple::new_u8();
for _ in 0..256 {
u8_counter.increment();
}
assert_eq!(u8_counter.get(), 0);
}
#[test]
fn test_ccsds_counter() {
let ccsds_counter = CcsdsSimpleSeqCountProvider::default();
assert_eq!(ccsds_counter.get(), 0);
assert_eq!(ccsds_counter.get_and_increment(), 0);
assert_eq!(ccsds_counter.get_and_increment(), 1);
assert_eq!(ccsds_counter.get(), 2);
}
#[test]
fn test_ccsds_counter_overflow() {
let ccsds_counter = CcsdsSimpleSeqCountProvider::default();
for _ in 0..MAX_SEQ_COUNT + 1 {
ccsds_counter.increment();
}
assert_eq!(ccsds_counter.get(), 0);
}
#[test]
fn test_atomic_ref_counters() {
let sync_u8_counter = SeqCountProviderSyncU8::new();
assert_eq!(sync_u8_counter.get(), 0);
assert_eq!(sync_u8_counter.get_and_increment(), 0);
assert_eq!(sync_u8_counter.get_and_increment(), 1);
assert_eq!(sync_u8_counter.get(), 2);
}
#[test]
fn test_atomic_ref_counters_overflow() {
let sync_u8_counter = SeqCountProviderSyncU8::new();
for _ in 0..u8::MAX as u16 + 1 {
sync_u8_counter.increment();
}
assert_eq!(sync_u8_counter.get(), 0);
}
#[test]
fn test_atomic_ref_counters_overflow_custom_max_val() {
let sync_u8_counter = SeqCountProviderSyncU8::new_with_max_val(128);
for _ in 0..129 {
sync_u8_counter.increment();
}
assert_eq!(sync_u8_counter.get(), 0);
}
}


@ -40,11 +40,13 @@ pub mod alloc_mod_chrono {
}; };
/// Generates a time code formatter using the [FMT_STR_CODE_A_WITH_SIZE] format. /// Generates a time code formatter using the [FMT_STR_CODE_A_WITH_SIZE] format.
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "alloc", feature = "chrono"))))]
pub fn generate_time_code_a(date: &DateTime<Utc>) -> DelayedFormat<StrftimeItems<'static>> { pub fn generate_time_code_a(date: &DateTime<Utc>) -> DelayedFormat<StrftimeItems<'static>> {
date.format(FMT_STR_CODE_A_WITH_SIZE.0) date.format(FMT_STR_CODE_A_WITH_SIZE.0)
} }
/// Generates a time code formatter using the [FMT_STR_CODE_A_TERMINATED_WITH_SIZE] format. /// Generates a time code formatter using the [FMT_STR_CODE_A_TERMINATED_WITH_SIZE] format.
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "alloc", feature = "chrono"))))]
pub fn generate_time_code_a_terminated( pub fn generate_time_code_a_terminated(
date: &DateTime<Utc>, date: &DateTime<Utc>,
) -> DelayedFormat<StrftimeItems<'static>> { ) -> DelayedFormat<StrftimeItems<'static>> {
@ -52,11 +54,13 @@ pub mod alloc_mod_chrono {
} }
/// Generates a time code formatter using the [FMT_STR_CODE_B_WITH_SIZE] format. /// Generates a time code formatter using the [FMT_STR_CODE_B_WITH_SIZE] format.
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "alloc", feature = "chrono"))))]
pub fn generate_time_code_b(date: &DateTime<Utc>) -> DelayedFormat<StrftimeItems<'static>> { pub fn generate_time_code_b(date: &DateTime<Utc>) -> DelayedFormat<StrftimeItems<'static>> {
date.format(FMT_STR_CODE_B_WITH_SIZE.0) date.format(FMT_STR_CODE_B_WITH_SIZE.0)
} }
/// Generates a time code formatter using the [FMT_STR_CODE_B_TERMINATED_WITH_SIZE] format. /// Generates a time code formatter using the [FMT_STR_CODE_B_TERMINATED_WITH_SIZE] format.
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "alloc", feature = "chrono"))))]
pub fn generate_time_code_b_terminated( pub fn generate_time_code_b_terminated(
date: &DateTime<Utc>, date: &DateTime<Utc>,
) -> DelayedFormat<StrftimeItems<'static>> { ) -> DelayedFormat<StrftimeItems<'static>> {
@ -71,19 +75,7 @@ mod tests {
use std::format; use std::format;
#[test] #[test]
fn test_ascii_timestamp_a_unterminated_epoch() { fn test_ascii_timestamp_a_unterminated() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_a(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 10);
assert_eq!(stamp.len(), FMT_STR_CODE_A_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_a_unterminated_now() {
let date = Utc::now(); let date = Utc::now();
let stamp_formatter = generate_time_code_a(&date); let stamp_formatter = generate_time_code_a(&date);
let stamp = format!("{}", stamp_formatter); let stamp = format!("{}", stamp_formatter);
@ -94,24 +86,7 @@ mod tests {
} }
#[test] #[test]
fn test_ascii_timestamp_a_terminated_epoch() { fn test_ascii_timestamp_a_terminated() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_a_terminated(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 10);
let z_terminator = stamp.find('Z');
assert!(z_terminator.is_some());
assert_eq!(
z_terminator.unwrap(),
FMT_STR_CODE_A_TERMINATED_WITH_SIZE.1 - 1
);
assert_eq!(stamp.len(), FMT_STR_CODE_A_TERMINATED_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_a_terminated_now() {
let date = Utc::now(); let date = Utc::now();
let stamp_formatter = generate_time_code_a_terminated(&date); let stamp_formatter = generate_time_code_a_terminated(&date);
let stamp = format!("{}", stamp_formatter); let stamp = format!("{}", stamp_formatter);
@ -128,19 +103,7 @@ mod tests {
} }
#[test] #[test]
fn test_ascii_timestamp_b_unterminated_epoch() { fn test_ascii_timestamp_b_unterminated() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_b(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 8);
assert_eq!(stamp.len(), FMT_STR_CODE_B_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_b_unterminated_now() {
let date = Utc::now(); let date = Utc::now();
let stamp_formatter = generate_time_code_b(&date); let stamp_formatter = generate_time_code_b(&date);
let stamp = format!("{}", stamp_formatter); let stamp = format!("{}", stamp_formatter);
@ -151,25 +114,7 @@ mod tests {
} }
#[test] #[test]
fn test_ascii_timestamp_b_terminated_epoch() { fn test_ascii_timestamp_b_terminated() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_b_terminated(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 8);
let z_terminator = stamp.find('Z');
assert!(z_terminator.is_some());
assert_eq!(
z_terminator.unwrap(),
FMT_STR_CODE_B_TERMINATED_WITH_SIZE.1 - 1
);
assert_eq!(stamp.len(), FMT_STR_CODE_B_TERMINATED_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_b_terminated_now() {
let date = Utc::now(); let date = Utc::now();
let stamp_formatter = generate_time_code_b_terminated(&date); let stamp_formatter = generate_time_code_b_terminated(&date);
let stamp = format!("{}", stamp_formatter); let stamp = format!("{}", stamp_formatter);


@ -1,19 +1,23 @@
//! Module to generate or read CCSDS Day Segmented (CDS) timestamps as specified in //! Module to generate or read CCSDS Day Segmented (CDS) timestamps as specified in
//! [CCSDS 301.0-B-4](https://public.ccsds.org/Pubs/301x0b4e1.pdf) section 3.3. //! [CCSDS 301.0-B-4](https://public.ccsds.org/Pubs/301x0b4e1.pdf) section 3.3.
//! //!
//! The core data structure to do this is the [CdsTime] struct and the //! The core data structure to do this is the [TimeProvider] struct and the
//! [get_dyn_time_provider_from_bytes] function to retrieve correct instances of the //! [get_dyn_time_provider_from_bytes] function to retrieve correct instances of the
//! struct from a bytestream. //! struct from a bytestream.
use crate::private::Sealed; use crate::private::Sealed;
use crate::ByteConversionError; use crate::ByteConversionError;
use core::cmp::Ordering; use core::cmp::Ordering;
use core::fmt::Debug; use core::fmt::{Debug, Display, Formatter};
use core::ops::{Add, AddAssign}; use core::ops::{Add, AddAssign};
use core::time::Duration; use core::time::Duration;
use delegate::delegate;
#[cfg(feature = "std")] #[cfg(feature = "std")]
use super::StdTimestampError; use super::StdTimestampError;
#[cfg(feature = "std")] #[cfg(feature = "std")]
use std::error::Error;
#[cfg(feature = "std")]
use std::time::{SystemTime, SystemTimeError}; use std::time::{SystemTime, SystemTimeError};
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
@ -30,9 +34,8 @@ use core::any::Any;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::{ use super::{
ccsds_to_unix_days, unix_to_ccsds_days, CcsdsTimeCode, CcsdsTimeProvider, ccsds_to_unix_days, unix_to_ccsds_days, CcsdsTimeCode, CcsdsTimeProvider, TimeReader,
DateBeforeCcsdsEpochError, TimeReader, TimeWriter, TimestampError, UnixTime, MS_PER_DAY, TimeWriter, TimestampError, UnixTime, MS_PER_DAY, SECONDS_PER_DAY,
SECONDS_PER_DAY,
}; };
/// Base value for the preamble field for a time field parser to determine the time field type. /// Base value for the preamble field for a time field parser to determine the time field type.
@ -55,7 +58,7 @@ pub trait ProvidesDaysLength: Sealed + Clone {
+ Into<i64>; + Into<i64>;
} }
/// Type level token to be used as a generic parameter to [CdsTime]. /// Type level token to be used as a generic parameter to [TimeProvider].
#[derive(Debug, Copy, Clone, PartialEq, Eq, Default)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Default)]
pub struct DaysLen16Bits {} pub struct DaysLen16Bits {}
@ -64,7 +67,7 @@ impl ProvidesDaysLength for DaysLen16Bits {
type FieldType = u16; type FieldType = u16;
} }
/// Type level token to be used as a generic parameter to [CdsTime]. /// Type level token to be used as a generic parameter to [TimeProvider].
#[derive(Debug, Copy, Clone, PartialEq, Eq, Default)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Default)]
pub struct DaysLen24Bits {} pub struct DaysLen24Bits {}
impl Sealed for DaysLen24Bits {} impl Sealed for DaysLen24Bits {}
@ -74,7 +77,6 @@ impl ProvidesDaysLength for DaysLen24Bits {
#[derive(Debug, PartialEq, Eq, Copy, Clone)] #[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum LengthOfDaySegment { pub enum LengthOfDaySegment {
Short16Bits = 0, Short16Bits = 0,
Long24Bits = 1, Long24Bits = 1,
@ -89,21 +91,34 @@ pub enum SubmillisPrecision {
Reserved = 0b11, Reserved = 0b11,
} }
#[derive(Debug, PartialEq, Eq, Copy, Clone, thiserror::Error)] #[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum CdsError { pub enum CdsError {
/// CCSDS days value exceeds maximum allowed size or is negative /// CCSDS days value exceeds maximum allowed size or is negative
#[error("invalid ccsds days {0}")]
InvalidCcsdsDays(i64), InvalidCcsdsDays(i64),
/// There are distinct constructors depending on the days field width detected in the preamble /// There are distinct constructors depending on the days field width detected in the preamble
/// field. This error will be returned if there is a mismatch. /// field. This error will be returned if there is a mismatch.
#[error("wrong constructor for length of day {0:?} detected in preamble")]
InvalidCtorForDaysOfLenInPreamble(LengthOfDaySegment), InvalidCtorForDaysOfLenInPreamble(LengthOfDaySegment),
#[error("date before CCSDS epoch: {0}")]
DateBeforeCcsdsEpoch(#[from] DateBeforeCcsdsEpochError),
} }
impl Display for CdsError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
CdsError::InvalidCcsdsDays(days) => {
write!(f, "invalid ccsds days {days}")
}
CdsError::InvalidCtorForDaysOfLenInPreamble(length_of_day) => {
write!(
f,
"wrong constructor for length of day {length_of_day:?} detected in preamble",
)
}
}
}
}
#[cfg(feature = "std")]
impl Error for CdsError {}
pub fn length_of_day_segment_from_pfield(pfield: u8) -> LengthOfDaySegment { pub fn length_of_day_segment_from_pfield(pfield: u8) -> LengthOfDaySegment {
if (pfield >> 2) & 0b1 == 1 { if (pfield >> 2) & 0b1 == 1 {
return LengthOfDaySegment::Long24Bits; return LengthOfDaySegment::Long24Bits;
@ -111,7 +126,6 @@ pub fn length_of_day_segment_from_pfield(pfield: u8) -> LengthOfDaySegment {
LengthOfDaySegment::Short16Bits LengthOfDaySegment::Short16Bits
} }
#[inline]
pub fn precision_from_pfield(pfield: u8) -> SubmillisPrecision { pub fn precision_from_pfield(pfield: u8) -> SubmillisPrecision {
match pfield & 0b11 { match pfield & 0b11 {
0b01 => SubmillisPrecision::Microseconds, 0b01 => SubmillisPrecision::Microseconds,
@ -142,19 +156,19 @@ pub fn precision_from_pfield(pfield: u8) -> SubmillisPrecision {
/// ///
/// ``` /// ```
/// use core::time::Duration; /// use core::time::Duration;
/// use spacepackets::time::cds::{CdsTime, length_of_day_segment_from_pfield, LengthOfDaySegment}; /// use spacepackets::time::cds::{TimeProvider, length_of_day_segment_from_pfield, LengthOfDaySegment};
/// use spacepackets::time::{TimeWriter, CcsdsTimeCode, CcsdsTimeProvider}; /// use spacepackets::time::{TimeWriter, CcsdsTimeCodes, CcsdsTimeProvider};
/// ///
/// let timestamp_now = CdsTime::now_with_u16_days().unwrap(); /// let timestamp_now = TimeProvider::from_now_with_u16_days().unwrap();
/// let mut raw_stamp = [0; 7]; /// let mut raw_stamp = [0; 7];
/// { /// {
/// let written = timestamp_now.write_to_bytes(&mut raw_stamp).unwrap(); /// let written = timestamp_now.write_to_bytes(&mut raw_stamp).unwrap();
/// assert_eq!((raw_stamp[0] >> 4) & 0b111, CcsdsTimeCode::Cds as u8); /// assert_eq!((raw_stamp[0] >> 4) & 0b111, CcsdsTimeCodes::Cds as u8);
/// assert_eq!(written, 7); /// assert_eq!(written, 7);
/// } /// }
/// { /// {
/// assert_eq!(length_of_day_segment_from_pfield(raw_stamp[0]), LengthOfDaySegment::Short16Bits); /// assert_eq!(length_of_day_segment_from_pfield(raw_stamp[0]), LengthOfDaySegment::Short16Bits);
/// let read_result = CdsTime::from_bytes_with_u16_days(&raw_stamp); /// let read_result = TimeProvider::from_bytes_with_u16_days(&raw_stamp);
/// assert!(read_result.is_ok()); /// assert!(read_result.is_ok());
/// let stamp_deserialized = read_result.unwrap(); /// let stamp_deserialized = read_result.unwrap();
/// assert_eq!(stamp_deserialized.len_as_bytes(), 7); /// assert_eq!(stamp_deserialized.len_as_bytes(), 7);
@ -174,7 +188,7 @@ pub struct CdsTime<DaysLen: ProvidesDaysLength = DaysLen16Bits> {
submillis: u32, submillis: u32,
/// This is not strictly necessary but still cached because it significantly simplifies the /// This is not strictly necessary but still cached because it significantly simplifies the
/// calculation of [`DateTime<Utc>`]. /// calculation of [`DateTime<Utc>`].
unix_time: UnixTime, unix_stamp: UnixTime,
} }
/// Common properties for all CDS time providers. /// Common properties for all CDS time providers.
@ -212,11 +226,11 @@ impl ConversionFromUnix {
unix_seconds: i64, unix_seconds: i64,
subsec_nanos: u32, subsec_nanos: u32,
precision: SubmillisPrecision, precision: SubmillisPrecision,
) -> Result<Self, DateBeforeCcsdsEpochError> { ) -> Result<Self, TimestampError> {
let (unix_days, secs_of_day) = calc_unix_days_and_secs_of_day(unix_seconds); let (unix_days, secs_of_day) = calc_unix_days_and_secs_of_day(unix_seconds);
let ccsds_days = unix_to_ccsds_days(unix_days); let ccsds_days = unix_to_ccsds_days(unix_days);
if ccsds_days == 0 && (secs_of_day > 0 || subsec_nanos > 0) || ccsds_days < 0 { if ccsds_days == 0 && (secs_of_day > 0 || subsec_nanos > 0) || ccsds_days < 0 {
return Err(DateBeforeCcsdsEpochError( return Err(TimestampError::DateBeforeCcsdsEpoch(
UnixTime::new_checked(unix_seconds, subsec_nanos) UnixTime::new_checked(unix_seconds, subsec_nanos)
.expect("unix timestamp creation failed"), .expect("unix timestamp creation failed"),
)); ));
@ -239,70 +253,54 @@ impl ConversionFromUnix {
} }
impl CdsCommon for ConversionFromUnix { impl CdsCommon for ConversionFromUnix {
#[inline]
fn submillis_precision(&self) -> SubmillisPrecision { fn submillis_precision(&self) -> SubmillisPrecision {
self.submilis_prec self.submilis_prec
} }
#[inline]
fn ms_of_day(&self) -> u32 { fn ms_of_day(&self) -> u32 {
self.ms_of_day self.ms_of_day
} }
#[inline]
fn ccsds_days_as_u32(&self) -> u32 { fn ccsds_days_as_u32(&self) -> u32 {
self.ccsds_days self.ccsds_days
} }
#[inline]
fn submillis(&self) -> u32 { fn submillis(&self) -> u32 {
self.submillis self.submillis
} }
} }
impl CdsConverter for ConversionFromUnix { impl CdsConverter for ConversionFromUnix {
#[inline]
fn unix_days_seconds(&self) -> i64 { fn unix_days_seconds(&self) -> i64 {
self.unix_days_seconds self.unix_days_seconds
} }
} }
/// Helper struct which generates fields for the CDS time provider from a datetime. /// Helper struct which generates fields for the CDS time provider from a datetime.
#[cfg(feature = "chrono")]
struct ConversionFromChronoDatetime { struct ConversionFromChronoDatetime {
unix_conversion: ConversionFromUnix, unix_conversion: ConversionFromUnix,
submillis_prec: SubmillisPrecision, submillis_prec: SubmillisPrecision,
submillis: u32, submillis: u32,
} }
#[cfg(feature = "chrono")]
impl CdsCommon for ConversionFromChronoDatetime { impl CdsCommon for ConversionFromChronoDatetime {
#[inline]
fn submillis_precision(&self) -> SubmillisPrecision { fn submillis_precision(&self) -> SubmillisPrecision {
self.submillis_prec self.submillis_prec
} }
delegate::delegate! { delegate! {
to self.unix_conversion { to self.unix_conversion {
#[inline]
fn ms_of_day(&self) -> u32; fn ms_of_day(&self) -> u32;
#[inline]
fn ccsds_days_as_u32(&self) -> u32; fn ccsds_days_as_u32(&self) -> u32;
} }
} }
#[inline]
fn submillis(&self) -> u32 { fn submillis(&self) -> u32 {
self.submillis self.submillis
} }
} }
#[cfg(feature = "chrono")]
impl CdsConverter for ConversionFromChronoDatetime { impl CdsConverter for ConversionFromChronoDatetime {
delegate::delegate! {to self.unix_conversion { delegate! {to self.unix_conversion { fn unix_days_seconds(&self) -> i64; }}
#[inline]
fn unix_days_seconds(&self) -> i64;
}}
} }
#[inline] #[inline]
@ -320,29 +318,30 @@ fn calc_unix_days_and_secs_of_day(unix_seconds: i64) -> (i64, u32) {
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
impl ConversionFromChronoDatetime { impl ConversionFromChronoDatetime {
fn new(dt: &chrono::DateTime<chrono::Utc>) -> Result<Self, DateBeforeCcsdsEpochError> { fn new(dt: &chrono::DateTime<chrono::Utc>) -> Result<Self, TimestampError> {
Self::new_generic(dt, SubmillisPrecision::Absent) Self::new_generic(dt, SubmillisPrecision::Absent)
} }
fn new_with_submillis_us_prec( fn new_with_submillis_us_prec(
dt: &chrono::DateTime<chrono::Utc>, dt: &chrono::DateTime<chrono::Utc>,
) -> Result<Self, DateBeforeCcsdsEpochError> { ) -> Result<Self, TimestampError> {
Self::new_generic(dt, SubmillisPrecision::Microseconds) Self::new_generic(dt, SubmillisPrecision::Microseconds)
} }
fn new_with_submillis_ps_prec( fn new_with_submillis_ps_prec(
dt: &chrono::DateTime<chrono::Utc>, dt: &chrono::DateTime<chrono::Utc>,
) -> Result<Self, DateBeforeCcsdsEpochError> { ) -> Result<Self, TimestampError> {
Self::new_generic(dt, SubmillisPrecision::Picoseconds) Self::new_generic(dt, SubmillisPrecision::Picoseconds)
} }
#[cfg(feature = "chrono")]
fn new_generic( fn new_generic(
dt: &chrono::DateTime<chrono::Utc>, dt: &chrono::DateTime<chrono::Utc>,
prec: SubmillisPrecision, prec: SubmillisPrecision,
) -> Result<Self, DateBeforeCcsdsEpochError> { ) -> Result<Self, TimestampError> {
// The CDS timestamp does not support timestamps before the CCSDS epoch. // The CDS timestamp does not support timestamps before the CCSDS epoch.
if dt.year() < 1958 { if dt.year() < 1958 {
return Err(DateBeforeCcsdsEpochError(UnixTime::from(*dt))); return Err(TimestampError::DateBeforeCcsdsEpoch(UnixTime::from(*dt)));
} }
// The contained values in the conversion should be all positive now // The contained values in the conversion should be all positive now
let unix_conversion = let unix_conversion =
@ -417,7 +416,7 @@ impl CdsCommon for ConversionFromNow {
fn submillis_precision(&self) -> SubmillisPrecision { fn submillis_precision(&self) -> SubmillisPrecision {
self.submillis_prec self.submillis_prec
} }
delegate::delegate! { delegate! {
to self.unix_conversion { to self.unix_conversion {
fn ms_of_day(&self) -> u32; fn ms_of_day(&self) -> u32;
fn ccsds_days_as_u32(&self) -> u32; fn ccsds_days_as_u32(&self) -> u32;
@ -431,32 +430,35 @@ impl CdsCommon for ConversionFromNow {
#[cfg(feature = "std")] #[cfg(feature = "std")]
impl CdsConverter for ConversionFromNow { impl CdsConverter for ConversionFromNow {
delegate::delegate! {to self.unix_conversion { fn unix_days_seconds(&self) -> i64; }} delegate! {to self.unix_conversion { fn unix_days_seconds(&self) -> i64; }}
} }
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
pub trait DynCdsTimeProvider: CcsdsTimeProvider + CdsTimestamp + TimeWriter + Any {} pub trait DynCdsTimeProvider: CcsdsTimeProvider + CdsTimestamp + TimeWriter + Any {}
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
impl DynCdsTimeProvider for CdsTime<DaysLen16Bits> {} impl DynCdsTimeProvider for CdsTime<DaysLen16Bits> {}
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
impl DynCdsTimeProvider for CdsTime<DaysLen24Bits> {} impl DynCdsTimeProvider for CdsTime<DaysLen24Bits> {}
/// This function returns the correct [CdsTime] instance from a raw byte array /// This function returns the correct [TimeProvider] instance from a raw byte array
/// by checking the length of days field. It also checks the CCSDS time code for correctness. /// by checking the length of days field. It also checks the CCSDS time code for correctness.
/// ///
/// # Example /// # Example
/// ///
/// ``` /// ```
/// use spacepackets::time::cds::{ /// use spacepackets::time::cds::{
/// CdsTime, LengthOfDaySegment, get_dyn_time_provider_from_bytes, SubmillisPrecision, /// TimeProvider, LengthOfDaySegment, get_dyn_time_provider_from_bytes, SubmillisPrecision,
/// }; /// };
/// use spacepackets::time::{TimeWriter, CcsdsTimeCode, CcsdsTimeProvider}; /// use spacepackets::time::{TimeWriter, CcsdsTimeCodes, CcsdsTimeProvider};
/// ///
/// let timestamp_now = CdsTime::new_with_u16_days(24, 24); /// let timestamp_now = TimeProvider::new_with_u16_days(24, 24);
/// let mut raw_stamp = [0; 7]; /// let mut raw_stamp = [0; 7];
/// { /// {
/// let written = timestamp_now.write_to_bytes(&mut raw_stamp).unwrap(); /// let written = timestamp_now.write_to_bytes(&mut raw_stamp).unwrap();
/// assert_eq!((raw_stamp[0] >> 4) & 0b111, CcsdsTimeCode::Cds as u8); /// assert_eq!((raw_stamp[0] >> 4) & 0b111, CcsdsTimeCodes::Cds as u8);
/// assert_eq!(written, 7); /// assert_eq!(written, 7);
/// } /// }
/// { /// {
@ -468,6 +470,7 @@ impl DynCdsTimeProvider for CdsTime<DaysLen24Bits> {}
/// } /// }
/// ``` /// ```
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
pub fn get_dyn_time_provider_from_bytes( pub fn get_dyn_time_provider_from_bytes(
buf: &[u8], buf: &[u8],
) -> Result<Box<dyn DynCdsTimeProvider>, TimestampError> { ) -> Result<Box<dyn DynCdsTimeProvider>, TimestampError> {
@ -569,7 +572,7 @@ impl<ProvidesDaysLen: ProvidesDaysLength> CdsTime<ProvidesDaysLen> {
)); ));
} }
let pfield = buf[0]; let pfield = buf[0];
match CcsdsTimeCode::try_from((pfield >> 4) & 0b111) { match CcsdsTimeCode::try_from(pfield >> 4 & 0b111) {
Ok(cds_type) => match cds_type { Ok(cds_type) => match cds_type {
CcsdsTimeCode::Cds => (), CcsdsTimeCode::Cds => (),
_ => { _ => {
@ -582,7 +585,7 @@ impl<ProvidesDaysLen: ProvidesDaysLength> CdsTime<ProvidesDaysLen> {
_ => { _ => {
return Err(TimestampError::InvalidTimeCode { return Err(TimestampError::InvalidTimeCode {
expected: CcsdsTimeCode::Cds, expected: CcsdsTimeCode::Cds,
found: (pfield >> 4) & 0b111, found: pfield >> 4 & 0b111,
}); });
} }
}; };
@ -638,7 +641,7 @@ impl<ProvidesDaysLen: ProvidesDaysLength> CdsTime<ProvidesDaysLen> {
if let Some(precision) = self.precision_as_ns() { if let Some(precision) = self.precision_as_ns() {
subsec_nanos += precision; subsec_nanos += precision;
} }
self.unix_time = UnixTime::new(unix_days_seconds, subsec_nanos); self.unix_stamp = UnixTime::new(unix_days_seconds, subsec_nanos);
} }
fn length_check(&self, buf: &[u8], len_as_bytes: usize) -> Result<(), TimestampError> { fn length_check(&self, buf: &[u8], len_as_bytes: usize) -> Result<(), TimestampError> {
@ -665,7 +668,7 @@ impl<ProvidesDaysLen: ProvidesDaysLength> CdsTime<ProvidesDaysLen> {
pfield: Self::generate_p_field(days_len, SubmillisPrecision::Absent), pfield: Self::generate_p_field(days_len, SubmillisPrecision::Absent),
ccsds_days, ccsds_days,
ms_of_day, ms_of_day,
unix_time: Default::default(), unix_stamp: Default::default(),
submillis: 0, submillis: 0,
}; };
let unix_days_seconds = ccsds_to_unix_days(i64::from(ccsds_days)) * SECONDS_PER_DAY as i64; let unix_days_seconds = ccsds_to_unix_days(i64::from(ccsds_days)) * SECONDS_PER_DAY as i64;
@ -677,7 +680,7 @@ impl<ProvidesDaysLen: ProvidesDaysLength> CdsTime<ProvidesDaysLen> {
fn from_dt_generic( fn from_dt_generic(
dt: &chrono::DateTime<chrono::Utc>, dt: &chrono::DateTime<chrono::Utc>,
days_len: LengthOfDaySegment, days_len: LengthOfDaySegment,
) -> Result<Self, CdsError> { ) -> Result<Self, TimestampError> {
let conv_from_dt = ConversionFromChronoDatetime::new(dt)?; let conv_from_dt = ConversionFromChronoDatetime::new(dt)?;
Self::generic_from_conversion(days_len, conv_from_dt) Self::generic_from_conversion(days_len, conv_from_dt)
} }
@ -686,7 +689,7 @@ impl<ProvidesDaysLen: ProvidesDaysLength> CdsTime<ProvidesDaysLen> {
fn from_dt_generic_us_prec( fn from_dt_generic_us_prec(
dt: &chrono::DateTime<chrono::Utc>, dt: &chrono::DateTime<chrono::Utc>,
days_len: LengthOfDaySegment, days_len: LengthOfDaySegment,
) -> Result<Self, CdsError> { ) -> Result<Self, TimestampError> {
let conv_from_dt = ConversionFromChronoDatetime::new_with_submillis_us_prec(dt)?; let conv_from_dt = ConversionFromChronoDatetime::new_with_submillis_us_prec(dt)?;
Self::generic_from_conversion(days_len, conv_from_dt) Self::generic_from_conversion(days_len, conv_from_dt)
} }
@ -695,7 +698,7 @@ impl<ProvidesDaysLen: ProvidesDaysLength> CdsTime<ProvidesDaysLen> {
fn from_dt_generic_ps_prec( fn from_dt_generic_ps_prec(
dt: &chrono::DateTime<chrono::Utc>, dt: &chrono::DateTime<chrono::Utc>,
days_len: LengthOfDaySegment, days_len: LengthOfDaySegment,
) -> Result<Self, CdsError> { ) -> Result<Self, TimestampError> {
let conv_from_dt = ConversionFromChronoDatetime::new_with_submillis_ps_prec(dt)?; let conv_from_dt = ConversionFromChronoDatetime::new_with_submillis_ps_prec(dt)?;
Self::generic_from_conversion(days_len, conv_from_dt) Self::generic_from_conversion(days_len, conv_from_dt)
} }
@ -704,46 +707,48 @@ impl<ProvidesDaysLen: ProvidesDaysLength> CdsTime<ProvidesDaysLen> {
unix_stamp: &UnixTime, unix_stamp: &UnixTime,
days_len: LengthOfDaySegment, days_len: LengthOfDaySegment,
submillis_prec: SubmillisPrecision, submillis_prec: SubmillisPrecision,
) -> Result<Self, CdsError> { ) -> Result<Self, TimestampError> {
let conv_from_dt = let conv_from_dt =
ConversionFromUnix::new(unix_stamp.secs, unix_stamp.subsec_nanos, submillis_prec)?; ConversionFromUnix::new(unix_stamp.secs, unix_stamp.subsec_nanos, submillis_prec)?;
Self::generic_from_conversion(days_len, conv_from_dt) Self::generic_from_conversion(days_len, conv_from_dt)
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
fn now_generic(days_len: LengthOfDaySegment) -> Result<Self, StdTimestampError> { fn from_now_generic(days_len: LengthOfDaySegment) -> Result<Self, StdTimestampError> {
let conversion_from_now = ConversionFromNow::new()?; let conversion_from_now = ConversionFromNow::new()?;
Self::generic_from_conversion(days_len, conversion_from_now) Self::generic_from_conversion(days_len, conversion_from_now)
.map_err(|e| StdTimestampError::Timestamp(TimestampError::from(e))) .map_err(StdTimestampError::Timestamp)
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
fn now_generic_with_us_prec(days_len: LengthOfDaySegment) -> Result<Self, StdTimestampError> { fn from_now_generic_us_prec(days_len: LengthOfDaySegment) -> Result<Self, StdTimestampError> {
let conversion_from_now = ConversionFromNow::new_with_submillis_us_prec()?; let conversion_from_now = ConversionFromNow::new_with_submillis_us_prec()?;
Self::generic_from_conversion(days_len, conversion_from_now) Self::generic_from_conversion(days_len, conversion_from_now)
.map_err(|e| StdTimestampError::Timestamp(TimestampError::from(e))) .map_err(StdTimestampError::Timestamp)
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
fn from_now_generic_ps_prec(days_len: LengthOfDaySegment) -> Result<Self, StdTimestampError> { fn from_now_generic_ps_prec(days_len: LengthOfDaySegment) -> Result<Self, StdTimestampError> {
let conversion_from_now = ConversionFromNow::new_with_submillis_ps_prec()?; let conversion_from_now = ConversionFromNow::new_with_submillis_ps_prec()?;
Self::generic_from_conversion(days_len, conversion_from_now) Self::generic_from_conversion(days_len, conversion_from_now)
.map_err(|e| StdTimestampError::Timestamp(TimestampError::from(e))) .map_err(StdTimestampError::Timestamp)
} }
fn generic_from_conversion<C: CdsConverter>( fn generic_from_conversion<C: CdsConverter>(
days_len: LengthOfDaySegment, days_len: LengthOfDaySegment,
converter: C, converter: C,
) -> Result<Self, CdsError> { ) -> Result<Self, TimestampError> {
let ccsds_days: ProvidesDaysLen::FieldType = converter let ccsds_days: ProvidesDaysLen::FieldType =
.ccsds_days_as_u32() converter.ccsds_days_as_u32().try_into().map_err(|_| {
.try_into() TimestampError::Cds(CdsError::InvalidCcsdsDays(
.map_err(|_| CdsError::InvalidCcsdsDays(converter.ccsds_days_as_u32().into()))?; converter.ccsds_days_as_u32().into(),
))
})?;
let mut provider = Self { let mut provider = Self {
pfield: Self::generate_p_field(days_len, converter.submillis_precision()), pfield: Self::generate_p_field(days_len, converter.submillis_precision()),
ccsds_days, ccsds_days,
ms_of_day: converter.ms_of_day(), ms_of_day: converter.ms_of_day(),
unix_time: Default::default(), unix_stamp: Default::default(),
submillis: converter.submillis(), submillis: converter.submillis(),
}; };
provider.setup(converter.unix_days_seconds(), converter.ms_of_day()); provider.setup(converter.unix_days_seconds(), converter.ms_of_day());
@ -771,6 +776,7 @@ impl<ProvidesDaysLen: ProvidesDaysLength> CdsTime<ProvidesDaysLen> {
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
pub fn update_from_now(&mut self) -> Result<(), StdTimestampError> { pub fn update_from_now(&mut self) -> Result<(), StdTimestampError> {
let conversion_from_now = self.generic_conversion_from_now()?; let conversion_from_now = self.generic_conversion_from_now()?;
let ccsds_days: ProvidesDaysLen::FieldType = conversion_from_now let ccsds_days: ProvidesDaysLen::FieldType = conversion_from_now
@ -806,19 +812,22 @@ impl CdsTime<DaysLen24Bits> {
/// Generate a time stamp from the current time using the system clock. /// Generate a time stamp from the current time using the system clock.
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub fn now_with_u24_days() -> Result<Self, StdTimestampError> { #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
Self::now_generic(LengthOfDaySegment::Long24Bits) pub fn from_now_with_u24_days() -> Result<Self, StdTimestampError> {
Self::from_now_generic(LengthOfDaySegment::Long24Bits)
} }
/// Create a provider from a [`chrono::DateTime<chrono::Utc>`] struct. /// Create a provider from a [`DateTime<Utc>`] struct.
/// ///
/// ## Errors /// ## Errors
/// ///
/// This function will return [CdsError::DateBeforeCcsdsEpoch] if the time is before the CCSDS /// This function will return [TimestampError::DateBeforeCcsdsEpoch] or
/// epoch (1958-01-01T00:00:00+00:00) or the CCSDS days value exceeds the allowed bit width /// [TimestampError::Cds] if the time is before the CCSDS epoch (1958-01-01T00:00:00+00:00)
/// (24 bits). /// or the CCSDS days value exceeds the allowed bit width (24 bits).
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
pub fn from_dt_with_u24_days(dt: &chrono::DateTime<chrono::Utc>) -> Result<Self, CdsError> { pub fn from_dt_with_u24_days(
dt: &chrono::DateTime<chrono::Utc>,
) -> Result<Self, TimestampError> {
Self::from_dt_generic(dt, LengthOfDaySegment::Long24Bits) Self::from_dt_generic(dt, LengthOfDaySegment::Long24Bits)
} }
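
A minimal, hedged usage sketch for the chrono-based constructor above (not part of the diff). It assumes the `CdsTime` spelling used on one side of this change and only calls methods visible elsewhere in this file (`from_dt_with_u24_days`, `len_as_bytes`); the concrete error type is `CdsError` on one side and `TimestampError` on the other, so the sketch just prints whichever it gets.

```rust
use chrono::{TimeZone, Utc};
use spacepackets::time::cds::CdsTime;
use spacepackets::time::CcsdsTimeProvider;

fn main() {
    let dt = Utc.with_ymd_and_hms(2024, 3, 14, 12, 0, 0).unwrap();
    match CdsTime::from_dt_with_u24_days(&dt) {
        // P-field, 24-bit days segment and milliseconds of day plus any sub-millisecond field.
        Ok(stamp) => println!("stamp serializes to {} bytes", stamp.len_as_bytes()),
        // Dates before 1958-01-01 or days values wider than 24 bits are rejected.
        Err(e) => eprintln!("conversion failed: {e}"),
    }
}
```
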
@ -826,13 +835,13 @@ impl CdsTime<DaysLen24Bits> {
/// ///
/// ## Errors /// ## Errors
/// ///
/// This function will return [CdsError::DateBeforeCcsdsEpoch] if the time is before the CCSDS /// This function will return [TimestampError::DateBeforeCcsdsEpoch] or
/// epoch (1958-01-01T00:00:00+00:00) or the CCSDS days value exceeds the allowed bit width /// [TimestampError::Cds] if the time is before the CCSDS epoch (1958-01-01T00:00:00+00:00)
/// (24 bits). /// or the CCSDS days value exceeds the allowed bit width (24 bits).
pub fn from_unix_time_with_u24_day( pub fn from_unix_stamp_with_u24_days(
unix_stamp: &UnixTime, unix_stamp: &UnixTime,
submillis_prec: SubmillisPrecision, submillis_prec: SubmillisPrecision,
) -> Result<Self, CdsError> { ) -> Result<Self, TimestampError> {
Self::from_unix_generic(unix_stamp, LengthOfDaySegment::Long24Bits, submillis_prec) Self::from_unix_generic(unix_stamp, LengthOfDaySegment::Long24Bits, submillis_prec)
} }
@ -840,7 +849,7 @@ impl CdsTime<DaysLen24Bits> {
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
pub fn from_dt_with_u24_days_us_precision( pub fn from_dt_with_u24_days_us_precision(
dt: &chrono::DateTime<chrono::Utc>, dt: &chrono::DateTime<chrono::Utc>,
) -> Result<Self, CdsError> { ) -> Result<Self, TimestampError> {
Self::from_dt_generic_us_prec(dt, LengthOfDaySegment::Long24Bits) Self::from_dt_generic_us_prec(dt, LengthOfDaySegment::Long24Bits)
} }
@ -848,20 +857,22 @@ impl CdsTime<DaysLen24Bits> {
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
pub fn from_dt_with_u24_days_ps_precision( pub fn from_dt_with_u24_days_ps_precision(
dt: &chrono::DateTime<chrono::Utc>, dt: &chrono::DateTime<chrono::Utc>,
) -> Result<Self, CdsError> { ) -> Result<Self, TimestampError> {
Self::from_dt_generic_ps_prec(dt, LengthOfDaySegment::Long24Bits) Self::from_dt_generic_ps_prec(dt, LengthOfDaySegment::Long24Bits)
} }
/// Like [Self::now_with_u24_days] but with microsecond sub-millisecond precision. /// Like [Self::from_now_with_u24_days] but with microsecond sub-millisecond precision.
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub fn now_with_u24_days_us_precision() -> Result<Self, StdTimestampError> { #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
Self::now_generic_with_us_prec(LengthOfDaySegment::Long24Bits) pub fn from_now_with_u24_days_us_precision() -> Result<Self, StdTimestampError> {
Self::from_now_generic_us_prec(LengthOfDaySegment::Long24Bits)
} }
/// Like [Self::now_with_u24_days] but with picoseconds sub-millisecond precision. /// Like [Self::from_now_with_u24_days] but with picoseconds sub-millisecond precision.
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub fn now_with_u24_days_ps_precision() -> Result<Self, StdTimestampError> { #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
Self::now_generic_with_us_prec(LengthOfDaySegment::Long24Bits) pub fn from_now_with_u24_days_ps_precision() -> Result<Self, StdTimestampError> {
Self::from_now_generic_us_prec(LengthOfDaySegment::Long24Bits)
} }
pub fn from_bytes_with_u24_days(buf: &[u8]) -> Result<Self, TimestampError> { pub fn from_bytes_with_u24_days(buf: &[u8]) -> Result<Self, TimestampError> {
@ -898,33 +909,36 @@ impl CdsTime<DaysLen16Bits> {
Self::generic_new(LengthOfDaySegment::Short16Bits, ccsds_days, ms_of_day).unwrap() Self::generic_new(LengthOfDaySegment::Short16Bits, ccsds_days, ms_of_day).unwrap()
} }
/// Create a provider from a [`chrono::DateTime<Utc>`] struct. /// Create a provider from a [`DateTime<Utc>`] struct.
/// ///
/// This function will return a [CdsError::DateBeforeCcsdsEpoch] if the time is before the /// This function will return a [TimestampError::DateBeforeCcsdsEpoch] or a
/// CCSDS epoch (01-01-1958 00:00:00) or the CCSDS days value exceeds the allowed bit width /// [TimestampError::Cds] if the time is before the CCSDS epoch (01-01-1958 00:00:00) or
/// (16 bits). /// the CCSDS days value exceeds the allowed bit width (16 bits).
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
pub fn from_dt_with_u16_days(dt: &chrono::DateTime<chrono::Utc>) -> Result<Self, CdsError> { pub fn from_dt_with_u16_days(
dt: &chrono::DateTime<chrono::Utc>,
) -> Result<Self, TimestampError> {
Self::from_dt_generic(dt, LengthOfDaySegment::Short16Bits) Self::from_dt_generic(dt, LengthOfDaySegment::Short16Bits)
} }
/// Generate a time stamp from the current time using the system clock. /// Generate a time stamp from the current time using the system clock.
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub fn now_with_u16_days() -> Result<Self, StdTimestampError> { #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
Self::now_generic(LengthOfDaySegment::Short16Bits) pub fn from_now_with_u16_days() -> Result<Self, StdTimestampError> {
Self::from_now_generic(LengthOfDaySegment::Short16Bits)
} }
/// Create a provider from a generic UNIX timestamp (seconds since 1970-01-01T00:00:00+00:00). /// Create a provider from a generic UNIX timestamp (seconds since 1970-01-01T00:00:00+00:00).
/// ///
/// ## Errors /// ## Errors
/// ///
/// This function will return [CdsError::DateBeforeCcsdsEpoch] if the time is before the CCSDS /// This function will return [TimestampError::DateBeforeCcsdsEpoch] or
/// epoch (1958-01-01T00:00:00+00:00) or the CCSDS days value exceeds the allowed bit width /// [TimestampError::Cds] if the time is before the CCSDS epoch (1958-01-01T00:00:00+00:00)
/// (16 bits). /// or the CCSDS days value exceeds the allowed bit width (16 bits).
pub fn from_unix_time_with_u16_days( pub fn from_unix_stamp_with_u16_days(
unix_stamp: &UnixTime, unix_stamp: &UnixTime,
submillis_prec: SubmillisPrecision, submillis_prec: SubmillisPrecision,
) -> Result<Self, CdsError> { ) -> Result<Self, TimestampError> {
Self::from_unix_generic(unix_stamp, LengthOfDaySegment::Short16Bits, submillis_prec) Self::from_unix_generic(unix_stamp, LengthOfDaySegment::Short16Bits, submillis_prec)
} }
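
A similar sketch for the UNIX-stamp constructor above. The function is spelled `from_unix_time_with_u16_days` on one side of the diff and `from_unix_stamp_with_u16_days` on the other, and the error type differs accordingly, so treat the exact names here as an assumption.

```rust
use spacepackets::time::cds::{CdsTime, SubmillisPrecision};
use spacepackets::time::{CcsdsTimeProvider, UnixTime};

fn main() {
    // 2023-01-14T00:00:00Z expressed as seconds since the UNIX epoch.
    let unix = UnixTime::new(1_673_654_400, 0);
    let stamp = CdsTime::from_unix_time_with_u16_days(&unix, SubmillisPrecision::Absent)
        .expect("stamp lies after the CCSDS epoch and fits into a 16 bit days field");
    // The round trip back to UNIX seconds is lossless here.
    assert_eq!(stamp.unix_secs(), 1_673_654_400);
}
```
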
@ -932,7 +946,7 @@ impl CdsTime<DaysLen16Bits> {
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
pub fn from_dt_with_u16_days_us_precision( pub fn from_dt_with_u16_days_us_precision(
dt: &chrono::DateTime<chrono::Utc>, dt: &chrono::DateTime<chrono::Utc>,
) -> Result<Self, CdsError> { ) -> Result<Self, TimestampError> {
Self::from_dt_generic_us_prec(dt, LengthOfDaySegment::Short16Bits) Self::from_dt_generic_us_prec(dt, LengthOfDaySegment::Short16Bits)
} }
@ -940,18 +954,20 @@ impl CdsTime<DaysLen16Bits> {
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
pub fn from_dt_with_u16_days_ps_precision( pub fn from_dt_with_u16_days_ps_precision(
dt: &chrono::DateTime<chrono::Utc>, dt: &chrono::DateTime<chrono::Utc>,
) -> Result<Self, CdsError> { ) -> Result<Self, TimestampError> {
Self::from_dt_generic_ps_prec(dt, LengthOfDaySegment::Short16Bits) Self::from_dt_generic_ps_prec(dt, LengthOfDaySegment::Short16Bits)
} }
/// Like [Self::now_with_u16_days] but with microsecond sub-millisecond precision. /// Like [Self::from_now_with_u16_days] but with microsecond sub-millisecond precision.
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub fn now_with_u16_days_us_precision() -> Result<Self, StdTimestampError> { #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
Self::now_generic_with_us_prec(LengthOfDaySegment::Short16Bits) pub fn from_now_with_u16_days_us_precision() -> Result<Self, StdTimestampError> {
Self::from_now_generic_us_prec(LengthOfDaySegment::Short16Bits)
} }
/// Like [Self::now_with_u16_days] but with picosecond sub-millisecond precision. /// Like [Self::from_now_with_u16_days] but with picosecond sub-millisecond precision.
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
pub fn from_now_with_u16_days_ps_precision() -> Result<Self, StdTimestampError> { pub fn from_now_with_u16_days_ps_precision() -> Result<Self, StdTimestampError> {
Self::from_now_generic_ps_prec(LengthOfDaySegment::Short16Bits) Self::from_now_generic_ps_prec(LengthOfDaySegment::Short16Bits)
} }
@ -1152,7 +1168,7 @@ impl AddAssign<Duration> for CdsTime<DaysLen24Bits> {
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
impl TryFrom<chrono::DateTime<chrono::Utc>> for CdsTime<DaysLen16Bits> { impl TryFrom<chrono::DateTime<chrono::Utc>> for CdsTime<DaysLen16Bits> {
type Error = CdsError; type Error = TimestampError;
fn try_from(dt: chrono::DateTime<chrono::Utc>) -> Result<Self, Self::Error> { fn try_from(dt: chrono::DateTime<chrono::Utc>) -> Result<Self, Self::Error> {
let conversion = ConversionFromChronoDatetime::new(&dt)?; let conversion = ConversionFromChronoDatetime::new(&dt)?;
@ -1162,7 +1178,8 @@ impl TryFrom<chrono::DateTime<chrono::Utc>> for CdsTime<DaysLen16Bits> {
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
impl TryFrom<chrono::DateTime<chrono::Utc>> for CdsTime<DaysLen24Bits> { impl TryFrom<chrono::DateTime<chrono::Utc>> for CdsTime<DaysLen24Bits> {
type Error = CdsError; type Error = TimestampError;
fn try_from(dt: chrono::DateTime<chrono::Utc>) -> Result<Self, Self::Error> { fn try_from(dt: chrono::DateTime<chrono::Utc>) -> Result<Self, Self::Error> {
let conversion = ConversionFromChronoDatetime::new(&dt)?; let conversion = ConversionFromChronoDatetime::new(&dt)?;
Self::generic_from_conversion(LengthOfDaySegment::Long24Bits, conversion) Self::generic_from_conversion(LengthOfDaySegment::Long24Bits, conversion)
@ -1184,16 +1201,16 @@ impl<ProvidesDaysLen: ProvidesDaysLength> CcsdsTimeProvider for CdsTime<Provides
#[inline] #[inline]
fn unix_secs(&self) -> i64 { fn unix_secs(&self) -> i64 {
self.unix_time.secs self.unix_stamp.secs
} }
#[inline] #[inline]
fn subsec_nanos(&self) -> u32 { fn subsec_nanos(&self) -> u32 {
self.unix_time.subsec_nanos self.unix_stamp.subsec_nanos
} }
#[inline] #[inline]
fn unix_time(&self) -> UnixTime { fn unix_stamp(&self) -> UnixTime {
self.unix_time self.unix_stamp
} }
} }
@ -1340,7 +1357,7 @@ mod tests {
#[test] #[test]
fn test_time_stamp_zero_args() { fn test_time_stamp_zero_args() {
let time_stamper = CdsTime::new_with_u16_days(0, 0); let time_stamper = CdsTime::new_with_u16_days(0, 0);
let unix_stamp = time_stamper.unix_time(); let unix_stamp = time_stamper.unix_stamp();
assert_eq!( assert_eq!(
unix_stamp.secs, unix_stamp.secs,
(DAYS_CCSDS_TO_UNIX * SECONDS_PER_DAY as i32) as i64 (DAYS_CCSDS_TO_UNIX * SECONDS_PER_DAY as i32) as i64
@ -1369,7 +1386,7 @@ mod tests {
#[test] #[test]
fn test_time_stamp_unix_epoch() { fn test_time_stamp_unix_epoch() {
let time_stamper = CdsTime::new_with_u16_days((-DAYS_CCSDS_TO_UNIX) as u16, 0); let time_stamper = CdsTime::new_with_u16_days((-DAYS_CCSDS_TO_UNIX) as u16, 0);
assert_eq!(time_stamper.unix_time().secs, 0); assert_eq!(time_stamper.unix_stamp().secs, 0);
assert_eq!( assert_eq!(
time_stamper.submillis_precision(), time_stamper.submillis_precision(),
SubmillisPrecision::Absent SubmillisPrecision::Absent
@ -1446,7 +1463,7 @@ mod tests {
fn test_write() { fn test_write() {
let mut buf = [0; 16]; let mut buf = [0; 16];
let time_stamper_0 = CdsTime::new_with_u16_days(0, 0); let time_stamper_0 = CdsTime::new_with_u16_days(0, 0);
let unix_stamp = time_stamper_0.unix_time(); let unix_stamp = time_stamper_0.unix_stamp();
assert_eq!( assert_eq!(
unix_stamp.secs, unix_stamp.secs,
(DAYS_CCSDS_TO_UNIX * SECONDS_PER_DAY as i32).into() (DAYS_CCSDS_TO_UNIX * SECONDS_PER_DAY as i32).into()
@ -1591,23 +1608,20 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_time_now() { fn test_time_now() {
let timestamp_now = CdsTime::now_with_u16_days().unwrap(); let timestamp_now = CdsTime::from_now_with_u16_days().unwrap();
let compare_stamp = chrono::Utc::now(); let compare_stamp = chrono::Utc::now();
generic_now_test(timestamp_now, compare_stamp); generic_now_test(timestamp_now, compare_stamp);
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_us_prec() { fn test_time_now_us_prec() {
let timestamp_now = CdsTime::now_with_u16_days_us_precision().unwrap(); let timestamp_now = CdsTime::from_now_with_u16_days_us_precision().unwrap();
let compare_stamp = chrono::Utc::now(); let compare_stamp = chrono::Utc::now();
generic_now_test(timestamp_now, compare_stamp); generic_now_test(timestamp_now, compare_stamp);
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec() { fn test_time_now_ps_prec() {
let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap(); let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now(); let compare_stamp = chrono::Utc::now();
@ -1615,7 +1629,6 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec_u16_days() { fn test_time_now_ps_prec_u16_days() {
let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap(); let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now(); let compare_stamp = chrono::Utc::now();
@ -1623,9 +1636,8 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec_u24_days() { fn test_time_now_ps_prec_u24_days() {
let timestamp_now = CdsTime::now_with_u24_days_ps_precision().unwrap(); let timestamp_now = CdsTime::from_now_with_u24_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now(); let compare_stamp = chrono::Utc::now();
generic_now_test(timestamp_now, compare_stamp); generic_now_test(timestamp_now, compare_stamp);
} }
@ -1910,7 +1922,7 @@ mod tests {
fn test_creation_from_unix_stamp_0_u16_days() { fn test_creation_from_unix_stamp_0_u16_days() {
let unix_secs = 0; let unix_secs = 0;
let subsec_millis = 0; let subsec_millis = 0;
let time_provider = CdsTime::from_unix_time_with_u16_days( let time_provider = CdsTime::from_unix_stamp_with_u16_days(
&UnixTime::new(unix_secs, subsec_millis), &UnixTime::new(unix_secs, subsec_millis),
SubmillisPrecision::Absent, SubmillisPrecision::Absent,
) )
@ -1922,7 +1934,7 @@ mod tests {
fn test_creation_from_unix_stamp_0_u24_days() { fn test_creation_from_unix_stamp_0_u24_days() {
let unix_secs = 0; let unix_secs = 0;
let subsec_millis = 0; let subsec_millis = 0;
let time_provider = CdsTime::from_unix_time_with_u24_day( let time_provider = CdsTime::from_unix_stamp_with_u24_days(
&UnixTime::new(unix_secs, subsec_millis), &UnixTime::new(unix_secs, subsec_millis),
SubmillisPrecision::Absent, SubmillisPrecision::Absent,
) )
@ -1939,8 +1951,10 @@ mod tests {
.unwrap() .unwrap()
.and_local_timezone(chrono::Utc) .and_local_timezone(chrono::Utc)
.unwrap(); .unwrap();
let time_provider = let time_provider = CdsTime::from_unix_stamp_with_u16_days(
CdsTime::from_unix_time_with_u16_days(&datetime_utc.into(), SubmillisPrecision::Absent) &datetime_utc.into(),
SubmillisPrecision::Absent,
)
.expect("creating provider from unix stamp failed"); .expect("creating provider from unix stamp failed");
// https://www.timeanddate.com/date/durationresult.html?d1=01&m1=01&y1=1958&d2=14&m2=01&y2=2023 // https://www.timeanddate.com/date/durationresult.html?d1=01&m1=01&y1=1958&d2=14&m2=01&y2=2023
// Leap years need to be accounted for as well. // Leap years need to be accounted for as well.
@ -1957,7 +1971,7 @@ mod tests {
fn test_creation_0_ccsds_days() { fn test_creation_0_ccsds_days() {
let unix_secs = DAYS_CCSDS_TO_UNIX as i64 * SECONDS_PER_DAY as i64; let unix_secs = DAYS_CCSDS_TO_UNIX as i64 * SECONDS_PER_DAY as i64;
let subsec_millis = 0; let subsec_millis = 0;
let time_provider = CdsTime::from_unix_time_with_u16_days( let time_provider = CdsTime::from_unix_stamp_with_u16_days(
&UnixTime::new(unix_secs, subsec_millis), &UnixTime::new(unix_secs, subsec_millis),
SubmillisPrecision::Absent, SubmillisPrecision::Absent,
) )
@ -1969,7 +1983,7 @@ mod tests {
fn test_invalid_creation_from_unix_stamp_days_too_large() { fn test_invalid_creation_from_unix_stamp_days_too_large() {
let invalid_unix_secs: i64 = (u16::MAX as i64 + 1) * SECONDS_PER_DAY as i64; let invalid_unix_secs: i64 = (u16::MAX as i64 + 1) * SECONDS_PER_DAY as i64;
let subsec_millis = 0; let subsec_millis = 0;
match CdsTime::from_unix_time_with_u16_days( match CdsTime::from_unix_stamp_with_u16_days(
&UnixTime::new(invalid_unix_secs, subsec_millis), &UnixTime::new(invalid_unix_secs, subsec_millis),
SubmillisPrecision::Absent, SubmillisPrecision::Absent,
) { ) {
@ -1977,12 +1991,12 @@ mod tests {
panic!("creation should not succeed") panic!("creation should not succeed")
} }
Err(e) => { Err(e) => {
if let CdsError::InvalidCcsdsDays(days) = e { if let TimestampError::Cds(CdsError::InvalidCcsdsDays(days)) = e {
assert_eq!( assert_eq!(
days, days,
unix_to_ccsds_days(invalid_unix_secs / SECONDS_PER_DAY as i64) unix_to_ccsds_days(invalid_unix_secs / SECONDS_PER_DAY as i64)
); );
assert_eq!(e.to_string(), "invalid ccsds days 69919"); assert_eq!(e.to_string(), "cds error: invalid ccsds days 69919");
} else { } else {
panic!("unexpected error {}", e) panic!("unexpected error {}", e)
} }
@ -1996,7 +2010,7 @@ mod tests {
// precisely 31-12-1957 23:59:55 // precisely 31-12-1957 23:59:55
let unix_secs = DAYS_CCSDS_TO_UNIX * SECONDS_PER_DAY as i32 - 5; let unix_secs = DAYS_CCSDS_TO_UNIX * SECONDS_PER_DAY as i32 - 5;
let subsec_millis = 0; let subsec_millis = 0;
match CdsTime::from_unix_time_with_u16_days( match CdsTime::from_unix_stamp_with_u16_days(
&UnixTime::new(unix_secs as i64, subsec_millis), &UnixTime::new(unix_secs as i64, subsec_millis),
SubmillisPrecision::Absent, SubmillisPrecision::Absent,
) { ) {
@ -2004,8 +2018,8 @@ mod tests {
panic!("creation should not succeed") panic!("creation should not succeed")
} }
Err(e) => { Err(e) => {
if let CdsError::DateBeforeCcsdsEpoch(DateBeforeCcsdsEpochError(unix_dt)) = e { if let TimestampError::DateBeforeCcsdsEpoch(dt) = e {
let dt = unix_dt.chrono_date_time(); let dt = dt.chrono_date_time();
if let chrono::LocalResult::Single(dt) = dt { if let chrono::LocalResult::Single(dt) = dt {
assert_eq!(dt.year(), 1957); assert_eq!(dt.year(), 1957);
assert_eq!(dt.month(), 12); assert_eq!(dt.month(), 12);
@ -2235,9 +2249,7 @@ mod tests {
.unwrap(); .unwrap();
let time_provider = CdsTime::from_dt_with_u24_days(&datetime_utc); let time_provider = CdsTime::from_dt_with_u24_days(&datetime_utc);
assert!(time_provider.is_err()); assert!(time_provider.is_err());
if let CdsError::DateBeforeCcsdsEpoch(DateBeforeCcsdsEpochError(dt)) = if let TimestampError::DateBeforeCcsdsEpoch(dt) = time_provider.unwrap_err() {
time_provider.unwrap_err()
{
assert_eq!(dt, datetime_utc.into()); assert_eq!(dt, datetime_utc.into());
} }
} }
@ -2280,11 +2292,10 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_update_from_now() { fn test_update_from_now() {
let mut stamp = CdsTime::new_with_u16_days(0, 0); let mut stamp = CdsTime::new_with_u16_days(0, 0);
let _ = stamp.update_from_now(); let _ = stamp.update_from_now();
let dt = stamp.unix_time().chrono_date_time().unwrap(); let dt = stamp.unix_stamp().chrono_date_time().unwrap();
assert!(dt.year() > 2020); assert!(dt.year() > 2020);
} }
@ -2296,9 +2307,8 @@ mod tests {
#[test] #[test]
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
#[cfg_attr(miri, ignore)]
fn test_serialization() { fn test_serialization() {
let stamp_now = CdsTime::now_with_u16_days().expect("Error retrieving time"); let stamp_now = CdsTime::from_now_with_u16_days().expect("Error retrieving time");
let val = to_allocvec(&stamp_now).expect("Serializing timestamp failed"); let val = to_allocvec(&stamp_now).expect("Serializing timestamp failed");
assert!(val.len() > 0); assert!(val.len() > 0);
let stamp_deser: CdsTime = from_bytes(&val).expect("Stamp deserialization failed"); let stamp_deser: CdsTime = from_bytes(&val).expect("Stamp deserialization failed");

File diff suppressed because it is too large.

@ -1,11 +1,12 @@
//! CCSDS Time Code Formats according to [CCSDS 301.0-B-4](https://public.ccsds.org/Pubs/301x0b4e1.pdf) //! CCSDS Time Code Formats according to [CCSDS 301.0-B-4](https://public.ccsds.org/Pubs/301x0b4e1.pdf)
use crate::ByteConversionError; use crate::ByteConversionError;
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
use chrono::{TimeZone, Utc}; use chrono::{DateTime, LocalResult, TimeZone, Utc};
use core::cmp::Ordering; use core::cmp::Ordering;
use core::fmt::{Display, Formatter}; use core::fmt::{Display, Formatter};
use core::ops::{Add, AddAssign, Sub}; use core::ops::{Add, AddAssign, Sub};
use core::time::Duration; use core::time::Duration;
use core::u8;
#[allow(unused_imports)] #[allow(unused_imports)]
#[cfg(not(feature = "std"))] #[cfg(not(feature = "std"))]
@ -32,7 +33,6 @@ pub const NANOS_PER_SECOND: u32 = 1_000_000_000;
#[derive(Debug, PartialEq, Eq, Copy, Clone)] #[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum CcsdsTimeCode { pub enum CcsdsTimeCode {
CucCcsdsEpoch = 0b001, CucCcsdsEpoch = 0b001,
CucAgencyEpoch = 0b010, CucAgencyEpoch = 0b010,
@ -63,21 +63,15 @@ pub fn ccsds_time_code_from_p_field(pfield: u8) -> Result<CcsdsTimeCode, u8> {
CcsdsTimeCode::try_from(raw_bits).map_err(|_| raw_bits) CcsdsTimeCode::try_from(raw_bits).map_err(|_| raw_bits)
} }
#[derive(Debug, PartialEq, Eq, Copy, Clone, thiserror::Error)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[error("date before ccsds epoch: {0:?}")]
pub struct DateBeforeCcsdsEpochError(UnixTime);
#[derive(Debug, PartialEq, Eq, Copy, Clone)] #[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[non_exhaustive] #[non_exhaustive]
pub enum TimestampError { pub enum TimestampError {
InvalidTimeCode { expected: CcsdsTimeCode, found: u8 }, InvalidTimeCode { expected: CcsdsTimeCode, found: u8 },
ByteConversion(ByteConversionError), ByteConversion(ByteConversionError),
Cds(cds::CdsError), Cds(cds::CdsError),
Cuc(cuc::CucError), Cuc(cuc::CucError),
DateBeforeCcsdsEpoch(UnixTime),
CustomEpochNotSupported, CustomEpochNotSupported,
} }
@ -99,6 +93,9 @@ impl Display for TimestampError {
TimestampError::ByteConversion(e) => { TimestampError::ByteConversion(e) => {
write!(f, "time stamp: {e}") write!(f, "time stamp: {e}")
} }
TimestampError::DateBeforeCcsdsEpoch(e) => {
write!(f, "datetime with date before ccsds epoch: {e:?}")
}
TimestampError::CustomEpochNotSupported => { TimestampError::CustomEpochNotSupported => {
write!(f, "custom epochs are not supported") write!(f, "custom epochs are not supported")
} }
@ -117,7 +114,6 @@ impl Error for TimestampError {
} }
} }
} }
impl From<cds::CdsError> for TimestampError { impl From<cds::CdsError> for TimestampError {
fn from(e: cds::CdsError) -> Self { fn from(e: cds::CdsError) -> Self {
TimestampError::Cds(e) TimestampError::Cds(e)
@ -131,6 +127,7 @@ impl From<cuc::CucError> for TimestampError {
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
pub mod std_mod { pub mod std_mod {
use crate::time::TimestampError; use crate::time::TimestampError;
use std::time::SystemTimeError; use std::time::SystemTimeError;
@ -138,7 +135,7 @@ pub mod std_mod {
#[derive(Debug, Clone, Error)] #[derive(Debug, Clone, Error)]
pub enum StdTimestampError { pub enum StdTimestampError {
#[error("system time error: {0:?}")] #[error("system time error: {0}")]
SystemTime(#[from] SystemTimeError), SystemTime(#[from] SystemTimeError),
#[error("timestamp error: {0}")] #[error("timestamp error: {0}")]
Timestamp(#[from] TimestampError), Timestamp(#[from] TimestampError),
@ -146,6 +143,7 @@ pub mod std_mod {
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
pub fn seconds_since_epoch() -> f64 { pub fn seconds_since_epoch() -> f64 {
SystemTime::now() SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH) .duration_since(SystemTime::UNIX_EPOCH)
@ -157,7 +155,6 @@ pub fn seconds_since_epoch() -> f64 {
/// ///
/// - CCSDS epoch: 1958-01-01T00:00:00+00:00 /// - CCSDS epoch: 1958-01-01T00:00:00+00:00
/// - UNIX Epoch: 1970-01-01T00:00:00+00:00 /// - UNIX Epoch: 1970-01-01T00:00:00+00:00
#[inline]
pub const fn unix_to_ccsds_days(unix_days: i64) -> i64 { pub const fn unix_to_ccsds_days(unix_days: i64) -> i64 {
unix_days - DAYS_CCSDS_TO_UNIX as i64 unix_days - DAYS_CCSDS_TO_UNIX as i64
} }
@ -166,24 +163,22 @@ pub const fn unix_to_ccsds_days(unix_days: i64) -> i64 {
/// ///
/// - CCSDS epoch: 1958-01-01T00:00:00+00:00 /// - CCSDS epoch: 1958-01-01T00:00:00+00:00
/// - UNIX Epoch: 1970-01-01T00:00:00+00:00 /// - UNIX Epoch: 1970-01-01T00:00:00+00:00
#[inline]
pub const fn ccsds_to_unix_days(ccsds_days: i64) -> i64 { pub const fn ccsds_to_unix_days(ccsds_days: i64) -> i64 {
ccsds_days + DAYS_CCSDS_TO_UNIX as i64 ccsds_days + DAYS_CCSDS_TO_UNIX as i64
} }
/// Similar to [unix_to_ccsds_days] but converts the epoch instead, which is the number of elapsed /// Similar to [unix_to_ccsds_days] but converts the epoch instead, which is the number of elapsed
/// seconds since the CCSDS and UNIX epoch times. /// seconds since the CCSDS and UNIX epoch times.
#[inline]
pub const fn unix_epoch_to_ccsds_epoch(unix_epoch: i64) -> i64 { pub const fn unix_epoch_to_ccsds_epoch(unix_epoch: i64) -> i64 {
unix_epoch - (DAYS_CCSDS_TO_UNIX as i64 * SECONDS_PER_DAY as i64) unix_epoch - (DAYS_CCSDS_TO_UNIX as i64 * SECONDS_PER_DAY as i64)
} }
#[inline]
pub const fn ccsds_epoch_to_unix_epoch(ccsds_epoch: i64) -> i64 { pub const fn ccsds_epoch_to_unix_epoch(ccsds_epoch: i64) -> i64 {
ccsds_epoch + (DAYS_CCSDS_TO_UNIX as i64 * SECONDS_PER_DAY as i64) ccsds_epoch + (DAYS_CCSDS_TO_UNIX as i64 * SECONDS_PER_DAY as i64)
} }
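
A short worked example for the day and epoch conversion helpers above. It relies only on the calendar fact that 1970-01-01 lies 4383 days after 1958-01-01 (twelve years including the three leap days of 1960, 1964 and 1968).

```rust
use spacepackets::time::{
    ccsds_epoch_to_unix_epoch, ccsds_to_unix_days, unix_epoch_to_ccsds_epoch, unix_to_ccsds_days,
};

fn main() {
    // The UNIX epoch (1970-01-01) is CCSDS day 4383.
    assert_eq!(unix_to_ccsds_days(0), 4383);
    assert_eq!(ccsds_to_unix_days(4383), 0);
    // The same offset expressed in seconds: 4383 days * 86400 s.
    assert_eq!(unix_epoch_to_ccsds_epoch(0), 4383 * 86_400);
    assert_eq!(ccsds_epoch_to_unix_epoch(4383 * 86_400), 0);
}
```
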
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
pub fn ms_of_day_using_sysclock() -> u32 { pub fn ms_of_day_using_sysclock() -> u32 {
ms_of_day(seconds_since_epoch()) ms_of_day(seconds_since_epoch())
} }
@ -204,6 +199,7 @@ pub trait TimeWriter {
fn write_to_bytes(&self, bytes: &mut [u8]) -> Result<usize, TimestampError>; fn write_to_bytes(&self, bytes: &mut [u8]) -> Result<usize, TimestampError>;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
fn to_vec(&self) -> Result<alloc::vec::Vec<u8>, TimestampError> { fn to_vec(&self) -> Result<alloc::vec::Vec<u8>, TimestampError> {
let mut vec = alloc::vec![0; self.len_written()]; let mut vec = alloc::vec![0; self.len_written()];
self.write_to_bytes(&mut vec)?; self.write_to_bytes(&mut vec)?;
@ -237,16 +233,18 @@ pub trait CcsdsTimeProvider {
(self.subsec_nanos() / 1_000_000) as u16 (self.subsec_nanos() / 1_000_000) as u16
} }
fn unix_time(&self) -> UnixTime { fn unix_stamp(&self) -> UnixTime {
UnixTime::new(self.unix_secs(), self.subsec_nanos()) UnixTime::new(self.unix_secs(), self.subsec_nanos())
} }
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "chrono")))]
fn chrono_date_time(&self) -> chrono::LocalResult<chrono::DateTime<chrono::Utc>> { fn chrono_date_time(&self) -> chrono::LocalResult<chrono::DateTime<chrono::Utc>> {
chrono::Utc.timestamp_opt(self.unix_secs(), self.subsec_nanos()) chrono::Utc.timestamp_opt(self.unix_secs(), self.subsec_nanos())
} }
#[cfg(feature = "timelib")] #[cfg(feature = "timelib")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "timelib")))]
fn timelib_date_time(&self) -> Result<time::OffsetDateTime, time::error::ComponentRange> { fn timelib_date_time(&self) -> Result<time::OffsetDateTime, time::error::ComponentRange> {
Ok(time::OffsetDateTime::from_unix_timestamp(self.unix_secs())? Ok(time::OffsetDateTime::from_unix_timestamp(self.unix_secs())?
+ time::Duration::nanoseconds(self.subsec_nanos().into())) + time::Duration::nanoseconds(self.subsec_nanos().into()))
@ -260,7 +258,6 @@ pub trait CcsdsTimeProvider {
/// similarly to other common time formats and libraries. /// similarly to other common time formats and libraries.
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq)] #[derive(Default, Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct UnixTime { pub struct UnixTime {
secs: i64, secs: i64,
subsec_nanos: u32, subsec_nanos: u32,
@ -304,7 +301,7 @@ impl UnixTime {
} }
/// This function will panic if the subsecond value is larger than the fraction of a second. /// This function will panic if the subsecond value is larger than the fraction of a second.
/// Use [Self::new_checked] if you want to handle this case without a panic. /// Use [new_checked] if you want to handle this case without a panic.
pub const fn new(unix_seconds: i64, subsecond_nanos: u32) -> Self { pub const fn new(unix_seconds: i64, subsecond_nanos: u32) -> Self {
if subsecond_nanos >= NANOS_PER_SECOND { if subsecond_nanos >= NANOS_PER_SECOND {
panic!("invalid subsecond nanos value"); panic!("invalid subsecond nanos value");
@ -316,7 +313,7 @@ impl UnixTime {
} }
/// This function will panic if the subsecond value is larger than the fraction of a second. /// This function will panic if the subsecond value is larger than the fraction of a second.
/// Use [Self::new_subsec_millis_checked] if you want to handle this case without a panic. /// Use [new_subsecond_millis_checked] if you want to handle this case without a panic.
pub const fn new_subsec_millis(unix_seconds: i64, subsecond_millis: u16) -> Self { pub const fn new_subsec_millis(unix_seconds: i64, subsecond_millis: u16) -> Self {
if subsecond_millis >= 1000 { if subsecond_millis >= 1000 {
panic!("invalid subsecond millisecond value"); panic!("invalid subsecond millisecond value");
@ -344,7 +341,8 @@ impl UnixTime {
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub fn now() -> Result<Self, SystemTimeError> { #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
pub fn from_now() -> Result<Self, SystemTimeError> {
let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH)?; let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH)?;
let epoch = now.as_secs(); let epoch = now.as_secs();
Ok(Self::new(epoch as i64, now.subsec_nanos())) Ok(Self::new(epoch as i64, now.subsec_nanos()))
@ -355,49 +353,43 @@ impl UnixTime {
self.secs as f64 + (self.subsec_nanos as f64 / 1_000_000_000.0) self.secs as f64 + (self.subsec_nanos as f64 / 1_000_000_000.0)
} }
pub fn as_secs(&self) -> i64 { pub fn secs(&self) -> i64 {
self.secs self.secs
} }
#[cfg(feature = "chrono")] #[cfg(feature = "chrono")]
pub fn chrono_date_time(&self) -> chrono::LocalResult<chrono::DateTime<chrono::Utc>> { #[cfg_attr(doc_cfg, doc(cfg(feature = "chrono")))]
pub fn chrono_date_time(&self) -> LocalResult<DateTime<Utc>> {
Utc.timestamp_opt(self.secs, self.subsec_nanos) Utc.timestamp_opt(self.secs, self.subsec_nanos)
} }
#[cfg(feature = "timelib")] #[cfg(feature = "timelib")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "timelib")))]
pub fn timelib_date_time(&self) -> Result<time::OffsetDateTime, time::error::ComponentRange> { pub fn timelib_date_time(&self) -> Result<time::OffsetDateTime, time::error::ComponentRange> {
Ok(time::OffsetDateTime::from_unix_timestamp(self.as_secs())? Ok(time::OffsetDateTime::from_unix_timestamp(self.secs())?
+ time::Duration::nanoseconds(self.subsec_nanos().into())) + time::Duration::nanoseconds(self.subsec_nanos().into()))
} }
// Calculate the difference in milliseconds between two UnixTimestamps // Calculate the difference in milliseconds between two UnixTimestamps
pub fn diff_in_millis(&self, other: &UnixTime) -> Option<i64> { pub fn diff_in_millis(&self, other: &UnixTime) -> i64 {
let seconds_difference = self.secs.checked_sub(other.secs)?; let seconds_difference = self.secs - other.secs;
// Convert seconds difference to milliseconds // Convert seconds difference to milliseconds
let milliseconds_difference = seconds_difference.checked_mul(1000)?; let milliseconds_difference = seconds_difference * 1000;
// Calculate the difference in subsecond milliseconds directly // Calculate the difference in subsecond milliseconds directly
let subsecond_difference_nanos = self.subsec_nanos as i64 - other.subsec_nanos as i64; let subsecond_difference_nanos = self.subsec_nanos as i64 - other.subsec_nanos as i64;
// Combine the differences // Combine the differences
Some(milliseconds_difference + (subsecond_difference_nanos / 1_000_000)) milliseconds_difference + (subsecond_difference_nanos / 1_000_000)
} }
} }
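
To illustrate the millisecond-difference arithmetic above, a hedged sketch written against the checked variant that returns `Option<i64>`; the other side of this diff returns a plain `i64` with the same numeric result.

```rust
use spacepackets::time::UnixTime;

fn main() {
    let earlier = UnixTime::new_subsec_millis(5, 500);
    let later = UnixTime::new_subsec_millis(7, 200);
    // Seconds difference: 2 * 1000 ms; subsecond difference: 200 ms - 500 ms = -300 ms.
    assert_eq!(later.diff_in_millis(&earlier), Some(1700));
    assert_eq!(earlier.diff_in_millis(&later), Some(-1700));
}
```
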
#[cfg(feature = "chrono")] impl From<DateTime<Utc>> for UnixTime {
impl From<chrono::DateTime<chrono::Utc>> for UnixTime { fn from(value: DateTime<Utc>) -> Self {
fn from(value: chrono::DateTime<chrono::Utc>) -> Self {
Self::new(value.timestamp(), value.timestamp_subsec_nanos()) Self::new(value.timestamp(), value.timestamp_subsec_nanos())
} }
} }
#[cfg(feature = "timelib")]
impl From<time::OffsetDateTime> for UnixTime {
fn from(value: time::OffsetDateTime) -> Self {
Self::new(value.unix_timestamp(), value.nanosecond())
}
}
impl PartialOrd for UnixTime { impl PartialOrd for UnixTime {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other)) Some(self.cmp(other))
@ -445,11 +437,11 @@ pub struct StampDiff {
} }
impl Sub for UnixTime { impl Sub for UnixTime {
type Output = Option<StampDiff>; type Output = StampDiff;
fn sub(self, rhs: Self) -> Self::Output { fn sub(self, rhs: Self) -> Self::Output {
let difference = self.diff_in_millis(&rhs)?; let difference = self.diff_in_millis(&rhs);
Some(if difference < 0 { if difference < 0 {
StampDiff { StampDiff {
positive_duration: false, positive_duration: false,
duration_absolute: Duration::from_millis(-difference as u64), duration_absolute: Duration::from_millis(-difference as u64),
@ -459,7 +451,7 @@ impl Sub for UnixTime {
positive_duration: true, positive_duration: true,
duration_absolute: Duration::from_millis(difference as u64), duration_absolute: Duration::from_millis(difference as u64),
} }
}) }
} }
} }
@ -542,7 +534,6 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_get_current_time() { fn test_get_current_time() {
let sec_floats = seconds_since_epoch(); let sec_floats = seconds_since_epoch();
assert!(sec_floats > 0.0); assert!(sec_floats > 0.0);
@ -557,7 +548,6 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_ccsds_epoch() { fn test_ccsds_epoch() {
let now = SystemTime::now() let now = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH) .duration_since(SystemTime::UNIX_EPOCH)
@ -650,7 +640,7 @@ mod tests {
fn test_addition() { fn test_addition() {
let mut stamp0 = UnixTime::new_only_secs(1); let mut stamp0 = UnixTime::new_only_secs(1);
stamp0 += Duration::from_secs(5); stamp0 += Duration::from_secs(5);
assert_eq!(stamp0.as_secs(), 6); assert_eq!(stamp0.secs(), 6);
assert_eq!(stamp0.subsec_millis(), 0); assert_eq!(stamp0.subsec_millis(), 0);
let stamp1 = stamp0 + Duration::from_millis(500); let stamp1 = stamp0 + Duration::from_millis(500);
assert_eq!(stamp1.secs, 6); assert_eq!(stamp1.secs, 6);
@ -678,9 +668,8 @@ mod tests {
} }
#[test] #[test]
#[cfg_attr(miri, ignore)]
fn test_from_now() { fn test_from_now() {
let stamp_now = UnixTime::now().unwrap(); let stamp_now = UnixTime::from_now().unwrap();
let dt_now = stamp_now.chrono_date_time().unwrap(); let dt_now = stamp_now.chrono_date_time().unwrap();
assert!(dt_now.year() >= 2020); assert!(dt_now.year() >= 2020);
} }
@ -691,7 +680,7 @@ mod tests {
let StampDiff { let StampDiff {
positive_duration, positive_duration,
duration_absolute, duration_absolute,
} = (stamp_later - UnixTime::new(1, 0)).expect("stamp diff error"); } = stamp_later - UnixTime::new(1, 0);
assert!(positive_duration); assert!(positive_duration);
assert_eq!(duration_absolute, Duration::from_secs(1)); assert_eq!(duration_absolute, Duration::from_secs(1));
} }
@ -703,7 +692,7 @@ mod tests {
let StampDiff { let StampDiff {
positive_duration, positive_duration,
duration_absolute, duration_absolute,
} = (stamp_later - stamp_earlier).expect("stamp diff error"); } = stamp_later - stamp_earlier;
assert!(positive_duration); assert!(positive_duration);
assert_eq!(duration_absolute, Duration::from_millis(1900)); assert_eq!(duration_absolute, Duration::from_millis(1900));
} }
@ -715,7 +704,7 @@ mod tests {
let StampDiff { let StampDiff {
positive_duration, positive_duration,
duration_absolute, duration_absolute,
} = (stamp_earlier - stamp_later).expect("stamp diff error"); } = stamp_earlier - stamp_later;
assert!(!positive_duration); assert!(!positive_duration);
assert_eq!(duration_absolute, Duration::from_millis(1900)); assert_eq!(duration_absolute, Duration::from_millis(1900));
} }
@ -750,13 +739,4 @@ mod tests {
assert_eq!(timelib_dt.minute(), 0); assert_eq!(timelib_dt.minute(), 0);
assert_eq!(timelib_dt.second(), 0); assert_eq!(timelib_dt.second(), 0);
} }
#[test]
#[cfg(feature = "timelib")]
fn test_unix_stamp_from_timelib_datetime() {
let timelib_dt = time::OffsetDateTime::UNIX_EPOCH;
let unix_time = UnixTime::from(timelib_dt);
let timelib_converted_back = unix_time.timelib_date_time().unwrap();
assert_eq!(timelib_dt, timelib_converted_back);
}
} }


@ -15,12 +15,10 @@ pub trait ToBeBytes {
impl ToBeBytes for () { impl ToBeBytes for () {
type ByteArray = [u8; 0]; type ByteArray = [u8; 0];
#[inline]
fn written_len(&self) -> usize { fn written_len(&self) -> usize {
0 0
} }
#[inline]
fn to_be_bytes(&self) -> Self::ByteArray { fn to_be_bytes(&self) -> Self::ByteArray {
[] []
} }
@ -29,12 +27,9 @@ impl ToBeBytes for () {
impl ToBeBytes for u8 { impl ToBeBytes for u8 {
type ByteArray = [u8; 1]; type ByteArray = [u8; 1];
#[inline]
fn written_len(&self) -> usize { fn written_len(&self) -> usize {
1 1
} }
#[inline]
fn to_be_bytes(&self) -> Self::ByteArray { fn to_be_bytes(&self) -> Self::ByteArray {
u8::to_be_bytes(*self) u8::to_be_bytes(*self)
} }
@ -43,12 +38,9 @@ impl ToBeBytes for u8 {
impl ToBeBytes for u16 { impl ToBeBytes for u16 {
type ByteArray = [u8; 2]; type ByteArray = [u8; 2];
#[inline]
fn written_len(&self) -> usize { fn written_len(&self) -> usize {
2 2
} }
#[inline]
fn to_be_bytes(&self) -> Self::ByteArray { fn to_be_bytes(&self) -> Self::ByteArray {
u16::to_be_bytes(*self) u16::to_be_bytes(*self)
} }
@ -57,12 +49,9 @@ impl ToBeBytes for u16 {
impl ToBeBytes for u32 { impl ToBeBytes for u32 {
type ByteArray = [u8; 4]; type ByteArray = [u8; 4];
#[inline]
fn written_len(&self) -> usize { fn written_len(&self) -> usize {
4 4
} }
#[inline]
fn to_be_bytes(&self) -> Self::ByteArray { fn to_be_bytes(&self) -> Self::ByteArray {
u32::to_be_bytes(*self) u32::to_be_bytes(*self)
} }
@ -71,12 +60,9 @@ impl ToBeBytes for u32 {
impl ToBeBytes for u64 { impl ToBeBytes for u64 {
type ByteArray = [u8; 8]; type ByteArray = [u8; 8];
#[inline]
fn written_len(&self) -> usize { fn written_len(&self) -> usize {
8 8
} }
#[inline]
fn to_be_bytes(&self) -> Self::ByteArray { fn to_be_bytes(&self) -> Self::ByteArray {
u64::to_be_bytes(*self) u64::to_be_bytes(*self)
} }
@ -91,6 +77,7 @@ pub trait UnsignedEnum {
fn value(&self) -> u64; fn value(&self) -> u64;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
fn to_vec(&self) -> alloc::vec::Vec<u8> { fn to_vec(&self) -> alloc::vec::Vec<u8> {
let mut buf = alloc::vec![0; self.size()]; let mut buf = alloc::vec![0; self.size()];
self.write_to_be_bytes(&mut buf).unwrap(); self.write_to_be_bytes(&mut buf).unwrap();
@ -118,7 +105,6 @@ pub enum UnsignedByteFieldError {
} }
impl From<ByteConversionError> for UnsignedByteFieldError { impl From<ByteConversionError> for UnsignedByteFieldError {
#[inline]
fn from(value: ByteConversionError) -> Self { fn from(value: ByteConversionError) -> Self {
Self::ByteConversionError(value) Self::ByteConversionError(value)
} }
@ -146,24 +132,20 @@ impl Error for UnsignedByteFieldError {}
/// Type erased variant. /// Type erased variant.
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct UnsignedByteField { pub struct UnsignedByteField {
width: usize, width: usize,
value: u64, value: u64,
} }
impl UnsignedByteField { impl UnsignedByteField {
#[inline]
pub const fn new(width: usize, value: u64) -> Self { pub const fn new(width: usize, value: u64) -> Self {
Self { width, value } Self { width, value }
} }
#[inline]
pub const fn value_const(&self) -> u64 { pub const fn value_const(&self) -> u64 {
self.value self.value
} }
#[inline]
pub fn new_from_be_bytes(width: usize, buf: &[u8]) -> Result<Self, UnsignedByteFieldError> { pub fn new_from_be_bytes(width: usize, buf: &[u8]) -> Result<Self, UnsignedByteFieldError> {
if width > buf.len() { if width > buf.len() {
return Err(ByteConversionError::FromSliceTooSmall { return Err(ByteConversionError::FromSliceTooSmall {
@ -196,12 +178,10 @@ impl UnsignedByteField {
} }
impl UnsignedEnum for UnsignedByteField { impl UnsignedEnum for UnsignedByteField {
#[inline]
fn size(&self) -> usize { fn size(&self) -> usize {
self.width self.width
} }
#[inline]
fn value(&self) -> u64 { fn value(&self) -> u64 {
self.value_const() self.value_const()
} }
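UnsignedByteField is the type-erased counterpart to those impls: it stores a runtime width together with a u64 value and exposes it through the UnsignedEnum trait. A short usage sketch combining the constructors and methods visible in the surrounding hunks (new, new_from_be_bytes, size, value, write_to_be_bytes); the crate path is an assumption:

    // Path assumed for illustration; method names are taken from the diff.
    use spacepackets::util::{UnsignedByteField, UnsignedEnum};

    fn main() {
        // A two-byte field holding 0x1234, held behind the type-erased struct.
        let field = UnsignedByteField::new(2, 0x1234);
        assert_eq!(field.size(), 2);
        assert_eq!(field.value(), 0x1234);

        // Serialize into a caller-provided buffer...
        let mut buf = [0u8; 4];
        field
            .write_to_be_bytes(&mut buf)
            .expect("buffer too small for field");
        assert_eq!(&buf[..2], &[0x12, 0x34]);

        // ...and parse it back with an explicit width.
        let read_back = UnsignedByteField::new_from_be_bytes(2, &buf)
            .expect("slice too small for requested width");
        assert_eq!(read_back, field);
    }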
@ -241,7 +221,6 @@ impl UnsignedEnum for UnsignedByteField {
#[derive(Debug, Copy, Clone, Eq, PartialEq)] #[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct GenericUnsignedByteField<TYPE: Copy + Into<u64>> { pub struct GenericUnsignedByteField<TYPE: Copy + Into<u64>> {
value: TYPE, value: TYPE,
} }
@ -257,7 +236,6 @@ impl<TYPE: Copy + Into<u64>> GenericUnsignedByteField<TYPE> {
} }
impl<TYPE: Copy + ToBeBytes + Into<u64>> UnsignedEnum for GenericUnsignedByteField<TYPE> { impl<TYPE: Copy + ToBeBytes + Into<u64>> UnsignedEnum for GenericUnsignedByteField<TYPE> {
#[inline]
fn size(&self) -> usize { fn size(&self) -> usize {
self.value.written_len() self.value.written_len()
} }
@ -273,7 +251,6 @@ impl<TYPE: Copy + ToBeBytes + Into<u64>> UnsignedEnum for GenericUnsignedByteFie
Ok(self.value.written_len()) Ok(self.value.written_len())
} }
#[inline]
fn value(&self) -> u64 { fn value(&self) -> u64 {
self.value_typed().into() self.value_typed().into()
} }
@ -299,7 +276,6 @@ impl From<UnsignedByteFieldU8> for UnsignedByteField {
impl TryFrom<UnsignedByteField> for UnsignedByteFieldU8 { impl TryFrom<UnsignedByteField> for UnsignedByteFieldU8 {
type Error = UnsignedByteFieldError; type Error = UnsignedByteFieldError;
#[inline]
fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> { fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> {
if value.width != 1 { if value.width != 1 {
return Err(UnsignedByteFieldError::InvalidWidth { return Err(UnsignedByteFieldError::InvalidWidth {
@ -312,7 +288,6 @@ impl TryFrom<UnsignedByteField> for UnsignedByteFieldU8 {
} }
impl From<UnsignedByteFieldU16> for UnsignedByteField { impl From<UnsignedByteFieldU16> for UnsignedByteField {
#[inline]
fn from(value: UnsignedByteFieldU16) -> Self { fn from(value: UnsignedByteFieldU16) -> Self {
Self::new(2, value.value as u64) Self::new(2, value.value as u64)
} }
@ -321,7 +296,6 @@ impl From<UnsignedByteFieldU16> for UnsignedByteField {
impl TryFrom<UnsignedByteField> for UnsignedByteFieldU16 { impl TryFrom<UnsignedByteField> for UnsignedByteFieldU16 {
type Error = UnsignedByteFieldError; type Error = UnsignedByteFieldError;
#[inline]
fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> { fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> {
if value.width != 2 { if value.width != 2 {
return Err(UnsignedByteFieldError::InvalidWidth { return Err(UnsignedByteFieldError::InvalidWidth {
@ -334,7 +308,6 @@ impl TryFrom<UnsignedByteField> for UnsignedByteFieldU16 {
} }
impl From<UnsignedByteFieldU32> for UnsignedByteField { impl From<UnsignedByteFieldU32> for UnsignedByteField {
#[inline]
fn from(value: UnsignedByteFieldU32) -> Self { fn from(value: UnsignedByteFieldU32) -> Self {
Self::new(4, value.value as u64) Self::new(4, value.value as u64)
} }
@ -343,7 +316,6 @@ impl From<UnsignedByteFieldU32> for UnsignedByteField {
impl TryFrom<UnsignedByteField> for UnsignedByteFieldU32 { impl TryFrom<UnsignedByteField> for UnsignedByteFieldU32 {
type Error = UnsignedByteFieldError; type Error = UnsignedByteFieldError;
#[inline]
fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> { fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> {
if value.width != 4 { if value.width != 4 {
return Err(UnsignedByteFieldError::InvalidWidth { return Err(UnsignedByteFieldError::InvalidWidth {
@ -356,7 +328,6 @@ impl TryFrom<UnsignedByteField> for UnsignedByteFieldU32 {
} }
impl From<UnsignedByteFieldU64> for UnsignedByteField { impl From<UnsignedByteFieldU64> for UnsignedByteField {
#[inline]
fn from(value: UnsignedByteFieldU64) -> Self { fn from(value: UnsignedByteFieldU64) -> Self {
Self::new(8, value.value) Self::new(8, value.value)
} }
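The remaining hunks provide the From/TryFrom bridges between the fixed-width wrappers (UnsignedByteFieldU8 through UnsignedByteFieldU64) and the erased UnsignedByteField, with width mismatches rejected via UnsignedByteFieldError::InvalidWidth. A hedged round-trip sketch: the typed new constructor and the crate path are assumptions, while the conversion traits are taken from the diff:

    // Paths and the typed `new` constructor are assumed for illustration.
    use spacepackets::util::{UnsignedByteField, UnsignedByteFieldU16, UnsignedByteFieldU32};

    fn main() {
        // Typed, fixed-width field -> type-erased field (infallible From).
        let typed = UnsignedByteFieldU16::new(0x1234);
        let erased = UnsignedByteField::from(typed);

        // Going back is fallible: the stored width must match the target type.
        let back = UnsignedByteFieldU16::try_from(erased).expect("width should match");
        assert_eq!(back, typed);

        // Converting to a four-byte typed field is rejected with InvalidWidth.
        assert!(UnsignedByteFieldU32::try_from(erased).is_err());
    }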
@ -365,7 +336,6 @@ impl From<UnsignedByteFieldU64> for UnsignedByteField {
impl TryFrom<UnsignedByteField> for UnsignedByteFieldU64 { impl TryFrom<UnsignedByteField> for UnsignedByteFieldU64 {
type Error = UnsignedByteFieldError; type Error = UnsignedByteFieldError;
#[inline]
fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> { fn try_from(value: UnsignedByteField) -> Result<Self, Self::Error> {
if value.width != 8 { if value.width != 8 {
return Err(UnsignedByteFieldError::InvalidWidth { return Err(UnsignedByteFieldError::InvalidWidth {