Compare commits
59 Commits
| SHA1 |
| --- |
| a03d26a49c |
| 026173514f |
| 2d7ccc0909 |
| 05d3bac927 |
| d58df5fee2 |
| 9d23ac5b9b |
| c0b4653c01 |
| f156833985 |
| 9aea3dba00 |
| 48247a0a87 |
| f70b957d9a |
| fbf953df0e |
| f135d54364 |
| d8b2a3dfea |
| 448b76be91 |
| 027b01f00f |
| bf15b22889 |
| 16f91b562d |
| cd77b806fe |
| 43c88da3f2 |
| b19a61b859 |
| 8aa957b8bb |
| 190fa1befc |
| 175b61deca |
| 51c28b5cc6 |
| 45cc74daa7 |
| 191c6f8146 |
| 5449884b2e |
| 9c93c76193 |
| 043927c7ef |
| f4dc5a0302 |
| 9166faa4ae |
| ed808e69d4 |
| d146b6cf57 |
| ff0c9d8c70 |
| c40bc855a2 |
| 81423fc6e8 |
| a399b11a8e |
| 9d4c7446a3 |
| b87f7d73b1 |
| 80744eea16 |
| a5918bfd4a |
| 0e347b0e37 |
| 58dabb6f2f |
| 7fd65aa592 |
| 0024afc83e |
| c48bd848d3 |
| b8be9ae641 |
| c2506dbba9 |
| b842b9d11a |
| 374c034e92 |
| 791c7f6e02 |
| 8001938507 |
| 73ab7ff148 |
| c59d01174f |
| eb49bff0c9 |
| af392d40d0 |
| b78bfe2114 |
| 69a3b1d8f3 |
.github/workflows/ci.yml (vendored): 115 lines changed
@@ -1,42 +1,39 @@
on: [push]
name: ci
on: [push, pull_request]

jobs:
check:
name: Check
name: Check build
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
- uses: actions-rs/cargo@v1
with:
command: check
args: --release
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo check --release

msrv:
name: Check with MSRV
test:
name: Run Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
toolchain: 1.65.0
override: true
profile: minimal
- uses: actions-rs/cargo@v1
with:
command: check
args: --release
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- name: Install nextest
uses: taiki-e/install-action@nextest
- run: cargo nextest run --all-features
- run: cargo test --doc

msrv:
name: Check MSRV
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.81.0
- run: cargo check --release

cross-check:
name: Check Cross
name: Check Cross-Compilation
runs-on: ubuntu-latest
strategy:
matrix:
@@ -44,70 +41,32 @@ jobs:
- armv7-unknown-linux-gnueabihf
- thumbv7em-none-eabihf
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
with:
profile: minimal
toolchain: stable
target: ${{ matrix.target }}
override: true
- uses: actions-rs/cargo@v1
with:
use-cross: true
command: check
args: --release --target=${{ matrix.target }} --no-default-features
targets: "armv7-unknown-linux-gnueabihf, thumbv7em-none-eabihf"
- run: cargo check --release --target=${{matrix.target}} --no-default-features

fmt:
name: Rustfmt
name: Check formatting
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- run: rustup component add rustfmt
- uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo fmt --all -- --check

check-doc:
docs:
name: Check Documentation Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
toolchain: nightly
override: true
profile: minimal
- uses: actions-rs/cargo@v1
with:
command: doc
args: --all-features
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@nightly
- run: RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features

clippy:
name: Clippy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
- run: rustup component add clippy
- uses: actions-rs/cargo@v1
with:
command: clippy
args: -- -D warnings

ci:
if: ${{ success() }}
# all new jobs must be added to this list
needs: [check, fmt, clippy]
runs-on: ubuntu-latest
steps:
- name: CI succeeded
run: exit 0
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo clippy -- -D warnings
CHANGELOG.md: 55 lines changed
@@ -8,6 +8,61 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

# [unreleased]

# [v0.13.0] 2024-11-08

- Bumped MSRV to 1.81.0
- Bumped `zerocopy` to v0.8.0
- Bumped `thiserror` to v2.0.0

## Changed

- Migrated all Error implementations to `thiserror` and improved some naming and error handling in general.

# [v0.12.0] 2024-09-10

- Bumped MSRV to 1.70.0

## Added

- Added new `cfdp::tlv::TlvOwned` type which erases the lifetime and is clonable.
- Dedicated `cfdp::tlv::TlvLvDataTooLarge` error struct for APIs where this is the only possible API error.
- Added a File Data PDU API which takes the expected file data size and then exposes the unwritten file data field as a mutable slice. This allows reading data from the virtual file system directly into the file data buffer without an intermediate buffer.
- Generic `EofPdu::new` constructor.
- Added a generic sequence counter module.
- Added the `MsgToUserTlv::to_tlv` converter which reduces the type and converts it to a generic `Tlv`.
- Implemented the `From<MsgToUserTlv> for Tlv` converter trait.
- Added the CFDP maximum file segment length calculator method `calculate_max_file_seg_len_for_max_packet_len_and_pdu_header`.

## Added and Changed

- Added the new `ReadableTlv` abstraction to avoid some boilerplate code; it is implemented for both `Tlv` and `TlvOwned` to read the raw TLV data field and its length.
- Replaced `cfdp::tlv::TlvLvError` by `cfdp::tlv::TlvLvDataTooLarge` where applicable.

## Fixed

- Fixed an error in the EOF writer which wrote the fault location to the wrong buffer position.
- cfdp `ConditionCode::CheckLimitReached` previously had the wrong numerical value of `0b1001` (9) and now has the correct value of `0b1010` (10).

## Changed

- Minor documentation build updates.
- Increased the `delegate` version range to v0.13.

# [v0.11.2] 2024-05-19

- Bumped MSRV to 1.68.2

## Fixed

- Removed the `defmt::Format` impl for `MetadataPduCreator`, which seems to be problematic.

# [v0.11.1] 2024-04-22

## Fixed
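The new File Data PDU API mentioned in the v0.12.0 changelog entry is easiest to see in code. The following is a minimal sketch of the partial-write workflow, based on the test code further down in this diff; the module paths, the 64-byte buffer and the zero offset are assumptions, not taken from the crate documentation.

```rust
// Sketch only: mirrors the file data PDU creator tests shown later in this diff.
use spacepackets::cfdp::pdu::file_data::{FileDataPdu, FileDataPduCreatorWithReservedDatafield};
use spacepackets::cfdp::pdu::{CommonPduConfig, PduHeader};

fn main() {
    let pdu_header = PduHeader::new_for_file_data_default(CommonPduConfig::default(), 0);
    let file_segment = b"hello world!";
    let creator = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
        pdu_header,
        0, // file offset, placeholder value
        file_segment.len() as u64,
    );
    let mut buf = [0u8; 64];
    // Write everything except the file data field (and the CRC, if enabled).
    let mut unwritten = creator
        .write_to_bytes_partially(&mut buf)
        .expect("partial write failed");
    // A virtual file system read could target this slice directly; here we just copy.
    unwritten
        .file_data_field_mut()
        .copy_from_slice(file_segment);
    // finish() adds the CRC where applicable and returns the full written length.
    let written_len = unwritten.finish();
    let read_back =
        FileDataPdu::from_bytes(&buf[..written_len]).expect("reading PDU back failed");
    assert_eq!(read_back.file_data(), file_segment);
}
```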
Cargo.toml: 31 lines changed
@@ -1,8 +1,8 @@
[package]
name = "spacepackets"
version = "0.11.1"
version = "0.13.0"
edition = "2021"
rust-version = "1.65"
rust-version = "1.81.0"
authors = ["Robin Mueller <muellerr@irs.uni-stuttgart.de>"]
description = "Generic implementations for various CCSDS and ECSS packet standards"
homepage = "https://egit.irs.uni-stuttgart.de/rust/spacepackets"
@@ -14,15 +14,16 @@ categories = ["aerospace", "aerospace::space-protocols", "no-std", "hardware-sup

[dependencies]
crc = "3"
delegate = ">=0.8, <0.11"
delegate = ">=0.8, <=0.13"
paste = "1"

[dependencies.zerocopy]
version = "0.7"
version = "0.8"
features = ["derive"]

[dependencies.thiserror]
version = "1"
optional = true
version = "2"
default-features = false

[dependencies.num_enum]
version = ">0.5, <=0.7"
@@ -52,19 +53,17 @@ default-features = false
version = "0.3"
optional = true

[dev-dependencies]
postcard = "1"
chrono = "0.4"

[features]
default = ["std"]
std = ["chrono/std", "chrono/clock", "alloc", "thiserror"]
serde = ["dep:serde", "chrono/serde"]
alloc = ["postcard/alloc", "chrono/alloc"]
chrono = ["dep:chrono"]
std = ["alloc", "chrono/std", "chrono/clock", "thiserror/std"]
serde = ["dep:serde", "chrono?/serde"]
alloc = ["chrono?/alloc", "defmt?/alloc", "serde?/alloc"]
timelib = ["dep:time"]
defmt = ["dep:defmt"]

[dev-dependencies]
postcard = { version = "1", features = ["alloc"] }
chrono = "0.4"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docs_rs", "--generate-link-to-definition"]
rustdoc-args = ["--generate-link-to-definition"]
README.md: 10 lines changed
@@ -61,3 +61,13 @@ cargo install grcov --locked

After that, you can simply run `coverage.py` to test the project with coverage. You can optionally
supply the `--open` flag to open the coverage report in your web browser.

# Miri

You can run the [`miri`](https://github.com/rust-lang/miri) tool on this library to check for
undefined behaviour (UB). This library does not use any `unsafe` code blocks, but `miri` could
still catch UB from used libraries.

```sh
cargo +nightly miri nextest run --all-features
```
automation/Jenkinsfile (vendored): 4 lines changed
@@ -21,7 +21,9 @@ pipeline {
}
stage('Docs') {
steps {
sh 'cargo +nightly doc --all-features'
sh """
RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features
"""
}
}
stage('Rustfmt') {
docs.sh (new executable file): 3 lines changed
@@ -0,0 +1,3 @@
#!/bin/sh
export RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options"
cargo +nightly doc --all-features --open
@@ -4,7 +4,9 @@ Checklist for new releases

# Pre-Release

1. Make sure any new modules are documented sufficiently and check the docs with
   `cargo +nightly doc --all-features --config 'build.rustdocflags=["--cfg", "docs_rs"]' --open`.
   `RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features --open`
   or `cargo +nightly doc --all-features --config 'build.rustdocflags=["--cfg", "docsrs", "--generate-link-to-definition"]' --open`
   (was problematic on more recent nightly versions).
2. Bump the version specifier in `Cargo.toml`.
3. Update `CHANGELOG.md`: Convert the `unreleased` section into a version section with a date and add a new
   `unreleased` section.
@@ -1,5 +1,4 @@
//! Generic CFDP length-value (LV) abstraction as specified in CFDP 5.1.8.
use crate::cfdp::TlvLvError;
use crate::ByteConversionError;
use core::str::Utf8Error;
#[cfg(feature = "serde")]
@@ -7,6 +6,8 @@ use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::string::String;

use super::TlvLvDataTooLargeError;

pub const MIN_LV_LEN: usize = 1;

/// Generic CFDP length-value (LV) abstraction as specified in CFDP 5.1.8.
@@ -63,9 +64,9 @@ pub(crate) fn generic_len_check_deserialization(

impl<'data> Lv<'data> {
#[inline]
pub fn new(data: &[u8]) -> Result<Lv, TlvLvError> {
pub fn new(data: &[u8]) -> Result<Lv, TlvLvDataTooLargeError> {
if data.len() > u8::MAX as usize {
return Err(TlvLvError::DataTooLarge(data.len()));
return Err(TlvLvDataTooLargeError(data.len()));
}
Ok(Lv {
data,
@@ -85,7 +86,7 @@ impl<'data> Lv<'data> {
/// Helper function to build a string LV. This is especially useful for the file or directory
/// path LVs
#[inline]
pub fn new_from_str(str_slice: &str) -> Result<Lv, TlvLvError> {
pub fn new_from_str(str_slice: &str) -> Result<Lv, TlvLvDataTooLargeError> {
Self::new(str_slice.as_bytes())
}
@@ -93,7 +94,7 @@ impl<'data> Lv<'data> {
/// path LVs
#[cfg(feature = "std")]
#[inline]
pub fn new_from_string(string: &'data String) -> Result<Lv<'data>, TlvLvError> {
pub fn new_from_string(string: &'data String) -> Result<Lv<'data>, TlvLvDataTooLargeError> {
Self::new(string.as_bytes())
}
@@ -177,10 +178,10 @@ impl<'data> Lv<'data> {

#[cfg(test)]
pub mod tests {
use super::*;
use alloc::string::ToString;

use crate::cfdp::TlvLvError;
use super::*;

use crate::ByteConversionError;
use std::string::String;
@@ -271,15 +272,11 @@ pub mod tests {
let lv = Lv::new(&data_big);
assert!(lv.is_err());
let error = lv.unwrap_err();
if let TlvLvError::DataTooLarge(size) = error {
assert_eq!(size, u8::MAX as usize + 1);
assert_eq!(
error.to_string(),
"data with size 256 larger than allowed 255 bytes"
);
} else {
panic!("invalid exception {:?}", error)
}
assert_eq!(error.0, u8::MAX as usize + 1);
assert_eq!(
error.to_string(),
"data with size 256 larger than allowed 255 bytes"
);
}

#[test]
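The effect of the narrowed error type in `Lv::new` can be illustrated with a short sketch. It only uses what the diff above shows; the import paths are assumptions based on the crate layout.

```rust
// Sketch only: Lv construction now fails only with the dedicated error type.
use spacepackets::cfdp::lv::Lv;
use spacepackets::cfdp::TlvLvDataTooLargeError;

fn main() {
    // Values up to 255 bytes fit into the one-byte length field.
    assert!(Lv::new(b"hello-world.txt").is_ok());
    // Oversized values now yield TlvLvDataTooLargeError instead of the much
    // broader TlvLvError, so callers no longer have to match on unrelated variants.
    let too_big = [0u8; 256];
    let error: TlvLvDataTooLargeError = Lv::new(&too_big).unwrap_err();
    assert_eq!(error.0, 256);
    assert_eq!(
        error.to_string(),
        "data with size 256 larger than allowed 255 bytes"
    );
}
```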
@@ -1,11 +1,8 @@
//! Low-level CCSDS File Delivery Protocol (CFDP) support according to [CCSDS 727.0-B-5](https://public.ccsds.org/Pubs/727x0b5.pdf).
use crate::ByteConversionError;
use core::fmt::{Display, Formatter};
use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::error::Error;

pub mod lv;
pub mod pdu;
@@ -116,7 +113,7 @@ pub enum ConditionCode {
FileSizeError = 0b0110,
NakLimitReached = 0b0111,
InactivityDetected = 0b1000,
CheckLimitReached = 0b1001,
CheckLimitReached = 0b1010,
UnsupportedChecksumType = 0b1011,
/// Not an actual fault condition for which fault handler overrides can be specified
SuspendRequestReceived = 0b1110,
@@ -176,76 +173,43 @@ impl Default for ChecksumType {

pub const NULL_CHECKSUM_U32: [u8; 4] = [0; 4];

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[error("data with size {0} larger than allowed {max} bytes", max = u8::MAX)]
pub struct TlvLvDataTooLargeError(pub usize);

/// First value: Found value. Second value: Expected value if there is one.
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[error("invalid TLV type field, found {found}, expected {expected:?}")]
pub struct InvalidTlvTypeFieldError {
found: u8,
expected: Option<u8>,
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum TlvLvError {
DataTooLarge(usize),
ByteConversion(ByteConversionError),
/// First value: Found value. Second value: Expected value if there is one.
InvalidTlvTypeField {
found: u8,
expected: Option<u8>,
},
/// Logically invalid value length detected. The value length may not exceed 255 bytes.
/// Depending on the concrete TLV type, the value length may also be logically invalid.
#[error("{0}")]
DataTooLarge(#[from] TlvLvDataTooLargeError),
#[error("byte conversion error: {0}")]
ByteConversion(#[from] ByteConversionError),
#[error("{0}")]
InvalidTlvTypeField(#[from] InvalidTlvTypeFieldError),
#[error("invalid value length {0}")]
InvalidValueLength(usize),
/// Only applies to filestore requests and responses. Second name was missing where one is
/// expected.
#[error("second name missing for filestore request or response")]
SecondNameMissing,
/// Invalid action code for filestore requests or responses.
#[error("invalid action code {0}")]
InvalidFilestoreActionCode(u8),
}

impl From<ByteConversionError> for TlvLvError {
fn from(value: ByteConversionError) -> Self {
Self::ByteConversion(value)
}
}

impl Display for TlvLvError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
TlvLvError::DataTooLarge(data_len) => {
write!(
f,
"data with size {} larger than allowed {} bytes",
data_len,
u8::MAX
)
}
TlvLvError::ByteConversion(e) => {
write!(f, "tlv or lv byte conversion: {}", e)
}
TlvLvError::InvalidTlvTypeField { found, expected } => {
write!(
f,
"invalid TLV type field, found {found}, expected {expected:?}"
)
}
TlvLvError::InvalidValueLength(len) => {
write!(f, "invalid value length {len}")
}
TlvLvError::SecondNameMissing => {
write!(f, "second name missing for filestore request or response")
}
TlvLvError::InvalidFilestoreActionCode(raw) => {
write!(f, "invalid filestore action code with raw value {raw}")
}
}
}
}

#[cfg(feature = "std")]
impl Error for TlvLvError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
TlvLvError::ByteConversion(e) => Some(e),
_ => None,
}
}
}

#[cfg(test)]
mod tests {
use super::*;
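The pattern above, where hand-written `Display` and `Error` impls are dropped in favour of a `thiserror::Error` derive, boils down to the following. This is a generic illustration with placeholder types, not code from the crate; `std::io::Error` merely stands in for `ByteConversionError`.

```rust
// Generic illustration of the thiserror migration pattern (not crate code).
// The derive generates the Display impl from the #[error(...)] attributes,
// the std::error::Error impl including source(), and From via #[from].
use thiserror::Error;

#[derive(Debug, Error)]
pub enum ExampleError {
    #[error("data with size {0} larger than allowed {max} bytes", max = u8::MAX)]
    DataTooLarge(usize),
    #[error("byte conversion error: {0}")]
    ByteConversion(#[from] std::io::Error), // placeholder wrapped error type
}

fn main() {
    let err = ExampleError::DataTooLarge(256);
    // Display output comes directly from the #[error(...)] format string.
    assert_eq!(
        err.to_string(),
        "data with size 256 larger than allowed 255 bytes"
    );
}
```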
@ -25,20 +25,36 @@ pub struct EofPdu {
|
||||
}
|
||||
|
||||
impl EofPdu {
|
||||
pub fn new_no_error(mut pdu_header: PduHeader, file_checksum: u32, file_size: u64) -> Self {
|
||||
pub fn new(
|
||||
mut pdu_header: PduHeader,
|
||||
condition_code: ConditionCode,
|
||||
file_checksum: u32,
|
||||
file_size: u64,
|
||||
fault_location: Option<EntityIdTlv>,
|
||||
) -> Self {
|
||||
// Force correct direction flag.
|
||||
pdu_header.pdu_conf.direction = Direction::TowardsReceiver;
|
||||
let mut eof_pdu = Self {
|
||||
pdu_header,
|
||||
condition_code: ConditionCode::NoError,
|
||||
condition_code,
|
||||
file_checksum,
|
||||
file_size,
|
||||
fault_location: None,
|
||||
fault_location,
|
||||
};
|
||||
eof_pdu.pdu_header.pdu_datafield_len = eof_pdu.calc_pdu_datafield_len() as u16;
|
||||
eof_pdu
|
||||
}
|
||||
|
||||
pub fn new_no_error(pdu_header: PduHeader, file_checksum: u32, file_size: u64) -> Self {
|
||||
Self::new(
|
||||
pdu_header,
|
||||
ConditionCode::NoError,
|
||||
file_checksum,
|
||||
file_size,
|
||||
None,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn pdu_header(&self) -> &PduHeader {
|
||||
&self.pdu_header
|
||||
}
|
||||
@ -148,7 +164,7 @@ impl WritablePduPacket for EofPdu {
|
||||
&mut buf[current_idx..],
|
||||
)?;
|
||||
if let Some(fault_location) = self.fault_location {
|
||||
current_idx += fault_location.write_to_bytes(buf)?;
|
||||
current_idx += fault_location.write_to_bytes(&mut buf[current_idx..])?;
|
||||
}
|
||||
if self.crc_flag() == CrcFlag::WithCrc {
|
||||
current_idx = add_pdu_crc(buf, current_idx);
|
||||
@ -171,13 +187,23 @@ mod tests {
|
||||
use crate::cfdp::{ConditionCode, CrcFlag, LargeFileFlag, PduType, TransmissionMode};
|
||||
#[cfg(feature = "serde")]
|
||||
use crate::tests::generic_serde_test;
|
||||
use crate::util::{UnsignedByteFieldU16, UnsignedEnum};
|
||||
|
||||
fn verify_state(&eof_pdu: &EofPdu, file_flag: LargeFileFlag) {
|
||||
fn verify_state_no_error_no_crc(eof_pdu: &EofPdu, file_flag: LargeFileFlag) {
|
||||
verify_state(eof_pdu, CrcFlag::NoCrc, file_flag, ConditionCode::NoError);
|
||||
}
|
||||
|
||||
fn verify_state(
|
||||
eof_pdu: &EofPdu,
|
||||
crc_flag: CrcFlag,
|
||||
file_flag: LargeFileFlag,
|
||||
cond_code: ConditionCode,
|
||||
) {
|
||||
assert_eq!(eof_pdu.file_checksum(), 0x01020304);
|
||||
assert_eq!(eof_pdu.file_size(), 12);
|
||||
assert_eq!(eof_pdu.condition_code(), ConditionCode::NoError);
|
||||
assert_eq!(eof_pdu.condition_code(), cond_code);
|
||||
|
||||
assert_eq!(eof_pdu.crc_flag(), CrcFlag::NoCrc);
|
||||
assert_eq!(eof_pdu.crc_flag(), crc_flag);
|
||||
assert_eq!(eof_pdu.file_flag(), file_flag);
|
||||
assert_eq!(eof_pdu.pdu_type(), PduType::FileDirective);
|
||||
assert_eq!(
|
||||
@ -197,7 +223,7 @@ mod tests {
|
||||
let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0);
|
||||
let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12);
|
||||
assert_eq!(eof_pdu.len_written(), pdu_header.header_len() + 2 + 4 + 4);
|
||||
verify_state(&eof_pdu, LargeFileFlag::Normal);
|
||||
verify_state_no_error_no_crc(&eof_pdu, LargeFileFlag::Normal);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -271,7 +297,7 @@ mod tests {
|
||||
buf[written - 1] -= 1;
|
||||
let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16;
|
||||
let error = EofPdu::from_bytes(&buf).unwrap_err();
|
||||
if let PduError::ChecksumError(e) = error {
|
||||
if let PduError::Checksum(e) = error {
|
||||
assert_eq!(e, crc);
|
||||
} else {
|
||||
panic!("expected crc error");
|
||||
@ -283,7 +309,7 @@ mod tests {
|
||||
let pdu_conf = common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Large);
|
||||
let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0);
|
||||
let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12);
|
||||
verify_state(&eof_pdu, LargeFileFlag::Large);
|
||||
verify_state_no_error_no_crc(&eof_pdu, LargeFileFlag::Large);
|
||||
assert_eq!(eof_pdu.len_written(), pdu_header.header_len() + 2 + 8 + 4);
|
||||
}
|
||||
|
||||
@ -295,4 +321,48 @@ mod tests {
|
||||
let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12);
|
||||
generic_serde_test(eof_pdu);
|
||||
}
|
||||
|
||||
fn generic_test_with_fault_location_and_error(crc: CrcFlag) {
|
||||
let pdu_conf = common_pdu_conf(crc, LargeFileFlag::Normal);
|
||||
let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0);
|
||||
let eof_pdu = EofPdu::new(
|
||||
pdu_header,
|
||||
ConditionCode::FileChecksumFailure,
|
||||
0x01020304,
|
||||
12,
|
||||
Some(EntityIdTlv::new(UnsignedByteFieldU16::new(5).into())),
|
||||
);
|
||||
let mut expected_len = pdu_header.header_len() + 2 + 4 + 4 + 4;
|
||||
if crc == CrcFlag::WithCrc {
|
||||
expected_len += 2;
|
||||
}
|
||||
// Entity ID TLV increases the length by 4.
|
||||
assert_eq!(eof_pdu.len_written(), expected_len);
|
||||
verify_state(
|
||||
&eof_pdu,
|
||||
crc,
|
||||
LargeFileFlag::Normal,
|
||||
ConditionCode::FileChecksumFailure,
|
||||
);
|
||||
let eof_vec = eof_pdu.to_vec().unwrap();
|
||||
let eof_read_back = EofPdu::from_bytes(&eof_vec);
|
||||
if let Err(e) = eof_read_back {
|
||||
panic!("deserialization failed with: {e}")
|
||||
}
|
||||
let eof_read_back = eof_read_back.unwrap();
|
||||
assert_eq!(eof_read_back, eof_pdu);
|
||||
assert!(eof_read_back.fault_location.is_some());
|
||||
assert_eq!(eof_read_back.fault_location.unwrap().entity_id().value(), 5);
|
||||
assert_eq!(eof_read_back.fault_location.unwrap().entity_id().size(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_with_fault_location_and_error() {
|
||||
generic_test_with_fault_location_and_error(CrcFlag::NoCrc);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_with_fault_location_and_error_and_crc() {
|
||||
generic_test_with_fault_location_and_error(CrcFlag::WithCrc);
|
||||
}
|
||||
}
|
||||
|
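The generic `EofPdu::new` constructor introduced above can be exercised as follows. This is a hedged sketch based on the constructor signature and the tests in this diff; the default PDU configuration, the buffer size and the entity ID value are placeholder assumptions.

```rust
// Sketch only: building an EOF PDU with a condition code and fault location.
use spacepackets::cfdp::pdu::eof::EofPdu;
use spacepackets::cfdp::pdu::{CommonPduConfig, PduHeader, WritablePduPacket};
use spacepackets::cfdp::tlv::EntityIdTlv;
use spacepackets::cfdp::ConditionCode;
use spacepackets::util::UnsignedByteFieldU16;

fn main() {
    let pdu_header = PduHeader::new_no_file_data(CommonPduConfig::default(), 0);
    // Unlike new_no_error, the generic constructor also takes the condition code
    // and an optional fault location TLV.
    let eof = EofPdu::new(
        pdu_header,
        ConditionCode::FileChecksumFailure,
        0x01020304, // file checksum
        12,         // file size
        Some(EntityIdTlv::new(UnsignedByteFieldU16::new(5).into())),
    );
    let mut buf = [0u8; 64];
    let written = eof.write_to_bytes(&mut buf).expect("writing EOF PDU failed");
    assert_eq!(written, eof.len_written());
}
```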
@ -92,16 +92,67 @@ impl<'seg_meta> SegmentMetadata<'seg_meta> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
struct FdPduBase<'seg_meta> {
|
||||
pdu_header: PduHeader,
|
||||
#[cfg_attr(feature = "serde", serde(borrow))]
|
||||
segment_metadata: Option<SegmentMetadata<'seg_meta>>,
|
||||
offset: u64,
|
||||
}
|
||||
|
||||
impl CfdpPdu for FdPduBase<'_> {
|
||||
fn pdu_header(&self) -> &PduHeader {
|
||||
&self.pdu_header
|
||||
}
|
||||
|
||||
fn file_directive_type(&self) -> Option<FileDirectiveType> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl FdPduBase<'_> {
|
||||
fn calc_pdu_datafield_len(&self, file_data_len: u64) -> usize {
|
||||
let mut len = core::mem::size_of::<u32>();
|
||||
if self.pdu_header.pdu_conf.file_flag == LargeFileFlag::Large {
|
||||
len += 4;
|
||||
}
|
||||
if self.segment_metadata.is_some() {
|
||||
len += self.segment_metadata.as_ref().unwrap().written_len()
|
||||
}
|
||||
len += file_data_len as usize;
|
||||
if self.crc_flag() == CrcFlag::WithCrc {
|
||||
len += 2;
|
||||
}
|
||||
len
|
||||
}
|
||||
|
||||
fn write_common_fields_to_bytes(&self, buf: &mut [u8]) -> Result<usize, PduError> {
|
||||
let mut current_idx = self.pdu_header.write_to_bytes(buf)?;
|
||||
if self.segment_metadata.is_some() {
|
||||
current_idx += self
|
||||
.segment_metadata
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.write_to_bytes(&mut buf[current_idx..])?;
|
||||
}
|
||||
current_idx += write_fss_field(
|
||||
self.pdu_header.common_pdu_conf().file_flag,
|
||||
self.offset,
|
||||
&mut buf[current_idx..],
|
||||
)?;
|
||||
Ok(current_idx)
|
||||
}
|
||||
}
|
||||
|
||||
/// File Data PDU abstraction.
|
||||
///
|
||||
/// For more information, refer to CFDP chapter 5.3.
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
pub struct FileDataPdu<'seg_meta, 'file_data> {
|
||||
pdu_header: PduHeader,
|
||||
#[cfg_attr(feature = "serde", serde(borrow))]
|
||||
segment_metadata: Option<SegmentMetadata<'seg_meta>>,
|
||||
offset: u64,
|
||||
common: FdPduBase<'seg_meta>,
|
||||
file_data: &'file_data [u8],
|
||||
}
|
||||
|
||||
@ -134,42 +185,34 @@ impl<'seg_meta, 'file_data> FileDataPdu<'seg_meta, 'file_data> {
|
||||
pdu_header.seg_metadata_flag = SegmentMetadataFlag::Present;
|
||||
}
|
||||
let mut pdu = Self {
|
||||
pdu_header,
|
||||
segment_metadata,
|
||||
offset,
|
||||
common: FdPduBase {
|
||||
pdu_header,
|
||||
segment_metadata,
|
||||
offset,
|
||||
},
|
||||
file_data,
|
||||
};
|
||||
pdu.pdu_header.pdu_datafield_len = pdu.calc_pdu_datafield_len() as u16;
|
||||
pdu.common.pdu_header.pdu_datafield_len = pdu.calc_pdu_datafield_len() as u16;
|
||||
pdu
|
||||
}
|
||||
|
||||
fn calc_pdu_datafield_len(&self) -> usize {
|
||||
let mut len = core::mem::size_of::<u32>();
|
||||
if self.pdu_header.pdu_conf.file_flag == LargeFileFlag::Large {
|
||||
len += 4;
|
||||
}
|
||||
if self.segment_metadata.is_some() {
|
||||
len += self.segment_metadata.as_ref().unwrap().written_len()
|
||||
}
|
||||
len += self.file_data.len();
|
||||
if self.crc_flag() == CrcFlag::WithCrc {
|
||||
len += 2;
|
||||
}
|
||||
len
|
||||
self.common
|
||||
.calc_pdu_datafield_len(self.file_data.len() as u64)
|
||||
}
|
||||
|
||||
pub fn segment_metadata(&self) -> Option<&SegmentMetadata> {
|
||||
self.common.segment_metadata.as_ref()
|
||||
}
|
||||
|
||||
pub fn offset(&self) -> u64 {
|
||||
self.offset
|
||||
self.common.offset
|
||||
}
|
||||
|
||||
pub fn file_data(&self) -> &'file_data [u8] {
|
||||
self.file_data
|
||||
}
|
||||
|
||||
pub fn segment_metadata(&self) -> Option<&SegmentMetadata> {
|
||||
self.segment_metadata.as_ref()
|
||||
}
|
||||
|
||||
pub fn from_bytes<'buf: 'seg_meta + 'file_data>(buf: &'buf [u8]) -> Result<Self, PduError> {
|
||||
let (pdu_header, mut current_idx) = PduHeader::from_bytes(buf)?;
|
||||
let full_len_without_crc = pdu_header.verify_length_and_checksum(buf)?;
|
||||
@ -190,16 +233,18 @@ impl<'seg_meta, 'file_data> FileDataPdu<'seg_meta, 'file_data> {
|
||||
.into());
|
||||
}
|
||||
Ok(Self {
|
||||
pdu_header,
|
||||
segment_metadata,
|
||||
offset,
|
||||
common: FdPduBase {
|
||||
pdu_header,
|
||||
segment_metadata,
|
||||
offset,
|
||||
},
|
||||
file_data: &buf[current_idx..full_len_without_crc],
|
||||
})
|
||||
}
|
||||
}
|
||||
impl CfdpPdu for FileDataPdu<'_, '_> {
|
||||
fn pdu_header(&self) -> &PduHeader {
|
||||
&self.pdu_header
|
||||
&self.common.pdu_header
|
||||
}
|
||||
|
||||
fn file_directive_type(&self) -> Option<FileDirectiveType> {
|
||||
@ -216,19 +261,8 @@ impl WritablePduPacket for FileDataPdu<'_, '_> {
|
||||
}
|
||||
.into());
|
||||
}
|
||||
let mut current_idx = self.pdu_header.write_to_bytes(buf)?;
|
||||
if self.segment_metadata.is_some() {
|
||||
current_idx += self
|
||||
.segment_metadata
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.write_to_bytes(&mut buf[current_idx..])?;
|
||||
}
|
||||
current_idx += write_fss_field(
|
||||
self.pdu_header.common_pdu_conf().file_flag,
|
||||
self.offset,
|
||||
&mut buf[current_idx..],
|
||||
)?;
|
||||
|
||||
let mut current_idx = self.common.write_common_fields_to_bytes(buf)?;
|
||||
buf[current_idx..current_idx + self.file_data.len()].copy_from_slice(self.file_data);
|
||||
current_idx += self.file_data.len();
|
||||
if self.crc_flag() == CrcFlag::WithCrc {
|
||||
@ -238,10 +272,167 @@ impl WritablePduPacket for FileDataPdu<'_, '_> {
|
||||
}
|
||||
|
||||
fn len_written(&self) -> usize {
|
||||
self.pdu_header.header_len() + self.calc_pdu_datafield_len()
|
||||
self.common.pdu_header.header_len() + self.calc_pdu_datafield_len()
|
||||
}
|
||||
}
|
||||
|
||||
/// File Data PDU creator abstraction.
|
||||
///
|
||||
/// This special creator object allows reading into the file data buffer directly. This avoids
|
||||
/// the need of an additional buffer to create a file data PDU. This structure therefore
|
||||
/// does not implement the regular [WritablePduPacket] trait.
|
||||
///
|
||||
/// For more information, refer to CFDP chapter 5.3.
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
pub struct FileDataPduCreatorWithReservedDatafield<'seg_meta> {
|
||||
#[cfg_attr(feature = "serde", serde(borrow))]
|
||||
common: FdPduBase<'seg_meta>,
|
||||
file_data_len: u64,
|
||||
}
|
||||
|
||||
impl<'seg_meta> FileDataPduCreatorWithReservedDatafield<'seg_meta> {
|
||||
pub fn new_with_seg_metadata(
|
||||
pdu_header: PduHeader,
|
||||
segment_metadata: SegmentMetadata<'seg_meta>,
|
||||
offset: u64,
|
||||
file_data_len: u64,
|
||||
) -> Self {
|
||||
Self::new_generic(pdu_header, Some(segment_metadata), offset, file_data_len)
|
||||
}
|
||||
|
||||
pub fn new_no_seg_metadata(pdu_header: PduHeader, offset: u64, file_data_len: u64) -> Self {
|
||||
Self::new_generic(pdu_header, None, offset, file_data_len)
|
||||
}
|
||||
|
||||
pub fn new_generic(
|
||||
mut pdu_header: PduHeader,
|
||||
segment_metadata: Option<SegmentMetadata<'seg_meta>>,
|
||||
offset: u64,
|
||||
file_data_len: u64,
|
||||
) -> Self {
|
||||
pdu_header.pdu_type = PduType::FileData;
|
||||
if segment_metadata.is_some() {
|
||||
pdu_header.seg_metadata_flag = SegmentMetadataFlag::Present;
|
||||
}
|
||||
let mut pdu = Self {
|
||||
common: FdPduBase {
|
||||
pdu_header,
|
||||
segment_metadata,
|
||||
offset,
|
||||
},
|
||||
file_data_len,
|
||||
};
|
||||
pdu.common.pdu_header.pdu_datafield_len = pdu.calc_pdu_datafield_len() as u16;
|
||||
pdu
|
||||
}
|
||||
|
||||
fn calc_pdu_datafield_len(&self) -> usize {
|
||||
self.common.calc_pdu_datafield_len(self.file_data_len)
|
||||
}
|
||||
|
||||
pub fn len_written(&self) -> usize {
|
||||
self.common.pdu_header.header_len() + self.calc_pdu_datafield_len()
|
||||
}
|
||||
|
||||
/// This function performs a partial write by writing all data except the file data
|
||||
/// and the CRC.
|
||||
///
|
||||
/// It returns a [FileDataPduCreatorWithUnwrittenData] which provides a mutable slice to
|
||||
/// the reserved file data field. The user can read file data into this field directly and
|
||||
/// then finish the PDU creation using the [FileDataPduCreatorWithUnwrittenData::finish] call.
|
||||
pub fn write_to_bytes_partially<'buf>(
|
||||
&self,
|
||||
buf: &'buf mut [u8],
|
||||
) -> Result<FileDataPduCreatorWithUnwrittenData<'buf>, PduError> {
|
||||
if buf.len() < self.len_written() {
|
||||
return Err(ByteConversionError::ToSliceTooSmall {
|
||||
found: buf.len(),
|
||||
expected: self.len_written(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
let mut current_idx = self.common.write_common_fields_to_bytes(buf)?;
|
||||
let file_data_offset = current_idx as u64;
|
||||
current_idx += self.file_data_len as usize;
|
||||
if self.crc_flag() == CrcFlag::WithCrc {
|
||||
current_idx += 2;
|
||||
}
|
||||
Ok(FileDataPduCreatorWithUnwrittenData {
|
||||
write_buf: &mut buf[0..current_idx],
|
||||
file_data_offset,
|
||||
file_data_len: self.file_data_len,
|
||||
needs_crc: self.crc_flag() == CrcFlag::WithCrc,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl CfdpPdu for FileDataPduCreatorWithReservedDatafield<'_> {
|
||||
fn pdu_header(&self) -> &PduHeader {
|
||||
&self.common.pdu_header
|
||||
}
|
||||
|
||||
fn file_directive_type(&self) -> Option<FileDirectiveType> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// This structure is created with [FileDataPduCreatorWithReservedDatafield::write_to_bytes_partially]
|
||||
/// and provides an API to read file data from the virtual filesystem into the file data PDU buffer
|
||||
/// directly.
|
||||
///
|
||||
/// This structure provides a mutable slice to the reserved file data field. The user can read
|
||||
/// file data into this field directly and then finish the PDU creation using the
|
||||
/// [FileDataPduCreatorWithUnwrittenData::finish] call.
|
||||
pub struct FileDataPduCreatorWithUnwrittenData<'buf> {
|
||||
write_buf: &'buf mut [u8],
|
||||
file_data_offset: u64,
|
||||
file_data_len: u64,
|
||||
needs_crc: bool,
|
||||
}
|
||||
|
||||
impl FileDataPduCreatorWithUnwrittenData<'_> {
|
||||
pub fn file_data_field_mut(&mut self) -> &mut [u8] {
|
||||
&mut self.write_buf[self.file_data_offset as usize
|
||||
..self.file_data_offset as usize + self.file_data_len as usize]
|
||||
}
|
||||
|
||||
/// This function needs to be called to add a CRC to the file data PDU where applicable.
|
||||
///
|
||||
/// It returns the full written size of the PDU.
|
||||
pub fn finish(self) -> usize {
|
||||
if self.needs_crc {
|
||||
add_pdu_crc(
|
||||
self.write_buf,
|
||||
self.file_data_offset as usize + self.file_data_len as usize,
|
||||
);
|
||||
}
|
||||
self.write_buf.len()
|
||||
}
|
||||
}
|
||||
|
||||
/// This function can be used to calculate the maximum allowed file segment size for
|
||||
/// a given maximum packet length and the segment metadata if there is any.
|
||||
pub fn calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(
|
||||
pdu_header: &PduHeader,
|
||||
max_packet_len: usize,
|
||||
segment_metadata: Option<&SegmentMetadata>,
|
||||
) -> usize {
|
||||
let mut subtract = pdu_header.header_len();
|
||||
if segment_metadata.is_some() {
|
||||
subtract += 1 + segment_metadata.as_ref().unwrap().metadata().unwrap().len();
|
||||
}
|
||||
if pdu_header.common_pdu_conf().file_flag == LargeFileFlag::Large {
|
||||
subtract += 8;
|
||||
} else {
|
||||
subtract += 4;
|
||||
}
|
||||
if pdu_header.common_pdu_conf().crc_flag == CrcFlag::WithCrc {
|
||||
subtract += 2;
|
||||
}
|
||||
max_packet_len.saturating_sub(subtract)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@ -263,7 +454,7 @@ mod tests {
|
||||
assert!(fd_pdu.segment_metadata().is_none());
|
||||
assert_eq!(
|
||||
fd_pdu.len_written(),
|
||||
fd_pdu.pdu_header.header_len() + core::mem::size_of::<u32>() + 4
|
||||
fd_pdu.pdu_header().header_len() + core::mem::size_of::<u32>() + 4
|
||||
);
|
||||
|
||||
assert_eq!(fd_pdu.crc_flag(), CrcFlag::NoCrc);
|
||||
@ -290,11 +481,11 @@ mod tests {
|
||||
let written = res.unwrap();
|
||||
assert_eq!(
|
||||
written,
|
||||
fd_pdu.pdu_header.header_len() + core::mem::size_of::<u32>() + 4
|
||||
fd_pdu.pdu_header().header_len() + core::mem::size_of::<u32>() + 4
|
||||
);
|
||||
let mut current_idx = fd_pdu.pdu_header.header_len();
|
||||
let mut current_idx = fd_pdu.pdu_header().header_len();
|
||||
let file_size = u32::from_be_bytes(
|
||||
buf[fd_pdu.pdu_header.header_len()..fd_pdu.pdu_header.header_len() + 4]
|
||||
buf[fd_pdu.pdu_header().header_len()..fd_pdu.pdu_header().header_len() + 4]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
);
|
||||
@ -353,7 +544,7 @@ mod tests {
|
||||
buf[written - 1] -= 1;
|
||||
let crc: u16 = ((buf[written - 2] as u16) << 8) | buf[written - 1] as u16;
|
||||
let error = FileDataPdu::from_bytes(&buf).unwrap_err();
|
||||
if let PduError::ChecksumError(e) = error {
|
||||
if let PduError::Checksum(e) = error {
|
||||
assert_eq!(e, crc);
|
||||
} else {
|
||||
panic!("expected crc error");
|
||||
@ -380,7 +571,7 @@ mod tests {
|
||||
assert_eq!(*fd_pdu.segment_metadata().unwrap(), segment_meta);
|
||||
assert_eq!(
|
||||
fd_pdu.len_written(),
|
||||
fd_pdu.pdu_header.header_len()
|
||||
fd_pdu.pdu_header().header_len()
|
||||
+ 1
|
||||
+ seg_metadata.len()
|
||||
+ core::mem::size_of::<u32>()
|
||||
@ -390,7 +581,7 @@ mod tests {
|
||||
fd_pdu
|
||||
.write_to_bytes(&mut buf)
|
||||
.expect("writing FD PDU failed");
|
||||
let mut current_idx = fd_pdu.pdu_header.header_len();
|
||||
let mut current_idx = fd_pdu.pdu_header().header_len();
|
||||
assert_eq!(
|
||||
RecordContinuationState::try_from((buf[current_idx] >> 6) & 0b11).unwrap(),
|
||||
RecordContinuationState::StartAndEnd
|
||||
@ -482,4 +673,142 @@ mod tests {
|
||||
let output_converted_back: FileDataPdu = from_bytes(&output).unwrap();
|
||||
assert_eq!(output_converted_back, fd_pdu);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fd_pdu_creator_with_reserved_field_no_crc() {
|
||||
let common_conf =
|
||||
CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap();
|
||||
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
|
||||
let test_str = "hello world!";
|
||||
let fd_pdu = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
|
||||
pdu_header,
|
||||
10,
|
||||
test_str.len() as u64,
|
||||
);
|
||||
let mut write_buf: [u8; 64] = [0; 64];
|
||||
let mut pdu_unwritten = fd_pdu
|
||||
.write_to_bytes_partially(&mut write_buf)
|
||||
.expect("partial write failed");
|
||||
pdu_unwritten
|
||||
.file_data_field_mut()
|
||||
.copy_from_slice(test_str.as_bytes());
|
||||
pdu_unwritten.finish();
|
||||
|
||||
let pdu_reader = FileDataPdu::from_bytes(&write_buf).expect("reading file data PDU failed");
|
||||
assert_eq!(
|
||||
core::str::from_utf8(pdu_reader.file_data()).expect("reading utf8 string failed"),
|
||||
"hello world!"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fd_pdu_creator_with_reserved_field_with_crc() {
|
||||
let mut common_conf =
|
||||
CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap();
|
||||
common_conf.crc_flag = true.into();
|
||||
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
|
||||
let test_str = "hello world!";
|
||||
let fd_pdu = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
|
||||
pdu_header,
|
||||
10,
|
||||
test_str.len() as u64,
|
||||
);
|
||||
let mut write_buf: [u8; 64] = [0; 64];
|
||||
let mut pdu_unwritten = fd_pdu
|
||||
.write_to_bytes_partially(&mut write_buf)
|
||||
.expect("partial write failed");
|
||||
pdu_unwritten
|
||||
.file_data_field_mut()
|
||||
.copy_from_slice(test_str.as_bytes());
|
||||
pdu_unwritten.finish();
|
||||
|
||||
let pdu_reader = FileDataPdu::from_bytes(&write_buf).expect("reading file data PDU failed");
|
||||
assert_eq!(
|
||||
core::str::from_utf8(pdu_reader.file_data()).expect("reading utf8 string failed"),
|
||||
"hello world!"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fd_pdu_creator_with_reserved_field_with_crc_without_finish_fails() {
|
||||
let mut common_conf =
|
||||
CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap();
|
||||
common_conf.crc_flag = true.into();
|
||||
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
|
||||
let test_str = "hello world!";
|
||||
let fd_pdu = FileDataPduCreatorWithReservedDatafield::new_no_seg_metadata(
|
||||
pdu_header,
|
||||
10,
|
||||
test_str.len() as u64,
|
||||
);
|
||||
let mut write_buf: [u8; 64] = [0; 64];
|
||||
let mut pdu_unwritten = fd_pdu
|
||||
.write_to_bytes_partially(&mut write_buf)
|
||||
.expect("partial write failed");
|
||||
pdu_unwritten
|
||||
.file_data_field_mut()
|
||||
.copy_from_slice(test_str.as_bytes());
|
||||
|
||||
let pdu_reader_error = FileDataPdu::from_bytes(&write_buf);
|
||||
assert!(pdu_reader_error.is_err());
|
||||
let error = pdu_reader_error.unwrap_err();
|
||||
match error {
|
||||
PduError::Checksum(_) => (),
|
||||
_ => {
|
||||
panic!("unexpected PDU error {}", error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_max_file_seg_calculator_0() {
|
||||
let pdu_header = PduHeader::new_for_file_data_default(CommonPduConfig::default(), 0);
|
||||
assert_eq!(
|
||||
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 64, None),
|
||||
53
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_max_file_seg_calculator_1() {
|
||||
let common_conf = CommonPduConfig {
|
||||
crc_flag: CrcFlag::WithCrc,
|
||||
..Default::default()
|
||||
};
|
||||
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
|
||||
assert_eq!(
|
||||
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 64, None),
|
||||
51
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_max_file_seg_calculator_2() {
|
||||
let common_conf = CommonPduConfig {
|
||||
file_flag: LargeFileFlag::Large,
|
||||
..Default::default()
|
||||
};
|
||||
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
|
||||
assert_eq!(
|
||||
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 64, None),
|
||||
49
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_max_file_seg_calculator_saturating_sub() {
|
||||
let common_conf = CommonPduConfig {
|
||||
file_flag: LargeFileFlag::Large,
|
||||
..Default::default()
|
||||
};
|
||||
let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0);
|
||||
assert_eq!(
|
||||
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 15, None),
|
||||
0
|
||||
);
|
||||
assert_eq!(
|
||||
calculate_max_file_seg_len_for_max_packet_len_and_pdu_header(&pdu_header, 14, None),
|
||||
0
|
||||
);
|
||||
}
|
||||
}
|
||||
|
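The segment length calculator above is a plain byte-budget subtraction. The following worked example matches `test_max_file_seg_calculator_1` above; the 7-byte header length is inferred from the test expectations (64 minus 53 minus the 4-byte offset field), not taken from the standard.

```rust
// Worked byte budget for one file data PDU: 64-byte packet limit, CRC enabled,
// 32-bit file size, no segment metadata.
fn main() {
    let max_packet_len: usize = 64;
    let pdu_header_len: usize = 7; // assumed, consistent with the 64 -> 53 no-CRC case
    let offset_field: usize = 4; // 32-bit FSS offset; 8 bytes for LargeFileFlag::Large
    let crc: usize = 2; // only present when CrcFlag::WithCrc is set
    let max_seg_len = max_packet_len - pdu_header_len - offset_field - crc;
    assert_eq!(max_seg_len, 51); // matches test_max_file_seg_calculator_1
}
```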
@ -4,13 +4,14 @@ use crate::cfdp::pdu::{
|
||||
use crate::cfdp::tlv::{
|
||||
EntityIdTlv, FilestoreResponseTlv, GenericTlv, Tlv, TlvType, TlvTypeField, WritableTlv,
|
||||
};
|
||||
use crate::cfdp::{ConditionCode, CrcFlag, Direction, PduType, TlvLvError};
|
||||
use crate::cfdp::{ConditionCode, CrcFlag, Direction, PduType};
|
||||
use crate::ByteConversionError;
|
||||
use num_enum::{IntoPrimitive, TryFromPrimitive};
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::{CfdpPdu, WritablePduPacket};
|
||||
use super::tlv::ReadableTlv;
|
||||
use super::{CfdpPdu, InvalidTlvTypeFieldError, WritablePduPacket};
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
@ -331,22 +332,26 @@ impl<'buf> FinishedPduReader<'buf> {
|
||||
// last TLV, everything else would break the whole handling of the packet
|
||||
// TLVs.
|
||||
if current_idx != full_len_without_crc {
|
||||
return Err(PduError::FormatError);
|
||||
return Err(PduError::Format);
|
||||
}
|
||||
} else {
|
||||
return Err(TlvLvError::InvalidTlvTypeField {
|
||||
found: tlv_type.into(),
|
||||
expected: Some(TlvType::FilestoreResponse.into()),
|
||||
}
|
||||
.into());
|
||||
return Err(PduError::TlvLv(
|
||||
InvalidTlvTypeFieldError {
|
||||
found: tlv_type.into(),
|
||||
expected: Some(TlvType::FilestoreResponse.into()),
|
||||
}
|
||||
.into(),
|
||||
));
|
||||
}
|
||||
}
|
||||
TlvTypeField::Custom(raw) => {
|
||||
return Err(TlvLvError::InvalidTlvTypeField {
|
||||
found: raw,
|
||||
expected: None,
|
||||
}
|
||||
.into());
|
||||
return Err(PduError::TlvLv(
|
||||
InvalidTlvTypeFieldError {
|
||||
found: raw,
|
||||
expected: None,
|
||||
}
|
||||
.into(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -563,7 +568,7 @@ mod tests {
|
||||
buf[written - 1] -= 1;
|
||||
let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16;
|
||||
let error = FinishedPduReader::new(&buf).unwrap_err();
|
||||
if let PduError::ChecksumError(e) = error {
|
||||
if let PduError::Checksum(e) = error {
|
||||
assert_eq!(e, crc);
|
||||
} else {
|
||||
panic!("expected crc error");
|
||||
|
@ -1,3 +1,5 @@
|
||||
#[cfg(feature = "alloc")]
|
||||
use super::tlv::TlvOwned;
|
||||
use crate::cfdp::lv::Lv;
|
||||
use crate::cfdp::pdu::{
|
||||
add_pdu_crc, generic_length_checks_pdu_deserialization, read_fss_field, write_fss_field,
|
||||
@ -11,6 +13,7 @@ use alloc::vec::Vec;
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::tlv::ReadableTlv;
|
||||
use super::{CfdpPdu, WritablePduPacket};
|
||||
|
||||
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq)]
|
||||
@ -51,18 +54,26 @@ pub fn build_metadata_opts_from_vec(
|
||||
build_metadata_opts_from_slice(buf, tlvs.as_slice())
|
||||
}
|
||||
|
||||
#[cfg(feature = "alloc")]
|
||||
pub fn build_metadata_opts_from_owned_slice(tlvs: &[TlvOwned]) -> Vec<u8> {
|
||||
let mut sum_vec = Vec::new();
|
||||
for tlv in tlvs {
|
||||
sum_vec.extend(tlv.to_vec());
|
||||
}
|
||||
sum_vec
|
||||
}
|
||||
|
||||
/// Metadata PDU creator abstraction.
|
||||
///
|
||||
/// This abstraction exposes a specialized API for creating metadata PDUs as specified in
|
||||
/// CFDP chapter 5.2.5.
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
|
||||
pub struct MetadataPduCreator<'src_name, 'dest_name, 'opts> {
|
||||
pdu_header: PduHeader,
|
||||
metadata_params: MetadataGenericParams,
|
||||
src_file_name: Lv<'src_name>,
|
||||
dest_file_name: Lv<'dest_name>,
|
||||
options: &'opts [Tlv<'opts>],
|
||||
options: &'opts [u8],
|
||||
}
|
||||
|
||||
impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'opts> {
|
||||
@ -86,7 +97,7 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
|
||||
metadata_params: MetadataGenericParams,
|
||||
src_file_name: Lv<'src_name>,
|
||||
dest_file_name: Lv<'dest_name>,
|
||||
options: &'opts [Tlv<'opts>],
|
||||
options: &'opts [u8],
|
||||
) -> Self {
|
||||
Self::new(
|
||||
pdu_header,
|
||||
@ -102,7 +113,7 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
|
||||
metadata_params: MetadataGenericParams,
|
||||
src_file_name: Lv<'src_name>,
|
||||
dest_file_name: Lv<'dest_name>,
|
||||
options: &'opts [Tlv<'opts>],
|
||||
options: &'opts [u8],
|
||||
) -> Self {
|
||||
pdu_header.pdu_type = PduType::FileDirective;
|
||||
pdu_header.pdu_conf.direction = Direction::TowardsReceiver;
|
||||
@ -129,10 +140,19 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
|
||||
self.dest_file_name
|
||||
}
|
||||
|
||||
pub fn options(&self) -> &'opts [Tlv<'opts>] {
|
||||
pub fn options(&self) -> &'opts [u8] {
|
||||
self.options
|
||||
}
|
||||
|
||||
/// Yield an iterator which can be used to loop through all options. Returns [None] if the
|
||||
/// options field is empty.
|
||||
pub fn options_iter(&self) -> OptionsIter<'_> {
|
||||
OptionsIter {
|
||||
opt_buf: self.options,
|
||||
current_idx: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn calc_pdu_datafield_len(&self) -> usize {
|
||||
// One directive type octet and one byte of the directive parameter field.
|
||||
let mut len = 2;
|
||||
@ -143,9 +163,7 @@ impl<'src_name, 'dest_name, 'opts> MetadataPduCreator<'src_name, 'dest_name, 'op
|
||||
}
|
||||
len += self.src_file_name.len_full();
|
||||
len += self.dest_file_name.len_full();
|
||||
for tlv in self.options() {
|
||||
len += tlv.len_full()
|
||||
}
|
||||
len += self.options().len();
|
||||
if self.crc_flag() == CrcFlag::WithCrc {
|
||||
len += 2;
|
||||
}
|
||||
@ -191,10 +209,8 @@ impl WritablePduPacket for MetadataPduCreator<'_, '_, '_> {
|
||||
current_idx += self
|
||||
.dest_file_name
|
||||
.write_to_be_bytes(&mut buf[current_idx..])?;
|
||||
for opt in self.options() {
|
||||
opt.write_to_bytes(&mut buf[current_idx..current_idx + opt.len_full()])?;
|
||||
current_idx += opt.len_full();
|
||||
}
|
||||
buf[current_idx..current_idx + self.options.len()].copy_from_slice(self.options);
|
||||
current_idx += self.options.len();
|
||||
if self.crc_flag() == CrcFlag::WithCrc {
|
||||
current_idx = add_pdu_crc(buf, current_idx);
|
||||
}
|
||||
@ -355,7 +371,7 @@ pub mod tests {
|
||||
};
|
||||
use crate::cfdp::pdu::{CfdpPdu, PduError, WritablePduPacket};
|
||||
use crate::cfdp::pdu::{FileDirectiveType, PduHeader};
|
||||
use crate::cfdp::tlv::{Tlv, TlvType};
|
||||
use crate::cfdp::tlv::{ReadableTlv, Tlv, TlvOwned, TlvType, WritableTlv};
|
||||
use crate::cfdp::{
|
||||
ChecksumType, CrcFlag, Direction, LargeFileFlag, PduType, SegmentMetadataFlag,
|
||||
SegmentationControl, TransmissionMode,
|
||||
@ -365,16 +381,16 @@ pub mod tests {
|
||||
const SRC_FILENAME: &str = "hello-world.txt";
|
||||
const DEST_FILENAME: &str = "hello-world2.txt";
|
||||
|
||||
fn generic_metadata_pdu<'opts>(
|
||||
fn generic_metadata_pdu(
|
||||
crc_flag: CrcFlag,
|
||||
checksum_type: ChecksumType,
|
||||
closure_requested: bool,
|
||||
fss: LargeFileFlag,
|
||||
opts: &'opts [Tlv],
|
||||
opts: &[u8],
|
||||
) -> (
|
||||
Lv<'static>,
|
||||
Lv<'static>,
|
||||
MetadataPduCreator<'static, 'static, 'opts>,
|
||||
MetadataPduCreator<'static, 'static, '_>,
|
||||
) {
|
||||
let pdu_header = PduHeader::new_no_file_data(common_pdu_conf(crc_flag, fss), 0);
|
||||
let metadata_params = MetadataGenericParams::new(closure_requested, checksum_type, 0x1010);
|
||||
@ -544,9 +560,9 @@ pub mod tests {
|
||||
assert_eq!(written.metadata_params(), read.metadata_params());
|
||||
assert_eq!(written.src_file_name(), read.src_file_name());
|
||||
assert_eq!(written.dest_file_name(), read.dest_file_name());
|
||||
let opts = written.options();
|
||||
for (tlv_written, tlv_read) in opts.iter().zip(read.options_iter().unwrap()) {
|
||||
assert_eq!(tlv_written, &tlv_read);
|
||||
let opts = written.options_iter();
|
||||
for (tlv_written, tlv_read) in opts.zip(read.options_iter().unwrap()) {
|
||||
assert_eq!(&tlv_written, &tlv_read);
|
||||
}
|
||||
}
|
||||
|
||||
@ -661,14 +677,14 @@ pub mod tests {
|
||||
let tlv1 = Tlv::new_empty(TlvType::FlowLabel);
|
||||
let msg_to_user: [u8; 4] = [1, 2, 3, 4];
|
||||
let tlv2 = Tlv::new(TlvType::MsgToUser, &msg_to_user).unwrap();
|
||||
let tlv_vec = vec![tlv1, tlv2];
|
||||
let opts_len = tlv1.len_full() + tlv2.len_full();
|
||||
let mut tlv_buf: [u8; 64] = [0; 64];
|
||||
let opts_len = build_metadata_opts_from_slice(&mut tlv_buf, &[tlv1, tlv2]).unwrap();
|
||||
let (src_filename, dest_filename, metadata_pdu) = generic_metadata_pdu(
|
||||
CrcFlag::NoCrc,
|
||||
ChecksumType::Crc32,
|
||||
false,
|
||||
LargeFileFlag::Normal,
|
||||
&tlv_vec,
|
||||
&tlv_buf[0..opts_len],
|
||||
);
|
||||
let mut buf: [u8; 128] = [0; 128];
|
||||
let write_res = metadata_pdu.write_to_bytes(&mut buf);
|
||||
@ -691,7 +707,55 @@ pub mod tests {
|
||||
let opts_iter = opts_iter.unwrap();
|
||||
let mut accumulated_len = 0;
|
||||
for (idx, opt) in opts_iter.enumerate() {
|
||||
assert_eq!(tlv_vec[idx], opt);
|
||||
if idx == 0 {
|
||||
assert_eq!(tlv1, opt);
|
||||
} else if idx == 1 {
|
||||
assert_eq!(tlv2, opt);
|
||||
}
|
||||
accumulated_len += opt.len_full();
|
||||
}
|
||||
assert_eq!(accumulated_len, pdu_read_back.options().len());
|
||||
}
|
||||
#[test]
|
||||
fn test_with_owned_opts() {
|
||||
let tlv1 = TlvOwned::new_empty(TlvType::FlowLabel);
|
||||
let msg_to_user: [u8; 4] = [1, 2, 3, 4];
|
||||
let tlv2 = TlvOwned::new(TlvType::MsgToUser, &msg_to_user);
|
||||
let mut all_tlvs = tlv1.to_vec();
|
||||
all_tlvs.extend(tlv2.to_vec());
|
||||
let (src_filename, dest_filename, metadata_pdu) = generic_metadata_pdu(
|
||||
CrcFlag::NoCrc,
|
||||
ChecksumType::Crc32,
|
||||
false,
|
||||
LargeFileFlag::Normal,
|
||||
&all_tlvs,
|
||||
);
|
||||
let mut buf: [u8; 128] = [0; 128];
|
||||
let write_res = metadata_pdu.write_to_bytes(&mut buf);
|
||||
assert!(write_res.is_ok());
|
||||
let written = write_res.unwrap();
|
||||
assert_eq!(
|
||||
written,
|
||||
metadata_pdu.pdu_header.header_len()
|
||||
+ 1
|
||||
+ 1
|
||||
+ 4
|
||||
+ src_filename.len_full()
|
||||
+ dest_filename.len_full()
|
||||
+ all_tlvs.len()
|
||||
);
|
||||
let pdu_read_back = MetadataPduReader::from_bytes(&buf).unwrap();
|
||||
compare_read_pdu_to_written_pdu(&metadata_pdu, &pdu_read_back);
|
||||
let opts_iter = pdu_read_back.options_iter();
|
||||
assert!(opts_iter.is_some());
|
||||
let opts_iter = opts_iter.unwrap();
|
||||
let mut accumulated_len = 0;
|
||||
for (idx, opt) in opts_iter.enumerate() {
|
||||
if idx == 0 {
|
||||
assert_eq!(tlv1, opt);
|
||||
} else if idx == 1 {
|
||||
assert_eq!(tlv2, opt);
|
||||
}
|
||||
accumulated_len += opt.len_full();
|
||||
}
|
||||
assert_eq!(accumulated_len, pdu_read_back.options().len());
|
||||
@ -716,7 +780,7 @@ pub mod tests {
|
||||
assert_eq!(expected, Some(FileDirectiveType::MetadataPdu));
|
||||
assert_eq!(
|
||||
error.to_string(),
|
||||
"invalid directive type value 255, expected Some(MetadataPdu)"
|
||||
"invalid directive type, found 255, expected Some(MetadataPdu)"
|
||||
);
|
||||
} else {
|
||||
panic!("Expected InvalidDirectiveType error, got {:?}", error);
|
||||
@ -742,7 +806,7 @@ pub mod tests {
|
||||
assert_eq!(expected, FileDirectiveType::MetadataPdu);
|
||||
assert_eq!(
|
||||
error.to_string(),
|
||||
"found directive type EofPdu, expected MetadataPdu"
|
||||
"wrong directive type, found EofPdu, expected MetadataPdu"
|
||||
);
|
||||
} else {
|
||||
panic!("Expected InvalidDirectiveType error, got {:?}", error);
|
||||
|
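Since the `MetadataPduCreator` now takes its options as a raw `&[u8]` instead of `&[Tlv]`, callers serialize the TLVs up front. The sketch below mirrors `test_with_owned_opts` above; the import paths follow the test imports and are otherwise an assumption.

```rust
// Sketch only: building the serialized options buffer for the reworked
// MetadataPduCreator API from owned TLVs.
use spacepackets::cfdp::tlv::{TlvOwned, TlvType, WritableTlv};

fn main() {
    let flow_label = TlvOwned::new_empty(TlvType::FlowLabel);
    let msg_to_user: [u8; 4] = [1, 2, 3, 4];
    let msg_tlv = TlvOwned::new(TlvType::MsgToUser, &msg_to_user);
    // Concatenate the serialized TLVs; build_metadata_opts_from_owned_slice in
    // this diff does the same thing for a slice of TlvOwned values.
    let mut all_opts = flow_label.to_vec();
    all_opts.extend(msg_tlv.to_vec());
    // all_opts can now be passed as the options byte slice of the creator.
    assert!(!all_opts.is_empty());
}
```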
@ -5,9 +5,6 @@ use crate::ByteConversionError;
|
||||
use crate::CRC_CCITT_FALSE;
|
||||
#[cfg(feature = "alloc")]
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::{Display, Formatter};
|
||||
#[cfg(feature = "std")]
|
||||
use std::error::Error;
|
||||
|
||||
pub mod ack;
|
||||
pub mod eof;
|
||||
@ -30,137 +27,62 @@ pub enum FileDirectiveType {
|
||||
KeepAlivePdu = 0x0c,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
|
||||
pub enum PduError {
|
||||
ByteConversion(ByteConversionError),
|
||||
/// Found version ID invalid, not equal to [CFDP_VERSION_2].
|
||||
#[error("byte conversion error: {0}")]
|
||||
ByteConversion(#[from] ByteConversionError),
|
||||
/// Found version ID invalid, not equal to [super::CFDP_VERSION_2].
|
||||
#[error("CFDP version missmatch, found {0}, expected {ver}", ver = super::CFDP_VERSION_2)]
|
||||
CfdpVersionMissmatch(u8),
|
||||
/// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported.
|
||||
#[error("invalid PDU entity ID length {0}, only [1, 2, 4, 8] are allowed")]
|
||||
InvalidEntityLen(u8),
|
||||
/// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported.
|
||||
#[error("invalid transaction ID length {0}")]
|
||||
InvalidTransactionSeqNumLen(u8),
|
||||
#[error(
|
||||
"missmatch of PDU source ID length {src_id_len} and destination ID length {dest_id_len}"
|
||||
)]
|
||||
SourceDestIdLenMissmatch {
|
||||
src_id_len: usize,
|
||||
dest_id_len: usize,
|
||||
},
|
||||
/// Wrong directive type, for example when parsing the directive field for a file directive
|
||||
/// PDU.
|
||||
#[error("wrong directive type, found {found:?}, expected {expected:?}")]
|
||||
WrongDirectiveType {
|
||||
found: FileDirectiveType,
|
||||
expected: FileDirectiveType,
|
||||
},
|
||||
/// The directive type field contained a value not in the range of permitted values. This can
|
||||
/// also happen if an invalid value is passed to the ACK PDU constructor.
|
||||
#[error("invalid directive type, found {found:?}, expected {expected:?}")]
|
||||
InvalidDirectiveType {
|
||||
found: u8,
|
||||
expected: Option<FileDirectiveType>,
|
||||
},
|
||||
#[error("invalid start or end of scope value for NAK PDU")]
|
||||
InvalidStartOrEndOfScopeValue,
|
||||
/// Invalid condition code. Contains the raw detected value.
|
||||
#[error("invalid condition code {0}")]
|
||||
InvalidConditionCode(u8),
|
||||
/// Invalid checksum type which is not part of the checksums listed in the
|
||||
/// [SANA Checksum Types registry](https://sanaregistry.org/r/checksum_identifiers/).
|
||||
#[error("invalid checksum type {0}")]
|
||||
InvalidChecksumType(u8),
|
||||
#[error("file size {0} too large")]
|
||||
FileSizeTooLarge(u64),
|
||||
/// If the CRC flag for a PDU is enabled and the checksum check fails. Contains raw 16-bit CRC.
|
||||
ChecksumError(u16),
|
||||
#[error("checksum error for checksum {0}")]
|
||||
Checksum(u16),
|
||||
/// Generic error for invalid PDU formats.
|
||||
FormatError,
|
||||
#[error("generic PDU format error")]
|
||||
Format,
|
||||
/// Error handling a TLV field.
|
||||
TlvLvError(TlvLvError),
|
||||
}
|
||||
|
||||
impl Display for PduError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
PduError::InvalidEntityLen(raw_id) => {
|
||||
write!(
|
||||
f,
|
||||
"invalid PDU entity ID length {raw_id}, only [1, 2, 4, 8] are allowed"
|
||||
)
|
||||
}
|
||||
PduError::InvalidStartOrEndOfScopeValue => {
|
||||
write!(f, "invalid start or end of scope for NAK PDU")
|
||||
}
|
||||
PduError::InvalidTransactionSeqNumLen(raw_id) => {
|
||||
write!(
|
||||
f,
|
||||
"invalid PDUtransaction seq num length {raw_id}, only [1, 2, 4, 8] are allowed"
|
||||
)
|
||||
}
|
||||
PduError::CfdpVersionMissmatch(raw) => {
|
||||
write!(
|
||||
f,
|
||||
"cfdp version missmatch, found {raw}, expected {CFDP_VERSION_2}"
|
||||
)
|
||||
}
|
||||
PduError::SourceDestIdLenMissmatch {
|
||||
src_id_len,
|
||||
dest_id_len,
|
||||
} => {
|
||||
write!(
|
||||
f,
|
||||
"missmatch of PDU source length {src_id_len} and destination length {dest_id_len}"
|
||||
)
|
||||
}
|
||||
PduError::ByteConversion(e) => {
|
||||
write!(f, "{}", e)
|
||||
}
|
||||
PduError::FileSizeTooLarge(value) => {
|
||||
write!(f, "file size value {value} exceeds allowed 32 bit width")
|
||||
}
|
||||
PduError::WrongDirectiveType { found, expected } => {
|
||||
write!(f, "found directive type {found:?}, expected {expected:?}")
|
||||
}
|
||||
PduError::InvalidConditionCode(raw_code) => {
|
||||
write!(f, "found invalid condition code with raw value {raw_code}")
|
||||
}
|
||||
PduError::InvalidDirectiveType { found, expected } => {
|
||||
write!(
|
||||
f,
|
||||
"invalid directive type value {found}, expected {expected:?}"
|
||||
)
|
||||
}
|
||||
PduError::InvalidChecksumType(checksum_type) => {
|
||||
write!(f, "invalid checksum type {checksum_type}")
|
||||
}
|
||||
PduError::ChecksumError(checksum) => {
|
||||
write!(f, "checksum error for CRC {checksum:#04x}")
|
||||
}
|
||||
PduError::TlvLvError(error) => {
|
||||
write!(f, "pdu tlv error: {error}")
|
||||
}
|
||||
PduError::FormatError => {
|
||||
write!(f, "generic PDU format error")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl Error for PduError {
|
||||
fn source(&self) -> Option<&(dyn Error + 'static)> {
|
||||
match self {
|
||||
PduError::ByteConversion(e) => Some(e),
|
||||
PduError::TlvLvError(e) => Some(e),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ByteConversionError> for PduError {
|
||||
#[inline]
|
||||
fn from(value: ByteConversionError) -> Self {
|
||||
Self::ByteConversion(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TlvLvError> for PduError {
|
||||
#[inline]
|
||||
fn from(e: TlvLvError) -> Self {
|
||||
Self::TlvLvError(e)
|
||||
}
|
||||
#[error("PDU error: {0}")]
|
||||
TlvLv(#[from] TlvLvError),
|
||||
}
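
Since `ByteConversionError` and `TlvLvError` now carry `#[from]` attributes, the hand-written `From` impls removed in this hunk are generated by thiserror, and call sites can bubble both error types up with the `?` operator. A minimal sketch of what this enables (the `read_first_tlv` helper is purely illustrative and not part of the crate):

fn read_first_tlv(buf: &[u8]) -> Result<Tlv<'_>, PduError> {
    // Tlv::from_bytes returns a ByteConversionError after this change; the
    // derived From impl lifts it into PduError::ByteConversion automatically.
    let tlv = Tlv::from_bytes(buf)?;
    Ok(tlv)
}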
|
||||
|
||||
pub trait WritablePduPacket {
|
||||
@ -532,7 +454,7 @@ impl PduHeader {
|
||||
let mut digest = CRC_CCITT_FALSE.digest();
|
||||
digest.update(&buf[..self.pdu_len()]);
|
||||
if digest.finalize() != 0 {
|
||||
return Err(PduError::ChecksumError(u16::from_be_bytes(
|
||||
return Err(PduError::Checksum(u16::from_be_bytes(
|
||||
buf[self.pdu_len() - 2..self.pdu_len()].try_into().unwrap(),
|
||||
)));
|
||||
}
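
This check relies on the usual CRC residue property: digesting the PDU together with its appended CRC-CCITT checksum must yield zero. A self-contained sketch of the same property using the `crc` crate the library already depends on (the `residue_is_zero` helper is illustrative and assumes an allocator is available):

use crc::{Crc, CRC_16_IBM_3740};

const CRC_CCITT_FALSE: Crc<u16> = Crc::<u16>::new(&CRC_16_IBM_3740);

fn residue_is_zero(payload: &[u8]) -> bool {
    // Append the big-endian CRC of the payload, then digest payload + CRC.
    let crc = CRC_CCITT_FALSE.checksum(payload);
    let mut full = payload.to_vec();
    full.extend_from_slice(&crc.to_be_bytes());
    let mut digest = CRC_CCITT_FALSE.digest();
    digest.update(&full);
    digest.finalize() == 0
}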
|
||||
@ -981,7 +903,7 @@ mod tests {
|
||||
assert_eq!(raw_version, CFDP_VERSION_2 + 1);
|
||||
assert_eq!(
|
||||
error.to_string(),
|
||||
"cfdp version missmatch, found 2, expected 1"
|
||||
"CFDP version missmatch, found 2, expected 1"
|
||||
);
|
||||
} else {
|
||||
panic!("invalid exception: {}", error);
|
||||
@ -1029,7 +951,7 @@ mod tests {
|
||||
assert_eq!(expected, 7);
|
||||
assert_eq!(
|
||||
error.to_string(),
|
||||
"source slice with size 6 too small, expected at least 7 bytes"
|
||||
"byte conversion error: source slice with size 6 too small, expected at least 7 bytes"
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -1084,7 +1006,7 @@ mod tests {
|
||||
assert_eq!(dest_id_len, 2);
|
||||
assert_eq!(
|
||||
error.to_string(),
|
||||
"missmatch of PDU source length 1 and destination length 2"
|
||||
"missmatch of PDU source ID length 1 and destination ID length 2"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -751,7 +751,7 @@ mod tests {
|
||||
if let PduError::InvalidStartOrEndOfScopeValue = error {
|
||||
assert_eq!(
|
||||
error.to_string(),
|
||||
"invalid start or end of scope for NAK PDU"
|
||||
"invalid start or end of scope value for NAK PDU"
|
||||
);
|
||||
} else {
|
||||
panic!("unexpected error {error}");
|
||||
@ -796,7 +796,7 @@ mod tests {
|
||||
nak_vec[nak_pdu.len_written() - 1] -= 1;
|
||||
let nak_pdu_deser = NakPduReader::new(&nak_vec);
|
||||
assert!(nak_pdu_deser.is_err());
|
||||
if let Err(PduError::ChecksumError(raw)) = nak_pdu_deser {
|
||||
if let Err(PduError::Checksum(raw)) = nak_pdu_deser {
|
||||
assert_eq!(
|
||||
raw,
|
||||
u16::from_be_bytes(nak_vec[nak_pdu.len_written() - 2..].try_into().unwrap())
|
||||
|
@ -9,10 +9,14 @@ use crate::ByteConversionError;
|
||||
use alloc::vec;
|
||||
#[cfg(feature = "alloc")]
|
||||
use alloc::vec::Vec;
|
||||
#[cfg(feature = "alloc")]
|
||||
pub use alloc_mod::*;
|
||||
use num_enum::{IntoPrimitive, TryFromPrimitive};
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::{InvalidTlvTypeFieldError, TlvLvDataTooLargeError};
|
||||
|
||||
pub mod msg_to_user;
|
||||
|
||||
pub const MIN_TLV_LEN: usize = 2;

@ -39,6 +43,26 @@ pub trait GenericTlv {
    }
}

pub trait ReadableTlv {
    fn value(&self) -> &[u8];

    /// Checks whether the value field is empty.
    fn is_empty(&self) -> bool {
        self.value().is_empty()
    }

    /// Helper method to retrieve the length of the value. Simply calls the [slice::len] method of
    /// [Self::value]
    fn len_value(&self) -> usize {
        self.value().len()
    }

    /// Returns the full raw length, including the length byte.
    fn len_full(&self) -> usize {
        self.len_value() + 2
    }
}
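
The new `ReadableTlv` trait above factors the length helpers out of the concrete TLV types: everything is derived from `value()`, so the borrowed `Tlv` and the owned `TlvOwned` can share them. A minimal sketch of a type reusing the provided defaults (the `DummyTlv` type is an illustrative assumption, not part of the crate):

struct DummyTlv {
    payload: [u8; 2],
}

impl ReadableTlv for DummyTlv {
    fn value(&self) -> &[u8] {
        &self.payload
    }
}

fn dummy_tlv_demo() {
    // is_empty, len_value and len_full all come from the trait defaults;
    // len_full adds the two bytes for the type and length fields.
    let dummy = DummyTlv { payload: [0xAA, 0xBB] };
    assert_eq!(dummy.len_value(), 2);
    assert_eq!(dummy.len_full(), 4);
    assert!(!dummy.is_empty());
}
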
pub trait WritableTlv {
|
||||
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError>;
|
||||
fn len_written(&self) -> usize;
|
||||
@ -129,14 +153,14 @@ pub struct Tlv<'data> {
|
||||
}
|
||||
|
||||
impl<'data> Tlv<'data> {
|
||||
pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Tlv, TlvLvError> {
|
||||
pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Tlv, TlvLvDataTooLargeError> {
|
||||
Ok(Tlv {
|
||||
tlv_type_field: TlvTypeField::Standard(tlv_type),
|
||||
lv: Lv::new(data)?,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Tlv, TlvLvError> {
|
||||
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Tlv, TlvLvDataTooLargeError> {
|
||||
Ok(Tlv {
|
||||
tlv_type_field: TlvTypeField::Custom(tlv_type),
|
||||
lv: Lv::new(data)?,
|
||||
@ -151,31 +175,11 @@ impl<'data> Tlv<'data> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn value(&self) -> &[u8] {
|
||||
self.lv.value()
|
||||
}
|
||||
|
||||
/// Checks whether the value field is empty.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.value().is_empty()
|
||||
}
|
||||
|
||||
/// Helper method to retrieve the length of the value. Simply calls the [slice::len] method of
|
||||
/// [Self::value]
|
||||
pub fn len_value(&self) -> usize {
|
||||
self.value().len()
|
||||
}
|
||||
|
||||
/// Returns the full raw length, including the length byte.
|
||||
pub fn len_full(&self) -> usize {
|
||||
self.len_value() + 2
|
||||
}
|
||||
|
||||
/// Creates a TLV given a raw bytestream. Please note that it is not necessary to pass the
/// bytestream with the exact size of the expected TLV. This function will take care
/// of parsing the length byte, and the length of the parsed TLV can be retrieved using
/// [Self::len_full].
|
||||
pub fn from_bytes(buf: &'data [u8]) -> Result<Tlv<'data>, TlvLvError> {
|
||||
pub fn from_bytes(buf: &'data [u8]) -> Result<Tlv<'data>, ByteConversionError> {
|
||||
generic_len_check_deserialization(buf, MIN_TLV_LEN)?;
|
||||
let mut tlv = Self {
|
||||
tlv_type_field: TlvTypeField::from(buf[0]),
|
||||
@ -192,6 +196,27 @@ impl<'data> Tlv<'data> {
|
||||
pub fn raw_data(&self) -> Option<&[u8]> {
|
||||
self.lv.raw_data()
|
||||
}
|
||||
|
||||
#[cfg(feature = "alloc")]
|
||||
pub fn to_owned(&self) -> TlvOwned {
|
||||
TlvOwned {
|
||||
tlv_type_field: self.tlv_type_field,
|
||||
data: self.value().to_vec(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "alloc")]
|
||||
impl PartialEq<TlvOwned> for Tlv<'_> {
|
||||
fn eq(&self, other: &TlvOwned) -> bool {
|
||||
self.tlv_type_field == other.tlv_type_field && self.value() == other.value()
|
||||
}
|
||||
}
|
||||
|
||||
impl ReadableTlv for Tlv<'_> {
|
||||
fn value(&self) -> &[u8] {
|
||||
self.lv.value()
|
||||
}
|
||||
}
|
||||
|
||||
impl WritableTlv for Tlv<'_> {
|
||||
@ -212,18 +237,90 @@ impl GenericTlv for Tlv<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn verify_tlv_type(raw_type: u8, expected_tlv_type: TlvType) -> Result<(), TlvLvError> {
|
||||
let tlv_type = TlvType::try_from(raw_type).map_err(|_| TlvLvError::InvalidTlvTypeField {
|
||||
found: raw_type,
|
||||
expected: Some(expected_tlv_type.into()),
|
||||
})?;
|
||||
if tlv_type != expected_tlv_type {
|
||||
return Err(TlvLvError::InvalidTlvTypeField {
|
||||
found: tlv_type as u8,
|
||||
expected: Some(expected_tlv_type as u8),
|
||||
});
|
||||
#[cfg(feature = "alloc")]
|
||||
pub mod alloc_mod {
|
||||
use super::*;
|
||||
|
||||
/// Owned variant of [Tlv] which is consequently [Clone]able and does not have a lifetime
|
||||
/// associated to a data slice.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
|
||||
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
|
||||
pub struct TlvOwned {
|
||||
pub(crate) tlv_type_field: TlvTypeField,
|
||||
pub(crate) data: Vec<u8>,
|
||||
}
|
||||
|
||||
impl TlvOwned {
|
||||
pub fn new(tlv_type: TlvType, data: &[u8]) -> Self {
|
||||
Self {
|
||||
tlv_type_field: TlvTypeField::Standard(tlv_type),
|
||||
data: data.to_vec(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Self {
|
||||
Self {
|
||||
tlv_type_field: TlvTypeField::Custom(tlv_type),
|
||||
data: data.to_vec(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a TLV with an empty value field.
|
||||
pub fn new_empty(tlv_type: TlvType) -> Self {
|
||||
Self {
|
||||
tlv_type_field: TlvTypeField::Standard(tlv_type),
|
||||
data: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_tlv(&self) -> Tlv<'_> {
|
||||
Tlv {
|
||||
tlv_type_field: self.tlv_type_field,
|
||||
// The API should ensure that the data length is never too large, so the unwrap for the
// LV creation should never be an issue.
|
||||
lv: Lv::new(&self.data).expect("lv creation failed unexpectedly"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ReadableTlv for TlvOwned {
|
||||
fn value(&self) -> &[u8] {
|
||||
&self.data
|
||||
}
|
||||
}
|
||||
|
||||
impl WritableTlv for TlvOwned {
|
||||
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
|
||||
generic_len_check_data_serialization(buf, self.data.len(), MIN_TLV_LEN)?;
|
||||
buf[0] = self.tlv_type_field.into();
|
||||
buf[1] = self.data.len() as u8;
|
||||
buf[2..2 + self.data.len()].copy_from_slice(&self.data);
|
||||
Ok(self.len_written())
|
||||
}
|
||||
|
||||
fn len_written(&self) -> usize {
|
||||
self.data.len() + 2
|
||||
}
|
||||
}
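
As the writer above shows, the serialized form is simply a type byte, a length byte and the raw value. A short illustration of the resulting byte layout (the concrete values are chosen arbitrarily and the expected bytes assume the custom type field is written through unchanged):

let tlv = TlvOwned::new_with_custom_type(0x20, &[0xAB, 0xCD]);
let mut buf = [0u8; 8];
let written = tlv.write_to_bytes(&mut buf).unwrap();
assert_eq!(written, 4);
assert_eq!(&buf[..written], &[0x20, 0x02, 0xAB, 0xCD]);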
|
||||
|
||||
impl GenericTlv for TlvOwned {
|
||||
fn tlv_type_field(&self) -> TlvTypeField {
|
||||
self.tlv_type_field
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Tlv<'_>> for TlvOwned {
|
||||
fn from(value: Tlv<'_>) -> Self {
|
||||
value.to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<Tlv<'_>> for TlvOwned {
|
||||
fn eq(&self, other: &Tlv) -> bool {
|
||||
self.tlv_type_field == other.tlv_type_field && self.data == other.value()
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
@ -277,11 +374,8 @@ impl EntityIdTlv {
|
||||
Self::len_check(buf)?;
|
||||
self.entity_id
|
||||
.write_to_be_bytes(&mut buf[2..2 + self.entity_id.size()])?;
|
||||
Tlv::new(TlvType::EntityId, &buf[2..2 + self.entity_id.size()]).map_err(|e| match e {
|
||||
TlvLvError::ByteConversion(e) => e,
|
||||
// All other errors are impossible.
|
||||
_ => panic!("unexpected TLV error"),
|
||||
})
|
||||
// Can't fail.
|
||||
Ok(Tlv::new(TlvType::EntityId, &buf[2..2 + self.entity_id.size()]).unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
@ -307,21 +401,23 @@ impl GenericTlv for EntityIdTlv {
|
||||
impl<'data> TryFrom<Tlv<'data>> for EntityIdTlv {
|
||||
type Error = TlvLvError;
|
||||
|
||||
fn try_from(value: Tlv) -> Result<Self, Self::Error> {
|
||||
fn try_from(value: Tlv) -> Result<Self, TlvLvError> {
|
||||
match value.tlv_type_field {
|
||||
TlvTypeField::Standard(tlv_type) => {
|
||||
if tlv_type != TlvType::EntityId {
|
||||
return Err(TlvLvError::InvalidTlvTypeField {
|
||||
return Err(InvalidTlvTypeFieldError {
|
||||
found: tlv_type as u8,
|
||||
expected: Some(TlvType::EntityId as u8),
|
||||
});
|
||||
}
|
||||
.into());
|
||||
}
|
||||
}
|
||||
TlvTypeField::Custom(val) => {
|
||||
return Err(TlvLvError::InvalidTlvTypeField {
|
||||
return Err(InvalidTlvTypeFieldError {
|
||||
found: val,
|
||||
expected: Some(TlvType::EntityId as u8),
|
||||
});
|
||||
}
|
||||
.into());
|
||||
}
|
||||
}
|
||||
let len_value = value.value().len();
|
||||
@ -752,6 +848,23 @@ impl GenericTlv for FilestoreResponseTlv<'_, '_, '_> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn verify_tlv_type(
|
||||
raw_type: u8,
|
||||
expected_tlv_type: TlvType,
|
||||
) -> Result<(), InvalidTlvTypeFieldError> {
|
||||
let tlv_type = TlvType::try_from(raw_type).map_err(|_| InvalidTlvTypeFieldError {
|
||||
found: raw_type,
|
||||
expected: Some(expected_tlv_type.into()),
|
||||
})?;
|
||||
if tlv_type != expected_tlv_type {
|
||||
return Err(InvalidTlvTypeFieldError {
|
||||
found: tlv_type as u8,
|
||||
expected: Some(expected_tlv_type as u8),
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@ -939,14 +1052,14 @@ mod tests {
|
||||
let tlv_res = Tlv::new(TlvType::MsgToUser, &buf_too_large);
|
||||
assert!(tlv_res.is_err());
|
||||
let error = tlv_res.unwrap_err();
|
||||
if let TlvLvError::DataTooLarge(size) = error {
|
||||
assert_eq!(size, u8::MAX as usize + 1);
|
||||
assert_eq!(
|
||||
error.to_string(),
|
||||
"data with size 256 larger than allowed 255 bytes"
|
||||
);
|
||||
} else {
|
||||
panic!("unexpected error {:?}", error);
|
||||
match error {
|
||||
TlvLvDataTooLargeError(size) => {
|
||||
assert_eq!(size, u8::MAX as usize + 1);
|
||||
assert_eq!(
|
||||
error.to_string(),
|
||||
"data with size 256 larger than allowed 255 bytes"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1256,7 +1369,8 @@ mod tests {
|
||||
let error = EntityIdTlv::try_from(msg_to_user_tlv);
|
||||
assert!(error.is_err());
|
||||
let error = error.unwrap_err();
|
||||
if let TlvLvError::InvalidTlvTypeField { found, expected } = error {
|
||||
if let TlvLvError::InvalidTlvTypeField(InvalidTlvTypeFieldError { found, expected }) = error
|
||||
{
|
||||
assert_eq!(found, TlvType::MsgToUser as u8);
|
||||
assert_eq!(expected, Some(TlvType::EntityId as u8));
|
||||
assert_eq!(
|
||||
@ -1300,4 +1414,71 @@ mod tests {
|
||||
assert_eq!(tlv_as_vec[0], 20);
|
||||
assert_eq!(tlv_as_vec[1], 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_tlv_to_owned() {
|
||||
let entity_id = UbfU8::new(5);
|
||||
let mut buf: [u8; 4] = [0; 4];
|
||||
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
|
||||
let tlv_res = Tlv::new(TlvType::EntityId, &buf[0..1]);
|
||||
assert!(tlv_res.is_ok());
|
||||
let tlv_res = tlv_res.unwrap();
|
||||
let tlv_owned = tlv_res.to_owned();
|
||||
assert_eq!(tlv_res, tlv_owned);
|
||||
let tlv_owned_from_conversion: TlvOwned = tlv_res.into();
|
||||
assert_eq!(tlv_owned_from_conversion, tlv_owned);
|
||||
assert_eq!(tlv_owned_from_conversion, tlv_res);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_owned_tlv() {
|
||||
let entity_id = UbfU8::new(5);
|
||||
let mut buf: [u8; 4] = [0; 4];
|
||||
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
|
||||
let tlv_res = TlvOwned::new(TlvType::EntityId, &buf[0..1]);
|
||||
assert_eq!(
|
||||
tlv_res.tlv_type_field(),
|
||||
TlvTypeField::Standard(TlvType::EntityId)
|
||||
);
|
||||
assert_eq!(tlv_res.len_full(), 3);
|
||||
assert_eq!(tlv_res.value().len(), 1);
|
||||
assert_eq!(tlv_res.len_value(), 1);
|
||||
assert!(!tlv_res.is_empty());
|
||||
assert_eq!(tlv_res.value()[0], 5);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_owned_tlv_empty() {
|
||||
let tlv_res = TlvOwned::new_empty(TlvType::FlowLabel);
|
||||
assert_eq!(
|
||||
tlv_res.tlv_type_field(),
|
||||
TlvTypeField::Standard(TlvType::FlowLabel)
|
||||
);
|
||||
assert_eq!(tlv_res.len_full(), 2);
|
||||
assert_eq!(tlv_res.value().len(), 0);
|
||||
assert_eq!(tlv_res.len_value(), 0);
|
||||
assert!(tlv_res.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_owned_tlv_custom_type() {
|
||||
let tlv_res = TlvOwned::new_with_custom_type(32, &[]);
|
||||
assert_eq!(tlv_res.tlv_type_field(), TlvTypeField::Custom(32));
|
||||
assert_eq!(tlv_res.len_full(), 2);
|
||||
assert_eq!(tlv_res.value().len(), 0);
|
||||
assert_eq!(tlv_res.len_value(), 0);
|
||||
assert!(tlv_res.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_owned_tlv_conversion_to_bytes() {
|
||||
let entity_id = UbfU8::new(5);
|
||||
let mut buf: [u8; 4] = [0; 4];
|
||||
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
|
||||
let tlv_res = Tlv::new(TlvType::EntityId, &buf[0..1]);
|
||||
assert!(tlv_res.is_ok());
|
||||
let tlv_res = tlv_res.unwrap();
|
||||
let tlv_owned_from_conversion: TlvOwned = tlv_res.into();
|
||||
assert_eq!(tlv_res.to_vec(), tlv_owned_from_conversion.to_vec());
|
||||
}
|
||||
}
|
||||
|
@ -1,6 +1,11 @@
|
||||
//! Abstractions for the Message to User CFDP TLV subtype.
|
||||
use super::{GenericTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv};
|
||||
use crate::ByteConversionError;
|
||||
#[cfg(feature = "alloc")]
|
||||
use super::TlvOwned;
|
||||
use super::{GenericTlv, ReadableTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv};
|
||||
use crate::{
|
||||
cfdp::{InvalidTlvTypeFieldError, TlvLvDataTooLargeError},
|
||||
ByteConversionError,
|
||||
};
|
||||
use delegate::delegate;
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
@ -10,7 +15,7 @@ pub struct MsgToUserTlv<'data> {
|
||||
|
||||
impl<'data> MsgToUserTlv<'data> {
|
||||
/// Create a new message to user TLV where the type field is set correctly.
|
||||
pub fn new(value: &'data [u8]) -> Result<MsgToUserTlv<'data>, TlvLvError> {
|
||||
pub fn new(value: &'data [u8]) -> Result<MsgToUserTlv<'data>, TlvLvDataTooLargeError> {
|
||||
Ok(Self {
|
||||
tlv: Tlv::new(TlvType::MsgToUser, value)?,
|
||||
})
|
||||
@ -60,21 +65,38 @@ impl<'data> MsgToUserTlv<'data> {
|
||||
match msg_to_user.tlv.tlv_type_field() {
|
||||
TlvTypeField::Standard(tlv_type) => {
|
||||
if tlv_type != TlvType::MsgToUser {
|
||||
return Err(TlvLvError::InvalidTlvTypeField {
|
||||
return Err(InvalidTlvTypeFieldError {
|
||||
found: tlv_type as u8,
|
||||
expected: Some(TlvType::MsgToUser as u8),
|
||||
});
|
||||
}
|
||||
.into());
|
||||
}
|
||||
}
|
||||
TlvTypeField::Custom(raw) => {
|
||||
return Err(TlvLvError::InvalidTlvTypeField {
|
||||
return Err(InvalidTlvTypeFieldError {
|
||||
found: raw,
|
||||
expected: Some(TlvType::MsgToUser as u8),
|
||||
});
|
||||
}
|
||||
.into());
|
||||
}
|
||||
}
|
||||
Ok(msg_to_user)
|
||||
}
|
||||
|
||||
pub fn to_tlv(&self) -> Tlv<'data> {
|
||||
self.tlv
|
||||
}
|
||||
|
||||
#[cfg(feature = "alloc")]
|
||||
pub fn to_owned(&self) -> TlvOwned {
|
||||
self.tlv.to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<MsgToUserTlv<'a>> for Tlv<'a> {
|
||||
fn from(value: MsgToUserTlv<'a>) -> Tlv<'a> {
|
||||
value.to_tlv()
|
||||
}
|
||||
}
|
||||
|
||||
impl WritableTlv for MsgToUserTlv<'_> {
|
||||
@ -139,6 +161,40 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_msg_to_user_type_reduction() {
|
||||
let custom_value: [u8; 4] = [1, 2, 3, 4];
|
||||
let msg_to_user = MsgToUserTlv::new(&custom_value).unwrap();
|
||||
let tlv = msg_to_user.to_tlv();
|
||||
assert_eq!(
|
||||
tlv.tlv_type_field(),
|
||||
TlvTypeField::Standard(TlvType::MsgToUser)
|
||||
);
|
||||
|
||||
assert_eq!(tlv.value(), custom_value);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_msg_to_user_to_tlv() {
|
||||
let custom_value: [u8; 4] = [1, 2, 3, 4];
|
||||
let msg_to_user = MsgToUserTlv::new(&custom_value).unwrap();
|
||||
let tlv: Tlv = msg_to_user.into();
|
||||
assert_eq!(msg_to_user.to_tlv(), tlv);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_msg_to_user_owner_converter() {
|
||||
let custom_value: [u8; 4] = [1, 2, 3, 4];
|
||||
let msg_to_user = MsgToUserTlv::new(&custom_value).unwrap();
|
||||
let tlv = msg_to_user.to_owned();
|
||||
assert_eq!(
|
||||
tlv.tlv_type_field(),
|
||||
TlvTypeField::Standard(TlvType::MsgToUser)
|
||||
);
|
||||
|
||||
assert_eq!(tlv.value(), custom_value);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reserved_msg_deserialization() {
|
||||
let custom_value: [u8; 3] = [1, 2, 3];
|
||||
@ -154,9 +210,9 @@ mod tests {
|
||||
fn test_reserved_msg_deserialization_invalid_type() {
|
||||
let trash: [u8; 5] = [TlvType::FlowLabel as u8, 3, 1, 2, 3];
|
||||
let error = MsgToUserTlv::from_bytes(&trash).unwrap_err();
|
||||
if let TlvLvError::InvalidTlvTypeField { found, expected } = error {
|
||||
assert_eq!(found, TlvType::FlowLabel as u8);
|
||||
assert_eq!(expected, Some(TlvType::MsgToUser as u8));
|
||||
if let TlvLvError::InvalidTlvTypeField(inner) = error {
|
||||
assert_eq!(inner.found, TlvType::FlowLabel as u8);
|
||||
assert_eq!(inner.expected, Some(TlvType::MsgToUser as u8));
|
||||
} else {
|
||||
panic!("Wrong error type returned: {:?}", error);
|
||||
}
|
||||
|
@ -6,13 +6,11 @@
|
||||
use crate::{ByteConversionError, CcsdsPacket, CRC_CCITT_FALSE};
|
||||
#[cfg(feature = "alloc")]
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::{Debug, Display, Formatter};
|
||||
use core::fmt::Debug;
|
||||
use core::mem::size_of;
|
||||
use num_enum::{IntoPrimitive, TryFromPrimitive};
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
#[cfg(feature = "std")]
|
||||
use std::error::Error;
|
||||
|
||||
pub mod event;
|
||||
pub mod hk;
|
||||
@ -148,50 +146,19 @@ pub enum PfcReal {
|
||||
DoubleMilStd = 4,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
|
||||
pub enum PusError {
|
||||
#[error("PUS version {0:?} not supported")]
|
||||
VersionNotSupported(PusVersion),
|
||||
#[error("checksum verification for crc16 {0:#06x} failed")]
|
||||
ChecksumFailure(u16),
|
||||
/// CRC16 needs to be calculated first
|
||||
CrcCalculationMissing,
|
||||
ByteConversion(ByteConversionError),
|
||||
}
|
||||
|
||||
impl Display for PusError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
PusError::VersionNotSupported(v) => {
|
||||
write!(f, "PUS version {v:?} not supported")
|
||||
}
|
||||
PusError::ChecksumFailure(crc) => {
|
||||
write!(f, "checksum verification for crc16 {crc:#06x} failed")
|
||||
}
|
||||
PusError::CrcCalculationMissing => {
|
||||
write!(f, "crc16 was not calculated")
|
||||
}
|
||||
PusError::ByteConversion(e) => {
|
||||
write!(f, "pus error: {e}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl Error for PusError {
|
||||
fn source(&self) -> Option<&(dyn Error + 'static)> {
|
||||
if let PusError::ByteConversion(e) = self {
|
||||
return Some(e);
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ByteConversionError> for PusError {
|
||||
fn from(e: ByteConversionError) -> Self {
|
||||
PusError::ByteConversion(e)
|
||||
}
|
||||
//#[error("crc16 was not calculated")]
|
||||
//CrcCalculationMissing,
|
||||
#[error("pus error: {0}")]
|
||||
ByteConversion(#[from] ByteConversionError),
|
||||
}
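
With the derive in place, the messages previously produced by the manual Display impl now come from the `#[error]` attributes. A short sketch, with the expected text taken from the attribute above (the CRC value is arbitrary):

let error = PusError::ChecksumFailure(0x1234);
assert_eq!(
    error.to_string(),
    "checksum verification for crc16 0x1234 failed"
);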
|
||||
|
||||
/// Generic trait to describe common attributes for both PUS Telecommands (TC) and PUS Telemetry
|
||||
|
@ -45,7 +45,7 @@ use delegate::delegate;
|
||||
use num_enum::{IntoPrimitive, TryFromPrimitive};
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
use zerocopy::AsBytes;
|
||||
use zerocopy::{FromBytes, IntoBytes};
|
||||
|
||||
#[cfg(feature = "alloc")]
|
||||
use alloc::vec::Vec;
|
||||
@ -86,9 +86,9 @@ pub trait GenericPusTcSecondaryHeader {
|
||||
pub mod zc {
|
||||
use crate::ecss::tc::GenericPusTcSecondaryHeader;
|
||||
use crate::ecss::{PusError, PusVersion};
|
||||
use zerocopy::{AsBytes, FromBytes, FromZeroes, NetworkEndian, Unaligned, U16};
|
||||
use zerocopy::{FromBytes, Immutable, IntoBytes, NetworkEndian, Unaligned, U16};
|
||||
|
||||
#[derive(FromZeroes, FromBytes, AsBytes, Unaligned)]
|
||||
#[derive(FromBytes, IntoBytes, Immutable, Unaligned)]
|
||||
#[repr(C)]
|
||||
pub struct PusTcSecondaryHeader {
|
||||
version_ack: u8,
|
||||
@ -138,16 +138,6 @@ pub mod zc {
|
||||
self.source_id.get()
|
||||
}
|
||||
}
|
||||
|
||||
impl PusTcSecondaryHeader {
|
||||
pub fn write_to_bytes(&self, slice: &mut [u8]) -> Option<()> {
|
||||
self.write_to(slice)
|
||||
}
|
||||
|
||||
pub fn from_bytes(slice: &[u8]) -> Option<Self> {
|
||||
Self::read_from(slice)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
|
||||
@ -392,8 +382,8 @@ impl WritablePusPacket for PusTcCreator<'_> {
|
||||
curr_idx += CCSDS_HEADER_LEN;
|
||||
let sec_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap();
|
||||
sec_header
|
||||
.write_to_bytes(&mut slice[curr_idx..curr_idx + tc_header_len])
|
||||
.ok_or(ByteConversionError::ZeroCopyToError)?;
|
||||
.write_to(&mut slice[curr_idx..curr_idx + tc_header_len])
|
||||
.map_err(|_| ByteConversionError::ZeroCopyToError)?;
|
||||
|
||||
curr_idx += tc_header_len;
|
||||
slice[curr_idx..curr_idx + self.app_data.len()].copy_from_slice(self.app_data);
|
||||
@ -502,10 +492,10 @@ impl<'raw_data> PusTcReader<'raw_data> {
|
||||
}
|
||||
.into());
|
||||
}
|
||||
let sec_header = zc::PusTcSecondaryHeader::from_bytes(
|
||||
let sec_header = zc::PusTcSecondaryHeader::read_from_bytes(
|
||||
&slice[current_idx..current_idx + PUC_TC_SECONDARY_HEADER_LEN],
|
||||
)
|
||||
.ok_or(ByteConversionError::ZeroCopyFromError)?;
|
||||
.map_err(|_| ByteConversionError::ZeroCopyFromError)?;
|
||||
current_idx += PUC_TC_SECONDARY_HEADER_LEN;
|
||||
let raw_data = &slice[0..total_len];
|
||||
let pus_tc = Self {
|
||||
|
@ -54,7 +54,7 @@ use crate::{
|
||||
use core::mem::size_of;
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
use zerocopy::AsBytes;
|
||||
use zerocopy::{FromBytes, IntoBytes};
|
||||
|
||||
#[cfg(feature = "alloc")]
|
||||
use alloc::vec::Vec;
|
||||
@ -83,9 +83,9 @@ pub trait GenericPusTmSecondaryHeader {
|
||||
pub mod zc {
|
||||
use super::GenericPusTmSecondaryHeader;
|
||||
use crate::ecss::{PusError, PusVersion};
|
||||
use zerocopy::{AsBytes, FromBytes, FromZeroes, NetworkEndian, Unaligned, U16};
|
||||
use zerocopy::{FromBytes, Immutable, IntoBytes, NetworkEndian, Unaligned, U16};
|
||||
|
||||
#[derive(FromBytes, FromZeroes, AsBytes, Unaligned)]
|
||||
#[derive(FromBytes, IntoBytes, Immutable, Unaligned)]
|
||||
#[repr(C)]
|
||||
pub struct PusTmSecHeaderWithoutTimestamp {
|
||||
pus_version_and_sc_time_ref_status: u8,
|
||||
@ -117,16 +117,6 @@ pub mod zc {
|
||||
}
|
||||
}
|
||||
|
||||
impl PusTmSecHeaderWithoutTimestamp {
|
||||
pub fn write_to_bytes(&self, slice: &mut [u8]) -> Option<()> {
|
||||
self.write_to(slice)
|
||||
}
|
||||
|
||||
pub fn from_bytes(slice: &[u8]) -> Option<Self> {
|
||||
Self::read_from(slice)
|
||||
}
|
||||
}
|
||||
|
||||
impl GenericPusTmSecondaryHeader for PusTmSecHeaderWithoutTimestamp {
|
||||
#[inline]
|
||||
fn pus_version(&self) -> PusVersion {
|
||||
@ -413,8 +403,8 @@ impl<'time, 'src_data> PusTmCreator<'time, 'src_data> {
|
||||
let sec_header_len = size_of::<zc::PusTmSecHeaderWithoutTimestamp>();
|
||||
let sec_header = zc::PusTmSecHeaderWithoutTimestamp::try_from(self.sec_header).unwrap();
|
||||
sec_header
|
||||
.write_to_bytes(&mut slice[curr_idx..curr_idx + sec_header_len])
|
||||
.ok_or(ByteConversionError::ZeroCopyToError)?;
|
||||
.write_to(&mut slice[curr_idx..curr_idx + sec_header_len])
|
||||
.map_err(|_| ByteConversionError::ZeroCopyToError)?;
|
||||
curr_idx += sec_header_len;
|
||||
slice[curr_idx..curr_idx + self.sec_header.timestamp.len()]
|
||||
.copy_from_slice(self.sec_header.timestamp);
|
||||
@ -571,10 +561,10 @@ impl<'raw_data> PusTmReader<'raw_data> {
|
||||
}
|
||||
.into());
|
||||
}
|
||||
let sec_header_zc = zc::PusTmSecHeaderWithoutTimestamp::from_bytes(
|
||||
let sec_header_zc = zc::PusTmSecHeaderWithoutTimestamp::read_from_bytes(
|
||||
&slice[current_idx..current_idx + PUS_TM_MIN_SEC_HEADER_LEN],
|
||||
)
|
||||
.ok_or(ByteConversionError::ZeroCopyFromError)?;
|
||||
.map_err(|_| ByteConversionError::ZeroCopyFromError)?;
|
||||
current_idx += PUS_TM_MIN_SEC_HEADER_LEN;
|
||||
let zc_sec_header_wrapper = zc::PusTmSecHeader {
|
||||
zc_header: sec_header_zc,
|
||||
@ -710,7 +700,7 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
|
||||
if raw_tm_len < CCSDS_HEADER_LEN + PUS_TM_MIN_SEC_HEADER_LEN + timestamp_len {
|
||||
return None;
|
||||
}
|
||||
let sp_header = crate::zc::SpHeader::from_bytes(&raw_tm[0..CCSDS_HEADER_LEN]).unwrap();
|
||||
let sp_header = crate::zc::SpHeader::read_from_bytes(&raw_tm[0..CCSDS_HEADER_LEN]).unwrap();
|
||||
if raw_tm_len < sp_header.total_len() {
|
||||
return None;
|
||||
}
|
||||
@ -751,7 +741,7 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
|
||||
#[inline]
|
||||
pub fn sp_header(&self) -> crate::zc::SpHeader {
|
||||
// Valid minimum length of packet was checked before.
|
||||
crate::zc::SpHeader::from_bytes(&self.raw_tm[0..CCSDS_HEADER_LEN]).unwrap()
|
||||
crate::zc::SpHeader::read_from_bytes(&self.raw_tm[0..CCSDS_HEADER_LEN]).unwrap()
|
||||
}
|
||||
|
||||
/// Helper API to generate the portion of the secondary header without a timestamp from the
|
||||
@ -759,7 +749,7 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
|
||||
#[inline]
|
||||
pub fn sec_header_without_timestamp(&self) -> PusTmSecHeaderWithoutTimestamp {
|
||||
// Valid minimum length of packet was checked before.
|
||||
PusTmSecHeaderWithoutTimestamp::from_bytes(
|
||||
PusTmSecHeaderWithoutTimestamp::read_from_bytes(
|
||||
&self.raw_tm[CCSDS_HEADER_LEN..CCSDS_HEADER_LEN + PUS_TM_MIN_SEC_HEADER_LEN],
|
||||
)
|
||||
.unwrap()
|
||||
|
90
src/lib.rs
@ -55,27 +55,23 @@
|
||||
//! println!("{:x?}", &ccsds_buf[0..6]);
|
||||
//! ```
|
||||
#![no_std]
|
||||
#![cfg_attr(docs_rs, feature(doc_auto_cfg))]
|
||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
#[cfg(feature = "alloc")]
|
||||
extern crate alloc;
|
||||
#[cfg(any(feature = "std", test))]
|
||||
extern crate std;
|
||||
|
||||
use core::{
|
||||
fmt::{Debug, Display, Formatter},
|
||||
hash::Hash,
|
||||
};
|
||||
use core::{fmt::Debug, hash::Hash};
|
||||
use crc::{Crc, CRC_16_IBM_3740};
|
||||
use delegate::delegate;
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
use std::error::Error;
|
||||
use zerocopy::{FromBytes, IntoBytes};
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
pub mod cfdp;
|
||||
pub mod ecss;
|
||||
pub mod seq_count;
|
||||
pub mod time;
|
||||
pub mod util;
|
||||
|
||||
@ -92,55 +88,24 @@ pub const MAX_APID: u16 = 2u16.pow(11) - 1;
|
||||
pub const MAX_SEQ_COUNT: u16 = 2u16.pow(14) - 1;
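
For reference, these expressions evaluate to the 11-bit and 14-bit field maxima:

assert_eq!(MAX_APID, 2047);
assert_eq!(MAX_SEQ_COUNT, 16383);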
|
||||
|
||||
/// Generic error type when converting to and from raw byte slices.
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
|
||||
pub enum ByteConversionError {
|
||||
/// The passed slice is too small. Returns the passed slice length and expected minimum size
|
||||
ToSliceTooSmall {
|
||||
found: usize,
|
||||
expected: usize,
|
||||
},
|
||||
#[error("target slice with size {found} is too small, expected size of at least {expected}")]
|
||||
ToSliceTooSmall { found: usize, expected: usize },
|
||||
/// The provider buffer is too small. Returns the passed slice length and expected minimum size
|
||||
FromSliceTooSmall {
|
||||
found: usize,
|
||||
expected: usize,
|
||||
},
|
||||
#[error("source slice with size {found} too small, expected at least {expected} bytes")]
|
||||
FromSliceTooSmall { found: usize, expected: usize },
|
||||
/// The [zerocopy] library failed to write to bytes
|
||||
#[error("zerocopy serialization error")]
|
||||
ZeroCopyToError,
|
||||
/// The [zerocopy] library failed to read from bytes
|
||||
#[error("zerocopy deserialization error")]
|
||||
ZeroCopyFromError,
|
||||
}
|
||||
|
||||
impl Display for ByteConversionError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
ByteConversionError::ToSliceTooSmall { found, expected } => {
|
||||
write!(
|
||||
f,
|
||||
"target slice with size {} is too small, expected size of at least {}",
|
||||
found, expected
|
||||
)
|
||||
}
|
||||
ByteConversionError::FromSliceTooSmall { found, expected } => {
|
||||
write!(
|
||||
f,
|
||||
"source slice with size {} too small, expected at least {} bytes",
|
||||
found, expected
|
||||
)
|
||||
}
|
||||
ByteConversionError::ZeroCopyToError => {
|
||||
write!(f, "zerocopy serialization error")
|
||||
}
|
||||
ByteConversionError::ZeroCopyFromError => {
|
||||
write!(f, "zerocopy deserialization error")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl Error for ByteConversionError {}
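
The manual Display and Error impls deleted here are replaced by the derive; the messages stay the same, as the tests elsewhere in this diff also assert. A short sketch (the sizes are arbitrary):

let error = ByteConversionError::FromSliceTooSmall { found: 6, expected: 7 };
assert_eq!(
    error.to_string(),
    "source slice with size 6 too small, expected at least 7 bytes"
);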
|
||||
|
||||
/// CCSDS packet type enumeration.
|
||||
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
@ -732,8 +697,8 @@ impl SpHeader {
|
||||
expected: CCSDS_HEADER_LEN,
|
||||
});
|
||||
}
|
||||
let zc_header = zc::SpHeader::from_bytes(&buf[0..CCSDS_HEADER_LEN])
|
||||
.ok_or(ByteConversionError::ZeroCopyFromError)?;
|
||||
let zc_header = zc::SpHeader::read_from_bytes(&buf[0..CCSDS_HEADER_LEN])
|
||||
.map_err(|_| ByteConversionError::ZeroCopyFromError)?;
|
||||
Ok((Self::from(zc_header), &buf[CCSDS_HEADER_LEN..]))
|
||||
}
|
||||
|
||||
@ -751,8 +716,8 @@ impl SpHeader {
|
||||
}
|
||||
let zc_header: zc::SpHeader = zc::SpHeader::from(*self);
|
||||
zc_header
|
||||
.to_bytes(&mut buf[0..CCSDS_HEADER_LEN])
|
||||
.ok_or(ByteConversionError::ZeroCopyToError)?;
|
||||
.write_to(&mut buf[0..CCSDS_HEADER_LEN])
|
||||
.map_err(|_| ByteConversionError::ZeroCopyToError)?;
|
||||
Ok(&mut buf[CCSDS_HEADER_LEN..])
|
||||
}
|
||||
|
||||
@ -814,9 +779,9 @@ sph_from_other!(SpHeader, crate::zc::SpHeader);
|
||||
pub mod zc {
|
||||
use crate::{CcsdsPacket, CcsdsPrimaryHeader, PacketId, PacketSequenceCtrl, VERSION_MASK};
|
||||
use zerocopy::byteorder::NetworkEndian;
|
||||
use zerocopy::{AsBytes, FromBytes, FromZeroes, Unaligned, U16};
|
||||
use zerocopy::{FromBytes, Immutable, IntoBytes, Unaligned, U16};
|
||||
|
||||
#[derive(FromBytes, FromZeroes, AsBytes, Unaligned, Debug)]
|
||||
#[derive(FromBytes, IntoBytes, Immutable, Unaligned, Debug)]
|
||||
#[repr(C)]
|
||||
pub struct SpHeader {
|
||||
version_packet_id: U16<NetworkEndian>,
|
||||
@ -841,14 +806,6 @@ pub mod zc {
|
||||
data_len: U16::from(data_len),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_bytes(slice: &[u8]) -> Option<Self> {
|
||||
SpHeader::read_from(slice)
|
||||
}
|
||||
|
||||
pub fn to_bytes(&self, slice: &mut [u8]) -> Option<()> {
|
||||
self.write_to(slice)
|
||||
}
|
||||
}
|
||||
|
||||
impl CcsdsPacket for SpHeader {
|
||||
@ -917,6 +874,7 @@ pub(crate) mod tests {
|
||||
use postcard::{from_bytes, to_allocvec};
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{de::DeserializeOwned, Serialize};
|
||||
use zerocopy::FromBytes;
|
||||
|
||||
const CONST_SP: SpHeader = SpHeader::new(
|
||||
PacketId::new_for_tc(true, 0x36),
|
||||
@ -1196,7 +1154,7 @@ pub(crate) mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_zc_sph() {
|
||||
use zerocopy::AsBytes;
|
||||
use zerocopy::IntoBytes;
|
||||
|
||||
let sp_header = SpHeader::new_for_unseg_tc_checked(0x7FF, pow(2, 14) - 1, 0)
|
||||
.expect("Error creating SP header");
|
||||
@ -1216,7 +1174,7 @@ pub(crate) mod tests {
|
||||
assert_eq!(slice[5], 0x00);
|
||||
|
||||
let mut slice = [0; 6];
|
||||
sp_header_zc.write_to(slice.as_mut_slice());
|
||||
sp_header_zc.write_to(slice.as_mut_slice()).unwrap();
|
||||
assert_eq!(slice.len(), 6);
|
||||
assert_eq!(slice[0], 0x17);
|
||||
assert_eq!(slice[1], 0xFF);
|
||||
@ -1227,7 +1185,7 @@ pub(crate) mod tests {
|
||||
|
||||
let mut test_vec = vec![0_u8; 6];
|
||||
let slice = test_vec.as_mut_slice();
|
||||
sp_header_zc.write_to(slice);
|
||||
sp_header_zc.write_to(slice).unwrap();
|
||||
let slice = test_vec.as_slice();
|
||||
assert_eq!(slice.len(), 6);
|
||||
assert_eq!(slice[0], 0x17);
|
||||
@ -1237,8 +1195,8 @@ pub(crate) mod tests {
|
||||
assert_eq!(slice[4], 0x00);
|
||||
assert_eq!(slice[5], 0x00);
|
||||
|
||||
let sp_header = zc::SpHeader::from_bytes(slice);
|
||||
assert!(sp_header.is_some());
|
||||
let sp_header = zc::SpHeader::read_from_bytes(slice);
|
||||
assert!(sp_header.is_ok());
|
||||
let sp_header = sp_header.unwrap();
|
||||
assert_eq!(sp_header.ccsds_version(), 0b000);
|
||||
assert_eq!(sp_header.packet_id_raw(), 0x17FF);
|
||||
|
250
src/seq_count.rs
Normal file
@ -0,0 +1,250 @@
use crate::MAX_SEQ_COUNT;
use core::cell::Cell;
use paste::paste;
#[cfg(feature = "std")]
pub use stdmod::*;

/// Core trait for objects which can provide a sequence count.
///
/// The core functions are not mutable on purpose to allow easier usage with
/// static structs when using the interior mutability pattern. This can be achieved by using
/// [Cell], [core::cell::RefCell] or atomic types.
pub trait SequenceCountProvider {
    type Raw: Into<u64>;
    const MAX_BIT_WIDTH: usize;

    fn get(&self) -> Self::Raw;

    fn increment(&self);

    fn get_and_increment(&self) -> Self::Raw {
        let val = self.get();
        self.increment();
        val
    }
}
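
The trait deliberately takes `&self` everywhere, so counters can live in shared or static contexts and rely on interior mutability for the actual bookkeeping. A minimal sketch of a generic caller (the `stamp_next` helper is illustrative, not part of the module):

fn stamp_next<P: SequenceCountProvider>(provider: &P) -> u64 {
    // Works through a shared reference; the Cell or Mutex inside the
    // provider performs the mutation.
    provider.get_and_increment().into()
}
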
#[derive(Clone)]
|
||||
pub struct SeqCountProviderSimple<T: Copy> {
|
||||
seq_count: Cell<T>,
|
||||
max_val: T,
|
||||
}
|
||||
|
||||
macro_rules! impl_for_primitives {
|
||||
($($ty: ident,)+) => {
|
||||
$(
|
||||
paste! {
|
||||
impl SeqCountProviderSimple<$ty> {
|
||||
pub fn [<new_custom_max_val_ $ty>](max_val: $ty) -> Self {
|
||||
Self {
|
||||
seq_count: Cell::new(0),
|
||||
max_val,
|
||||
}
|
||||
}
|
||||
pub fn [<new_ $ty>]() -> Self {
|
||||
Self {
|
||||
seq_count: Cell::new(0),
|
||||
max_val: $ty::MAX
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for SeqCountProviderSimple<$ty> {
|
||||
fn default() -> Self {
|
||||
Self::[<new_ $ty>]()
|
||||
}
|
||||
}
|
||||
|
||||
impl SequenceCountProvider for SeqCountProviderSimple<$ty> {
|
||||
type Raw = $ty;
|
||||
const MAX_BIT_WIDTH: usize = core::mem::size_of::<Self::Raw>() * 8;
|
||||
|
||||
fn get(&self) -> Self::Raw {
|
||||
self.seq_count.get()
|
||||
}
|
||||
|
||||
fn increment(&self) {
|
||||
self.get_and_increment();
|
||||
}
|
||||
|
||||
fn get_and_increment(&self) -> Self::Raw {
|
||||
let curr_count = self.seq_count.get();
|
||||
|
||||
if curr_count == self.max_val {
|
||||
self.seq_count.set(0);
|
||||
} else {
|
||||
self.seq_count.set(curr_count + 1);
|
||||
}
|
||||
curr_count
|
||||
}
|
||||
}
|
||||
}
|
||||
)+
|
||||
}
|
||||
}
|
||||
|
||||
impl_for_primitives!(u8, u16, u32, u64,);
|
||||
|
||||
/// This is a sequence count provider which wraps around at [MAX_SEQ_COUNT].
|
||||
#[derive(Clone)]
|
||||
pub struct CcsdsSimpleSeqCountProvider {
|
||||
provider: SeqCountProviderSimple<u16>,
|
||||
}
|
||||
|
||||
impl Default for CcsdsSimpleSeqCountProvider {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
provider: SeqCountProviderSimple::new_custom_max_val_u16(MAX_SEQ_COUNT),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SequenceCountProvider for CcsdsSimpleSeqCountProvider {
|
||||
type Raw = u16;
|
||||
const MAX_BIT_WIDTH: usize = core::mem::size_of::<Self::Raw>() * 8;
|
||||
delegate::delegate! {
|
||||
to self.provider {
|
||||
fn get(&self) -> u16;
|
||||
fn increment(&self);
|
||||
fn get_and_increment(&self) -> u16;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
pub mod stdmod {
|
||||
use super::*;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
macro_rules! sync_clonable_seq_counter_impl {
|
||||
($($ty: ident,)+) => {
|
||||
$(paste! {
|
||||
/// These sequence counters can be shared between threads and can also be
/// configured to wrap around at specified maximum values. Please note that
/// the API provided by this type will not panic on [Mutex] lock errors,
/// but will yield 0 for the getter functions instead.
|
||||
#[derive(Clone, Default)]
|
||||
pub struct [<SeqCountProviderSync $ty:upper>] {
|
||||
seq_count: Arc<Mutex<$ty>>,
|
||||
max_val: $ty
|
||||
}
|
||||
|
||||
impl [<SeqCountProviderSync $ty:upper>] {
|
||||
pub fn new() -> Self {
|
||||
Self::new_with_max_val($ty::MAX)
|
||||
}
|
||||
|
||||
pub fn new_with_max_val(max_val: $ty) -> Self {
|
||||
Self {
|
||||
seq_count: Arc::default(),
|
||||
max_val
|
||||
}
|
||||
}
|
||||
}
|
||||
impl SequenceCountProvider for [<SeqCountProviderSync $ty:upper>] {
|
||||
type Raw = $ty;
|
||||
const MAX_BIT_WIDTH: usize = core::mem::size_of::<Self::Raw>() * 8;
|
||||
|
||||
fn get(&self) -> $ty {
|
||||
match self.seq_count.lock() {
|
||||
Ok(counter) => *counter,
|
||||
Err(_) => 0
|
||||
}
|
||||
}
|
||||
|
||||
fn increment(&self) {
|
||||
self.get_and_increment();
|
||||
}
|
||||
|
||||
fn get_and_increment(&self) -> $ty {
|
||||
match self.seq_count.lock() {
|
||||
Ok(mut counter) => {
|
||||
let val = *counter;
|
||||
if val == self.max_val {
|
||||
*counter = 0;
|
||||
} else {
|
||||
*counter += 1;
|
||||
}
|
||||
val
|
||||
}
|
||||
Err(_) => 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
})+
|
||||
}
|
||||
}
|
||||
sync_clonable_seq_counter_impl!(u8, u16, u32, u64,);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::seq_count::{
|
||||
CcsdsSimpleSeqCountProvider, SeqCountProviderSimple, SeqCountProviderSyncU8,
|
||||
SequenceCountProvider,
|
||||
};
|
||||
use crate::MAX_SEQ_COUNT;
|
||||
|
||||
#[test]
|
||||
fn test_u8_counter() {
|
||||
let u8_counter = SeqCountProviderSimple::<u8>::default();
|
||||
assert_eq!(u8_counter.get(), 0);
|
||||
assert_eq!(u8_counter.get_and_increment(), 0);
|
||||
assert_eq!(u8_counter.get_and_increment(), 1);
|
||||
assert_eq!(u8_counter.get(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_u8_counter_overflow() {
|
||||
let u8_counter = SeqCountProviderSimple::new_u8();
|
||||
for _ in 0..256 {
|
||||
u8_counter.increment();
|
||||
}
|
||||
assert_eq!(u8_counter.get(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ccsds_counter() {
|
||||
let ccsds_counter = CcsdsSimpleSeqCountProvider::default();
|
||||
assert_eq!(ccsds_counter.get(), 0);
|
||||
assert_eq!(ccsds_counter.get_and_increment(), 0);
|
||||
assert_eq!(ccsds_counter.get_and_increment(), 1);
|
||||
assert_eq!(ccsds_counter.get(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ccsds_counter_overflow() {
|
||||
let ccsds_counter = CcsdsSimpleSeqCountProvider::default();
|
||||
for _ in 0..MAX_SEQ_COUNT + 1 {
|
||||
ccsds_counter.increment();
|
||||
}
|
||||
assert_eq!(ccsds_counter.get(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_atomic_ref_counters() {
|
||||
let sync_u8_counter = SeqCountProviderSyncU8::new();
|
||||
assert_eq!(sync_u8_counter.get(), 0);
|
||||
assert_eq!(sync_u8_counter.get_and_increment(), 0);
|
||||
assert_eq!(sync_u8_counter.get_and_increment(), 1);
|
||||
assert_eq!(sync_u8_counter.get(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_atomic_ref_counters_overflow() {
|
||||
let sync_u8_counter = SeqCountProviderSyncU8::new();
|
||||
for _ in 0..u8::MAX as u16 + 1 {
|
||||
sync_u8_counter.increment();
|
||||
}
|
||||
assert_eq!(sync_u8_counter.get(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_atomic_ref_counters_overflow_custom_max_val() {
|
||||
let sync_u8_counter = SeqCountProviderSyncU8::new_with_max_val(128);
|
||||
for _ in 0..129 {
|
||||
sync_u8_counter.increment();
|
||||
}
|
||||
assert_eq!(sync_u8_counter.get(), 0);
|
||||
}
|
||||
}
|
@ -71,7 +71,19 @@ mod tests {
|
||||
use std::format;
|
||||
|
||||
#[test]
|
||||
fn test_ascii_timestamp_a_unterminated() {
|
||||
fn test_ascii_timestamp_a_unterminated_epoch() {
|
||||
let date = chrono::DateTime::UNIX_EPOCH;
|
||||
let stamp_formatter = generate_time_code_a(&date);
|
||||
let stamp = format!("{}", stamp_formatter);
|
||||
let t_sep = stamp.find('T');
|
||||
assert!(t_sep.is_some());
|
||||
assert_eq!(t_sep.unwrap(), 10);
|
||||
assert_eq!(stamp.len(), FMT_STR_CODE_A_WITH_SIZE.1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_ascii_timestamp_a_unterminated_now() {
|
||||
let date = Utc::now();
|
||||
let stamp_formatter = generate_time_code_a(&date);
|
||||
let stamp = format!("{}", stamp_formatter);
|
||||
@ -82,7 +94,24 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ascii_timestamp_a_terminated() {
|
||||
fn test_ascii_timestamp_a_terminated_epoch() {
|
||||
let date = chrono::DateTime::UNIX_EPOCH;
|
||||
let stamp_formatter = generate_time_code_a_terminated(&date);
|
||||
let stamp = format!("{}", stamp_formatter);
|
||||
let t_sep = stamp.find('T');
|
||||
assert!(t_sep.is_some());
|
||||
assert_eq!(t_sep.unwrap(), 10);
|
||||
let z_terminator = stamp.find('Z');
|
||||
assert!(z_terminator.is_some());
|
||||
assert_eq!(
|
||||
z_terminator.unwrap(),
|
||||
FMT_STR_CODE_A_TERMINATED_WITH_SIZE.1 - 1
|
||||
);
|
||||
assert_eq!(stamp.len(), FMT_STR_CODE_A_TERMINATED_WITH_SIZE.1);
|
||||
}
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_ascii_timestamp_a_terminated_now() {
|
||||
let date = Utc::now();
|
||||
let stamp_formatter = generate_time_code_a_terminated(&date);
|
||||
let stamp = format!("{}", stamp_formatter);
|
||||
@ -99,7 +128,19 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ascii_timestamp_b_unterminated() {
|
||||
fn test_ascii_timestamp_b_unterminated_epoch() {
|
||||
let date = chrono::DateTime::UNIX_EPOCH;
|
||||
let stamp_formatter = generate_time_code_b(&date);
|
||||
let stamp = format!("{}", stamp_formatter);
|
||||
let t_sep = stamp.find('T');
|
||||
assert!(t_sep.is_some());
|
||||
assert_eq!(t_sep.unwrap(), 8);
|
||||
assert_eq!(stamp.len(), FMT_STR_CODE_B_WITH_SIZE.1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_ascii_timestamp_b_unterminated_now() {
|
||||
let date = Utc::now();
|
||||
let stamp_formatter = generate_time_code_b(&date);
|
||||
let stamp = format!("{}", stamp_formatter);
|
||||
@ -110,7 +151,25 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ascii_timestamp_b_terminated() {
|
||||
fn test_ascii_timestamp_b_terminated_epoch() {
|
||||
let date = chrono::DateTime::UNIX_EPOCH;
|
||||
let stamp_formatter = generate_time_code_b_terminated(&date);
|
||||
let stamp = format!("{}", stamp_formatter);
|
||||
let t_sep = stamp.find('T');
|
||||
assert!(t_sep.is_some());
|
||||
assert_eq!(t_sep.unwrap(), 8);
|
||||
let z_terminator = stamp.find('Z');
|
||||
assert!(z_terminator.is_some());
|
||||
assert_eq!(
|
||||
z_terminator.unwrap(),
|
||||
FMT_STR_CODE_B_TERMINATED_WITH_SIZE.1 - 1
|
||||
);
|
||||
assert_eq!(stamp.len(), FMT_STR_CODE_B_TERMINATED_WITH_SIZE.1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_ascii_timestamp_b_terminated_now() {
|
||||
let date = Utc::now();
|
||||
let stamp_formatter = generate_time_code_b_terminated(&date);
|
||||
let stamp = format!("{}", stamp_formatter);
|
||||
|
@ -7,17 +7,13 @@
|
||||
use crate::private::Sealed;
|
||||
use crate::ByteConversionError;
|
||||
use core::cmp::Ordering;
|
||||
use core::fmt::{Debug, Display, Formatter};
|
||||
use core::fmt::Debug;
|
||||
use core::ops::{Add, AddAssign};
|
||||
use core::time::Duration;
|
||||
|
||||
use delegate::delegate;
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
use super::StdTimestampError;
|
||||
#[cfg(feature = "std")]
|
||||
use std::error::Error;
|
||||
#[cfg(feature = "std")]
|
||||
use std::time::{SystemTime, SystemTimeError};
|
||||
|
||||
#[cfg(feature = "chrono")]
|
||||
@ -93,49 +89,19 @@ pub enum SubmillisPrecision {
|
||||
Reserved = 0b11,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
|
||||
#[derive(Debug, PartialEq, Eq, Copy, Clone, thiserror::Error)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
|
||||
pub enum CdsError {
|
||||
/// CCSDS days value exceeds maximum allowed size or is negative
|
||||
#[error("invalid ccsds days {0}")]
|
||||
InvalidCcsdsDays(i64),
|
||||
/// There are distinct constructors depending on the days field width detected in the preamble
|
||||
/// field. This error will be returned if there is a missmatch.
|
||||
#[error("wrong constructor for length of day {0:?} detected in preamble")]
|
||||
InvalidCtorForDaysOfLenInPreamble(LengthOfDaySegment),
|
||||
DateBeforeCcsdsEpoch(DateBeforeCcsdsEpochError),
|
||||
}
|
||||
|
||||
impl Display for CdsError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
CdsError::InvalidCcsdsDays(days) => {
|
||||
write!(f, "invalid ccsds days {days}")
|
||||
}
|
||||
CdsError::InvalidCtorForDaysOfLenInPreamble(length_of_day) => {
|
||||
write!(
|
||||
f,
|
||||
"wrong constructor for length of day {length_of_day:?} detected in preamble",
|
||||
)
|
||||
}
|
||||
CdsError::DateBeforeCcsdsEpoch(e) => write!(f, "date before CCSDS epoch: {e}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl Error for CdsError {
|
||||
fn source(&self) -> Option<&(dyn Error + 'static)> {
|
||||
match self {
|
||||
CdsError::DateBeforeCcsdsEpoch(e) => Some(e),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DateBeforeCcsdsEpochError> for CdsError {
|
||||
fn from(value: DateBeforeCcsdsEpochError) -> Self {
|
||||
Self::DateBeforeCcsdsEpoch(value)
|
||||
}
|
||||
#[error("date before CCSDS epoch: {0}")]
|
||||
DateBeforeCcsdsEpoch(#[from] DateBeforeCcsdsEpochError),
|
||||
}
|
||||
|
||||
pub fn length_of_day_segment_from_pfield(pfield: u8) -> LengthOfDaySegment {
|
||||
@ -300,20 +266,23 @@ impl CdsConverter for ConversionFromUnix {
|
||||
self.unix_days_seconds
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper struct which generates fields for the CDS time provider from a datetime.
|
||||
#[cfg(feature = "chrono")]
|
||||
struct ConversionFromChronoDatetime {
|
||||
unix_conversion: ConversionFromUnix,
|
||||
submillis_prec: SubmillisPrecision,
|
||||
submillis: u32,
|
||||
}
|
||||
|
||||
#[cfg(feature = "chrono")]
|
||||
impl CdsCommon for ConversionFromChronoDatetime {
|
||||
#[inline]
|
||||
fn submillis_precision(&self) -> SubmillisPrecision {
|
||||
self.submillis_prec
|
||||
}
|
||||
|
||||
delegate! {
|
||||
delegate::delegate! {
|
||||
to self.unix_conversion {
|
||||
#[inline]
|
||||
fn ms_of_day(&self) -> u32;
|
||||
@ -328,8 +297,9 @@ impl CdsCommon for ConversionFromChronoDatetime {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "chrono")]
|
||||
impl CdsConverter for ConversionFromChronoDatetime {
|
||||
delegate! {to self.unix_conversion {
|
||||
delegate::delegate! {to self.unix_conversion {
|
||||
#[inline]
|
||||
fn unix_days_seconds(&self) -> i64;
|
||||
}}
|
||||
@ -366,7 +336,6 @@ impl ConversionFromChronoDatetime {
|
||||
Self::new_generic(dt, SubmillisPrecision::Picoseconds)
|
||||
}
|
||||
|
||||
#[cfg(feature = "chrono")]
|
||||
fn new_generic(
|
||||
dt: &chrono::DateTime<chrono::Utc>,
|
||||
prec: SubmillisPrecision,
|
||||
@ -448,7 +417,7 @@ impl CdsCommon for ConversionFromNow {
|
||||
fn submillis_precision(&self) -> SubmillisPrecision {
|
||||
self.submillis_prec
|
||||
}
|
||||
delegate! {
|
||||
delegate::delegate! {
|
||||
to self.unix_conversion {
|
||||
fn ms_of_day(&self) -> u32;
|
||||
fn ccsds_days_as_u32(&self) -> u32;
|
||||
@ -462,7 +431,7 @@ impl CdsCommon for ConversionFromNow {
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl CdsConverter for ConversionFromNow {
|
||||
delegate! {to self.unix_conversion { fn unix_days_seconds(&self) -> i64; }}
|
||||
delegate::delegate! {to self.unix_conversion { fn unix_days_seconds(&self) -> i64; }}
|
||||
}
|
||||
|
||||
#[cfg(feature = "alloc")]
|
||||
@ -1622,6 +1591,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_time_now() {
|
||||
let timestamp_now = CdsTime::now_with_u16_days().unwrap();
|
||||
let compare_stamp = chrono::Utc::now();
|
||||
@ -1629,6 +1599,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_time_now_us_prec() {
|
||||
let timestamp_now = CdsTime::now_with_u16_days_us_precision().unwrap();
|
||||
let compare_stamp = chrono::Utc::now();
|
||||
@ -1636,6 +1607,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_time_now_ps_prec() {
|
||||
let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap();
|
||||
let compare_stamp = chrono::Utc::now();
|
||||
@ -1643,6 +1615,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_time_now_ps_prec_u16_days() {
|
||||
let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap();
|
||||
let compare_stamp = chrono::Utc::now();
|
||||
@ -1650,6 +1623,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_time_now_ps_prec_u24_days() {
|
||||
let timestamp_now = CdsTime::now_with_u24_days_ps_precision().unwrap();
|
||||
let compare_stamp = chrono::Utc::now();
|
||||
@ -2306,6 +2280,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_update_from_now() {
|
||||
let mut stamp = CdsTime::new_with_u16_days(0, 0);
|
||||
let _ = stamp.update_from_now();
|
||||
@ -2321,6 +2296,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
#[cfg(feature = "serde")]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_serialization() {
|
||||
let stamp_now = CdsTime::now_with_u16_days().expect("Error retrieving time");
|
||||
let val = to_allocvec(&stamp_now).expect("Serializing timestamp failed");
|
||||
|
@ -9,7 +9,6 @@ use serde::{Deserialize, Serialize};
|
||||
use core::fmt::{Debug, Display, Formatter};
|
||||
use core::ops::{Add, AddAssign};
|
||||
use core::time::Duration;
|
||||
use core::u64;
|
||||
|
||||
use crate::ByteConversionError;
|
||||
|
||||
@ -947,6 +946,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_datetime_now() {
|
||||
let now = chrono::Utc::now();
|
||||
let cuc_now = CucTime::now(FractionalResolution::SixtyNs, LEAP_SECONDS);
|
||||
@ -1278,6 +1278,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn set_fract_resolution() {
|
||||
let mut stamp = CucTime::new(2000);
|
||||
stamp.set_fractional_resolution(FractionalResolution::SixtyNs);
|
||||
|
@ -6,7 +6,6 @@ use core::cmp::Ordering;
|
||||
use core::fmt::{Display, Formatter};
|
||||
use core::ops::{Add, AddAssign, Sub};
|
||||
use core::time::Duration;
|
||||
use core::u8;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
#[cfg(not(feature = "std"))]
|
||||
@ -64,20 +63,12 @@ pub fn ccsds_time_code_from_p_field(pfield: u8) -> Result<CcsdsTimeCode, u8> {
|
||||
CcsdsTimeCode::try_from(raw_bits).map_err(|_| raw_bits)
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
|
||||
#[derive(Debug, PartialEq, Eq, Copy, Clone, thiserror::Error)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
|
||||
#[error("date before ccsds epoch: {0:?}")]
|
||||
pub struct DateBeforeCcsdsEpochError(UnixTime);
|
||||
|
||||
impl Display for DateBeforeCcsdsEpochError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
|
||||
write!(f, "date before ccsds epoch: {:?}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl Error for DateBeforeCcsdsEpochError {}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
|
||||
@ -551,6 +542,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_get_current_time() {
|
||||
let sec_floats = seconds_since_epoch();
|
||||
assert!(sec_floats > 0.0);
|
||||
@ -565,6 +557,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_ccsds_epoch() {
|
||||
let now = SystemTime::now()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)
|
||||
@ -685,6 +678,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(miri, ignore)]
|
||||
fn test_from_now() {
|
||||
let stamp_now = UnixTime::now().unwrap();
|
||||
let dt_now = stamp_now.chrono_date_time().unwrap();