Compare commits

..

4 Commits

Author SHA1 Message Date
35d48671fb tested basic frame parser
Some checks failed
Rust/spacepackets/pipeline/head There was a failure building this commit
2024-10-08 16:24:16 +02:00
0a7fa4ecf0 continue USLP support
All checks were successful
Rust/spacepackets/pipeline/head This commit looks good
2024-10-08 15:34:47 +02:00
77003ccfe3 add more USLP API
Some checks failed
Rust/spacepackets/pipeline/head There was a failure building this commit
2024-10-07 17:31:11 +02:00
7d87c0fbc7 add basic USLP support
Some checks failed
Rust/spacepackets/pipeline/head There was a failure building this commit
2024-10-07 15:27:45 +02:00
22 changed files with 1201 additions and 255 deletions

View File

@ -29,7 +29,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.81.0 - uses: dtolnay/rust-toolchain@1.70.0
- run: cargo check --release - run: cargo check --release
cross-check: cross-check:

View File

@ -8,23 +8,6 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
# [unreleased] # [unreleased]
# [v0.13.1] 2025-03-21
- Bugfix for an operator precedence issue in `PusTcSecondaryHeader::pus_version`,
`PusTcSecondaryHeaderWithoutTimestamp::pus_version`, `CdsTime::from_bytes_with_u16_days` and
`CdsTime::from_bytes_with_u24_days`
# [v0.13.0] 2024-11-08
- Bumped MSRV to 1.81.0
- Bump `zerocopy` to v0.8.0
- Bump `thiserror` to v2.0.0
## Changed
- Migrated all Error implementations to thiserror, improved some naming and error handling in
general
# [v0.12.0] 2024-09-10 # [v0.12.0] 2024-09-10
- Bumped MSRV to 1.70.0 - Bumped MSRV to 1.70.0
@ -567,7 +550,3 @@ The timestamp of `PusTm` is now optional. See Added and Changed section for deta
Initial release with CCSDS Space Packet Primary Header implementation and basic PUS TC and TM Initial release with CCSDS Space Packet Primary Header implementation and basic PUS TC and TM
implementations. implementations.
[v0.13.1]: https://egit.irs.uni-stuttgart.de/rust/spacepackets/compare/v0.13.0...v0.13.1
[v0.13.0]: https://egit.irs.uni-stuttgart.de/rust/spacepackets/compare/v0.12.0...v0.13.0
[v0.12.0]: https://egit.irs.uni-stuttgart.de/rust/spacepackets/compare/v0.11.2...v0.12.0
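The v0.13.0 entry above ("Migrated all Error implementations to thiserror") is what most of the diffs below revolve around. A minimal sketch of the derive-based pattern, assuming the crate's own `ByteConversionError` is in scope; `DemoError` is illustrative and not a type from this crate:

use spacepackets::ByteConversionError;
use thiserror::Error;

// Illustrative only: the #[error] strings replace hand-written Display impls,
// and #[from] replaces hand-written From impls while also wiring up source().
#[derive(Debug, Error)]
pub enum DemoError {
    #[error("checksum error for checksum {0}")]
    Checksum(u16),
    #[error("byte conversion error: {0}")]
    ByteConversion(#[from] ByteConversionError),
}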

View File

@ -1,8 +1,8 @@
[package] [package]
name = "spacepackets" name = "spacepackets"
version = "0.13.1" version = "0.12.0"
edition = "2021" edition = "2021"
rust-version = "1.81.0" rust-version = "1.70.0"
authors = ["Robin Mueller <muellerr@irs.uni-stuttgart.de>"] authors = ["Robin Mueller <muellerr@irs.uni-stuttgart.de>"]
description = "Generic implementations for various CCSDS and ECSS packet standards" description = "Generic implementations for various CCSDS and ECSS packet standards"
homepage = "https://egit.irs.uni-stuttgart.de/rust/spacepackets" homepage = "https://egit.irs.uni-stuttgart.de/rust/spacepackets"
@ -18,12 +18,12 @@ delegate = ">=0.8, <=0.13"
paste = "1" paste = "1"
[dependencies.zerocopy] [dependencies.zerocopy]
version = "0.8" version = "0.7"
features = ["derive"] features = ["derive"]
[dependencies.thiserror] [dependencies.thiserror]
version = "2" version = "1"
default-features = false optional = true
[dependencies.num_enum] [dependencies.num_enum]
version = ">0.5, <=0.7" version = ">0.5, <=0.7"
@ -53,16 +53,18 @@ default-features = false
version = "0.3" version = "0.3"
optional = true optional = true
[dev-dependencies]
postcard = "1"
chrono = "0.4"
[features] [features]
default = ["std"] default = ["std"]
std = ["alloc", "chrono/std", "chrono/clock", "thiserror/std"] std = ["chrono/std", "chrono/clock", "alloc", "thiserror"]
serde = ["dep:serde", "chrono?/serde"] serde = ["dep:serde", "chrono/serde"]
alloc = ["chrono?/alloc", "defmt?/alloc", "serde?/alloc"] alloc = ["postcard/alloc", "chrono/alloc", "defmt/alloc", "serde/alloc"]
chrono = ["dep:chrono"]
timelib = ["dep:time"] timelib = ["dep:time"]
defmt = ["dep:defmt"]
[dev-dependencies]
postcard = { version = "1", features = ["alloc"] }
chrono = "0.4"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true
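The feature table above (std, alloc, serde, defmt, chrono, timelib) is consumed in the sources through cfg gates, and the same patterns appear verbatim in the diffs below. A short sketch, assuming serde is pulled in with its derive feature; `DemoFlags` is illustrative:

// Allocation-dependent items are gated on the "alloc" feature.
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "alloc")]
use alloc::vec::Vec;

// std-only pieces such as std::error::Error impls are gated on "std".
#[cfg(feature = "std")]
use std::error::Error;

// Optional derives track the optional dependencies declared above.
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct DemoFlags(pub u8);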

View File

@ -1 +0,0 @@
github: robamu

View File

@ -1,3 +0,0 @@
#!/bin/sh
export RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options"
cargo +nightly doc --all-features --open

View File

@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize};
#[cfg(feature = "std")] #[cfg(feature = "std")]
use std::string::String; use std::string::String;
use super::TlvLvDataTooLargeError; use super::TlvLvDataTooLarge;
pub const MIN_LV_LEN: usize = 1; pub const MIN_LV_LEN: usize = 1;
@ -64,9 +64,9 @@ pub(crate) fn generic_len_check_deserialization(
impl<'data> Lv<'data> { impl<'data> Lv<'data> {
#[inline] #[inline]
pub fn new(data: &[u8]) -> Result<Lv, TlvLvDataTooLargeError> { pub fn new(data: &[u8]) -> Result<Lv, TlvLvDataTooLarge> {
if data.len() > u8::MAX as usize { if data.len() > u8::MAX as usize {
return Err(TlvLvDataTooLargeError(data.len())); return Err(TlvLvDataTooLarge(data.len()));
} }
Ok(Lv { Ok(Lv {
data, data,
@ -86,7 +86,7 @@ impl<'data> Lv<'data> {
/// Helper function to build a string LV. This is especially useful for the file or directory /// Helper function to build a string LV. This is especially useful for the file or directory
/// path LVs /// path LVs
#[inline] #[inline]
pub fn new_from_str(str_slice: &str) -> Result<Lv, TlvLvDataTooLargeError> { pub fn new_from_str(str_slice: &str) -> Result<Lv, TlvLvDataTooLarge> {
Self::new(str_slice.as_bytes()) Self::new(str_slice.as_bytes())
} }
@ -94,7 +94,7 @@ impl<'data> Lv<'data> {
/// path LVs /// path LVs
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[inline] #[inline]
pub fn new_from_string(string: &'data String) -> Result<Lv<'data>, TlvLvDataTooLargeError> { pub fn new_from_string(string: &'data String) -> Result<Lv<'data>, TlvLvDataTooLarge> {
Self::new(string.as_bytes()) Self::new(string.as_bytes())
} }
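As the `new`/`new_from_str` signatures above show, an LV (length-value) field carries at most `u8::MAX` bytes of value data, so construction is fallible. A small usage sketch, assuming the newer error name from the left-hand side of this diff (`TlvLvDataTooLargeError`):

use spacepackets::cfdp::lv::Lv;
use spacepackets::cfdp::TlvLvDataTooLargeError;

fn lv_demo() {
    // Short payloads fit into the single length byte of an LV field.
    assert!(Lv::new_from_str("/tmp/some_file").is_ok());

    // Anything above u8::MAX bytes is rejected at construction time.
    let too_large = [0u8; 300];
    let error = Lv::new(&too_large).unwrap_err();
    assert_eq!(error, TlvLvDataTooLargeError(300));
}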

View File

@ -1,8 +1,11 @@
//! Low-level CCSDS File Delivery Protocol (CFDP) support according to [CCSDS 727.0-B-5](https://public.ccsds.org/Pubs/727x0b5.pdf). //! Low-level CCSDS File Delivery Protocol (CFDP) support according to [CCSDS 727.0-B-5](https://public.ccsds.org/Pubs/727x0b5.pdf).
use crate::ByteConversionError; use crate::ByteConversionError;
use core::fmt::{Display, Formatter};
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::error::Error;
pub mod lv; pub mod lv;
pub mod pdu; pub mod pdu;
@ -173,43 +176,97 @@ impl Default for ChecksumType {
pub const NULL_CHECKSUM_U32: [u8; 4] = [0; 4]; pub const NULL_CHECKSUM_U32: [u8; 4] = [0; 4];
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))] #[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[error("data with size {0} larger than allowed {max} bytes", max = u8::MAX)] pub struct TlvLvDataTooLarge(pub usize);
pub struct TlvLvDataTooLargeError(pub usize);
/// First value: Found value. Second value: Expected value if there is one. impl Display for TlvLvDataTooLarge {
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)] fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] write!(
#[cfg_attr(feature = "defmt", derive(defmt::Format))] f,
#[error("invalid TLV type field, found {found}, expected {expected:?}")] "data with size {} larger than allowed {} bytes",
pub struct InvalidTlvTypeFieldError { self.0,
found: u8, u8::MAX
expected: Option<u8>, )
}
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)] #[cfg(feature = "std")]
impl Error for TlvLvDataTooLarge {}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))] #[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum TlvLvError { pub enum TlvLvError {
#[error("{0}")] DataTooLarge(TlvLvDataTooLarge),
DataTooLarge(#[from] TlvLvDataTooLargeError), ByteConversion(ByteConversionError),
#[error("byte conversion error: {0}")] /// First value: Found value. Second value: Expected value if there is one.
ByteConversion(#[from] ByteConversionError), InvalidTlvTypeField {
#[error("{0}")] found: u8,
InvalidTlvTypeField(#[from] InvalidTlvTypeFieldError), expected: Option<u8>,
#[error("invalid value length {0}")] },
/// Logically invalid value length detected. The value length may not exceed 255 bytes.
/// Depending on the concrete TLV type, the value length may also be logically invalid.
InvalidValueLength(usize), InvalidValueLength(usize),
/// Only applies to filestore requests and responses. Second name was missing where one is /// Only applies to filestore requests and responses. Second name was missing where one is
/// expected. /// expected.
#[error("second name missing for filestore request or response")]
SecondNameMissing, SecondNameMissing,
/// Invalid action code for filestore requests or responses. /// Invalid action code for filestore requests or responses.
#[error("invalid action code {0}")]
InvalidFilestoreActionCode(u8), InvalidFilestoreActionCode(u8),
} }
impl From<TlvLvDataTooLarge> for TlvLvError {
fn from(value: TlvLvDataTooLarge) -> Self {
Self::DataTooLarge(value)
}
}
impl From<ByteConversionError> for TlvLvError {
fn from(value: ByteConversionError) -> Self {
Self::ByteConversion(value)
}
}
impl Display for TlvLvError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
TlvLvError::DataTooLarge(e) => {
write!(f, "{}", e)
}
TlvLvError::ByteConversion(e) => {
write!(f, "tlv or lv byte conversion: {}", e)
}
TlvLvError::InvalidTlvTypeField { found, expected } => {
write!(
f,
"invalid TLV type field, found {found}, expected {expected:?}"
)
}
TlvLvError::InvalidValueLength(len) => {
write!(f, "invalid value length {len}")
}
TlvLvError::SecondNameMissing => {
write!(f, "second name missing for filestore request or response")
}
TlvLvError::InvalidFilestoreActionCode(raw) => {
write!(f, "invalid filestore action code with raw value {raw}")
}
}
}
}
#[cfg(feature = "std")]
impl Error for TlvLvError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
TlvLvError::DataTooLarge(e) => Some(e),
TlvLvError::ByteConversion(e) => Some(e),
_ => None,
}
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
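Both variants above (thiserror-derived on the left, hand-written on the right) end up with the same observable behaviour: `TlvLvError` converts from its inner error types via `From` and exposes them through `std::error::Error::source` when the `std` feature is enabled. A sketch of how calling code typically relies on that, using the left-hand type names; `check_value_len` and `print_chain` are hypothetical helpers:

use spacepackets::cfdp::{TlvLvDataTooLargeError, TlvLvError};

// The `?`-style widening works because of the From<TlvLvDataTooLargeError> impl.
fn check_value_len(len: usize) -> Result<(), TlvLvError> {
    if len > u8::MAX as usize {
        return Err(TlvLvDataTooLargeError(len).into());
    }
    Ok(())
}

#[cfg(feature = "std")]
fn print_chain(error: &TlvLvError) {
    use std::error::Error;
    // source() yields the wrapped TlvLvDataTooLargeError or ByteConversionError.
    if let Some(inner) = error.source() {
        println!("caused by: {inner}");
    }
}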

View File

@ -297,7 +297,7 @@ mod tests {
buf[written - 1] -= 1; buf[written - 1] -= 1;
let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16; let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16;
let error = EofPdu::from_bytes(&buf).unwrap_err(); let error = EofPdu::from_bytes(&buf).unwrap_err();
if let PduError::Checksum(e) = error { if let PduError::ChecksumError(e) = error {
assert_eq!(e, crc); assert_eq!(e, crc);
} else { } else {
panic!("expected crc error"); panic!("expected crc error");

View File

@ -377,7 +377,7 @@ impl CfdpPdu for FileDataPduCreatorWithReservedDatafield<'_> {
} }
} }
/// This structure is created with [FileDataPduCreatorWithReservedDatafield::write_to_bytes_partially] /// This structure is created with [FileDataPduCreatorReservedDatafield::write_to_bytes_partially]
/// and provides an API to read file data from the virtual filesystem into the file data PDU buffer /// and provides an API to read file data from the virtual filesystem into the file data PDU buffer
/// directly. /// directly.
/// ///
@ -544,7 +544,7 @@ mod tests {
buf[written - 1] -= 1; buf[written - 1] -= 1;
let crc: u16 = ((buf[written - 2] as u16) << 8) | buf[written - 1] as u16; let crc: u16 = ((buf[written - 2] as u16) << 8) | buf[written - 1] as u16;
let error = FileDataPdu::from_bytes(&buf).unwrap_err(); let error = FileDataPdu::from_bytes(&buf).unwrap_err();
if let PduError::Checksum(e) = error { if let PduError::ChecksumError(e) = error {
assert_eq!(e, crc); assert_eq!(e, crc);
} else { } else {
panic!("expected crc error"); panic!("expected crc error");
@ -753,7 +753,7 @@ mod tests {
assert!(pdu_reader_error.is_err()); assert!(pdu_reader_error.is_err());
let error = pdu_reader_error.unwrap_err(); let error = pdu_reader_error.unwrap_err();
match error { match error {
PduError::Checksum(_) => (), PduError::ChecksumError(_) => (),
_ => { _ => {
panic!("unexpected PDU error {}", error) panic!("unexpected PDU error {}", error)
} }

View File

@ -4,14 +4,14 @@ use crate::cfdp::pdu::{
use crate::cfdp::tlv::{ use crate::cfdp::tlv::{
EntityIdTlv, FilestoreResponseTlv, GenericTlv, Tlv, TlvType, TlvTypeField, WritableTlv, EntityIdTlv, FilestoreResponseTlv, GenericTlv, Tlv, TlvType, TlvTypeField, WritableTlv,
}; };
use crate::cfdp::{ConditionCode, CrcFlag, Direction, PduType}; use crate::cfdp::{ConditionCode, CrcFlag, Direction, PduType, TlvLvError};
use crate::ByteConversionError; use crate::ByteConversionError;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::tlv::ReadableTlv; use super::tlv::ReadableTlv;
use super::{CfdpPdu, InvalidTlvTypeFieldError, WritablePduPacket}; use super::{CfdpPdu, WritablePduPacket};
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
@ -332,26 +332,22 @@ impl<'buf> FinishedPduReader<'buf> {
// last TLV, everything else would break the whole handling of the packet // last TLV, everything else would break the whole handling of the packet
// TLVs. // TLVs.
if current_idx != full_len_without_crc { if current_idx != full_len_without_crc {
return Err(PduError::Format); return Err(PduError::FormatError);
} }
} else { } else {
return Err(PduError::TlvLv( return Err(TlvLvError::InvalidTlvTypeField {
InvalidTlvTypeFieldError { found: tlv_type.into(),
found: tlv_type.into(), expected: Some(TlvType::FilestoreResponse.into()),
expected: Some(TlvType::FilestoreResponse.into()), }
} .into());
.into(),
));
} }
} }
TlvTypeField::Custom(raw) => { TlvTypeField::Custom(raw) => {
return Err(PduError::TlvLv( return Err(TlvLvError::InvalidTlvTypeField {
InvalidTlvTypeFieldError { found: raw,
found: raw, expected: None,
expected: None, }
} .into());
.into(),
));
} }
} }
} }
@ -568,7 +564,7 @@ mod tests {
buf[written - 1] -= 1; buf[written - 1] -= 1;
let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16; let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16;
let error = FinishedPduReader::new(&buf).unwrap_err(); let error = FinishedPduReader::new(&buf).unwrap_err();
if let PduError::Checksum(e) = error { if let PduError::ChecksumError(e) = error {
assert_eq!(e, crc); assert_eq!(e, crc);
} else { } else {
panic!("expected crc error"); panic!("expected crc error");

View File

@ -720,7 +720,7 @@ pub mod tests {
fn test_with_owned_opts() { fn test_with_owned_opts() {
let tlv1 = TlvOwned::new_empty(TlvType::FlowLabel); let tlv1 = TlvOwned::new_empty(TlvType::FlowLabel);
let msg_to_user: [u8; 4] = [1, 2, 3, 4]; let msg_to_user: [u8; 4] = [1, 2, 3, 4];
let tlv2 = TlvOwned::new(TlvType::MsgToUser, &msg_to_user); let tlv2 = TlvOwned::new(TlvType::MsgToUser, &msg_to_user).unwrap();
let mut all_tlvs = tlv1.to_vec(); let mut all_tlvs = tlv1.to_vec();
all_tlvs.extend(tlv2.to_vec()); all_tlvs.extend(tlv2.to_vec());
let (src_filename, dest_filename, metadata_pdu) = generic_metadata_pdu( let (src_filename, dest_filename, metadata_pdu) = generic_metadata_pdu(
@ -780,7 +780,7 @@ pub mod tests {
assert_eq!(expected, Some(FileDirectiveType::MetadataPdu)); assert_eq!(expected, Some(FileDirectiveType::MetadataPdu));
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"invalid directive type, found 255, expected Some(MetadataPdu)" "invalid directive type value 255, expected Some(MetadataPdu)"
); );
} else { } else {
panic!("Expected InvalidDirectiveType error, got {:?}", error); panic!("Expected InvalidDirectiveType error, got {:?}", error);
@ -806,7 +806,7 @@ pub mod tests {
assert_eq!(expected, FileDirectiveType::MetadataPdu); assert_eq!(expected, FileDirectiveType::MetadataPdu);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"wrong directive type, found EofPdu, expected MetadataPdu" "found directive type EofPdu, expected MetadataPdu"
); );
} else { } else {
panic!("Expected InvalidDirectiveType error, got {:?}", error); panic!("Expected InvalidDirectiveType error, got {:?}", error);

View File

@ -5,6 +5,9 @@ use crate::ByteConversionError;
use crate::CRC_CCITT_FALSE; use crate::CRC_CCITT_FALSE;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
use core::fmt::{Display, Formatter};
#[cfg(feature = "std")]
use std::error::Error;
pub mod ack; pub mod ack;
pub mod eof; pub mod eof;
@ -27,62 +30,137 @@ pub enum FileDirectiveType {
KeepAlivePdu = 0x0c, KeepAlivePdu = 0x0c,
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum PduError { pub enum PduError {
#[error("byte conversion error: {0}")] ByteConversion(ByteConversionError),
ByteConversion(#[from] ByteConversionError), /// Found version ID invalid, not equal to [CFDP_VERSION_2].
/// Found version ID invalid, not equal to [super::CFDP_VERSION_2].
#[error("CFDP version missmatch, found {0}, expected {ver}", ver = super::CFDP_VERSION_2)]
CfdpVersionMissmatch(u8), CfdpVersionMissmatch(u8),
/// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported. /// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported.
#[error("invalid PDU entity ID length {0}, only [1, 2, 4, 8] are allowed")]
InvalidEntityLen(u8), InvalidEntityLen(u8),
/// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported. /// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported.
#[error("invalid transaction ID length {0}")]
InvalidTransactionSeqNumLen(u8), InvalidTransactionSeqNumLen(u8),
#[error(
"missmatch of PDU source ID length {src_id_len} and destination ID length {dest_id_len}"
)]
SourceDestIdLenMissmatch { SourceDestIdLenMissmatch {
src_id_len: usize, src_id_len: usize,
dest_id_len: usize, dest_id_len: usize,
}, },
/// Wrong directive type, for example when parsing the directive field for a file directive /// Wrong directive type, for example when parsing the directive field for a file directive
/// PDU. /// PDU.
#[error("wrong directive type, found {found:?}, expected {expected:?}")]
WrongDirectiveType { WrongDirectiveType {
found: FileDirectiveType, found: FileDirectiveType,
expected: FileDirectiveType, expected: FileDirectiveType,
}, },
/// The directive type field contained a value not in the range of permitted values. This can /// The directive type field contained a value not in the range of permitted values. This can
/// also happen if an invalid value is passed to the ACK PDU constructor. /// also happen if an invalid value is passed to the ACK PDU constructor.
#[error("invalid directive type, found {found:?}, expected {expected:?}")]
InvalidDirectiveType { InvalidDirectiveType {
found: u8, found: u8,
expected: Option<FileDirectiveType>, expected: Option<FileDirectiveType>,
}, },
#[error("invalid start or end of scope value for NAK PDU")]
InvalidStartOrEndOfScopeValue, InvalidStartOrEndOfScopeValue,
/// Invalid condition code. Contains the raw detected value. /// Invalid condition code. Contains the raw detected value.
#[error("invalid condition code {0}")]
InvalidConditionCode(u8), InvalidConditionCode(u8),
/// Invalid checksum type which is not part of the checksums listed in the /// Invalid checksum type which is not part of the checksums listed in the
/// [SANA Checksum Types registry](https://sanaregistry.org/r/checksum_identifiers/). /// [SANA Checksum Types registry](https://sanaregistry.org/r/checksum_identifiers/).
#[error("invalid checksum type {0}")]
InvalidChecksumType(u8), InvalidChecksumType(u8),
#[error("file size {0} too large")]
FileSizeTooLarge(u64), FileSizeTooLarge(u64),
/// If the CRC flag for a PDU is enabled and the checksum check fails. Contains raw 16-bit CRC. /// If the CRC flag for a PDU is enabled and the checksum check fails. Contains raw 16-bit CRC.
#[error("checksum error for checksum {0}")] ChecksumError(u16),
Checksum(u16),
/// Generic error for invalid PDU formats. /// Generic error for invalid PDU formats.
#[error("generic PDU format error")] FormatError,
Format,
/// Error handling a TLV field. /// Error handling a TLV field.
#[error("PDU error: {0}")] TlvLvError(TlvLvError),
TlvLv(#[from] TlvLvError), }
impl Display for PduError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
PduError::InvalidEntityLen(raw_id) => {
write!(
f,
"invalid PDU entity ID length {raw_id}, only [1, 2, 4, 8] are allowed"
)
}
PduError::InvalidStartOrEndOfScopeValue => {
write!(f, "invalid start or end of scope for NAK PDU")
}
PduError::InvalidTransactionSeqNumLen(raw_id) => {
write!(
f,
"invalid PDUtransaction seq num length {raw_id}, only [1, 2, 4, 8] are allowed"
)
}
PduError::CfdpVersionMissmatch(raw) => {
write!(
f,
"cfdp version missmatch, found {raw}, expected {CFDP_VERSION_2}"
)
}
PduError::SourceDestIdLenMissmatch {
src_id_len,
dest_id_len,
} => {
write!(
f,
"missmatch of PDU source length {src_id_len} and destination length {dest_id_len}"
)
}
PduError::ByteConversion(e) => {
write!(f, "{}", e)
}
PduError::FileSizeTooLarge(value) => {
write!(f, "file size value {value} exceeds allowed 32 bit width")
}
PduError::WrongDirectiveType { found, expected } => {
write!(f, "found directive type {found:?}, expected {expected:?}")
}
PduError::InvalidConditionCode(raw_code) => {
write!(f, "found invalid condition code with raw value {raw_code}")
}
PduError::InvalidDirectiveType { found, expected } => {
write!(
f,
"invalid directive type value {found}, expected {expected:?}"
)
}
PduError::InvalidChecksumType(checksum_type) => {
write!(f, "invalid checksum type {checksum_type}")
}
PduError::ChecksumError(checksum) => {
write!(f, "checksum error for CRC {checksum:#04x}")
}
PduError::TlvLvError(error) => {
write!(f, "pdu tlv error: {error}")
}
PduError::FormatError => {
write!(f, "generic PDU format error")
}
}
}
}
#[cfg(feature = "std")]
impl Error for PduError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
PduError::ByteConversion(e) => Some(e),
PduError::TlvLvError(e) => Some(e),
_ => None,
}
}
}
impl From<ByteConversionError> for PduError {
#[inline]
fn from(value: ByteConversionError) -> Self {
Self::ByteConversion(value)
}
}
impl From<TlvLvError> for PduError {
#[inline]
fn from(e: TlvLvError) -> Self {
Self::TlvLvError(e)
}
} }
pub trait WritablePduPacket { pub trait WritablePduPacket {
@ -454,7 +532,7 @@ impl PduHeader {
let mut digest = CRC_CCITT_FALSE.digest(); let mut digest = CRC_CCITT_FALSE.digest();
digest.update(&buf[..self.pdu_len()]); digest.update(&buf[..self.pdu_len()]);
if digest.finalize() != 0 { if digest.finalize() != 0 {
return Err(PduError::Checksum(u16::from_be_bytes( return Err(PduError::ChecksumError(u16::from_be_bytes(
buf[self.pdu_len() - 2..self.pdu_len()].try_into().unwrap(), buf[self.pdu_len() - 2..self.pdu_len()].try_into().unwrap(),
))); )));
} }
@ -903,7 +981,7 @@ mod tests {
assert_eq!(raw_version, CFDP_VERSION_2 + 1); assert_eq!(raw_version, CFDP_VERSION_2 + 1);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"CFDP version missmatch, found 2, expected 1" "cfdp version missmatch, found 2, expected 1"
); );
} else { } else {
panic!("invalid exception: {}", error); panic!("invalid exception: {}", error);
@ -951,7 +1029,7 @@ mod tests {
assert_eq!(expected, 7); assert_eq!(expected, 7);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"byte conversion error: source slice with size 6 too small, expected at least 7 bytes" "source slice with size 6 too small, expected at least 7 bytes"
); );
} }
} }
@ -1006,7 +1084,7 @@ mod tests {
assert_eq!(dest_id_len, 2); assert_eq!(dest_id_len, 2);
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"missmatch of PDU source ID length 1 and destination ID length 2" "missmatch of PDU source length 1 and destination length 2"
); );
} }
} }
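The checksum hunk above relies on a property of CRC-16/IBM-3740 (CRC-CCITT-FALSE): if the big-endian CRC of a block is appended to that block, digesting block plus CRC yields zero. A standalone sketch with the `crc` crate, mirroring the `CRC_CCITT_FALSE` constant this crate defines; the frame bytes are arbitrary example data:

use crc::{Crc, CRC_16_IBM_3740};

const CRC_CCITT_FALSE: Crc<u16> = Crc::<u16>::new(&CRC_16_IBM_3740);

fn append_and_verify() {
    let mut frame = vec![0x20, 0x2c, 0x00, 0x05, 0x01, 0x02, 0x03];
    // Append the checksum in big-endian byte order, as done for CFDP PDUs.
    let crc = CRC_CCITT_FALSE.checksum(&frame);
    frame.extend_from_slice(&crc.to_be_bytes());

    // Verification digests the complete frame including the CRC bytes;
    // a finalized value of zero means the checksum matches.
    let mut digest = CRC_CCITT_FALSE.digest();
    digest.update(&frame);
    assert_eq!(digest.finalize(), 0);
}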

View File

@ -61,7 +61,7 @@ impl<'seg_reqs> NakPduCreator<'seg_reqs> {
start_of_scope: u32, start_of_scope: u32,
end_of_scope: u32, end_of_scope: u32,
segment_requests: &'seg_reqs [(u32, u32)], segment_requests: &'seg_reqs [(u32, u32)],
) -> Result<NakPduCreator<'seg_reqs>, PduError> { ) -> Result<NakPduCreator, PduError> {
let mut passed_segment_requests = None; let mut passed_segment_requests = None;
if !segment_requests.is_empty() { if !segment_requests.is_empty() {
passed_segment_requests = Some(SegmentRequests::U32Pairs(segment_requests)); passed_segment_requests = Some(SegmentRequests::U32Pairs(segment_requests));
@ -79,7 +79,7 @@ impl<'seg_reqs> NakPduCreator<'seg_reqs> {
start_of_scope: u64, start_of_scope: u64,
end_of_scope: u64, end_of_scope: u64,
segment_requests: &'seg_reqs [(u64, u64)], segment_requests: &'seg_reqs [(u64, u64)],
) -> Result<NakPduCreator<'seg_reqs>, PduError> { ) -> Result<NakPduCreator, PduError> {
let mut passed_segment_requests = None; let mut passed_segment_requests = None;
if !segment_requests.is_empty() { if !segment_requests.is_empty() {
passed_segment_requests = Some(SegmentRequests::U64Pairs(segment_requests)); passed_segment_requests = Some(SegmentRequests::U64Pairs(segment_requests));
@ -97,7 +97,7 @@ impl<'seg_reqs> NakPduCreator<'seg_reqs> {
start_of_scope: u64, start_of_scope: u64,
end_of_scope: u64, end_of_scope: u64,
segment_requests: Option<SegmentRequests<'seg_reqs>>, segment_requests: Option<SegmentRequests<'seg_reqs>>,
) -> Result<NakPduCreator<'seg_reqs>, PduError> { ) -> Result<NakPduCreator, PduError> {
// Force correct direction flag. // Force correct direction flag.
pdu_header.pdu_conf.direction = Direction::TowardsSender; pdu_header.pdu_conf.direction = Direction::TowardsSender;
if let Some(ref segment_requests) = segment_requests { if let Some(ref segment_requests) = segment_requests {
@ -269,7 +269,7 @@ impl SegReqFromBytes for u64 {
} }
} }
impl<T> Iterator for SegmentRequestIter<'_, T> impl<'a, T> Iterator for SegmentRequestIter<'a, T>
where where
T: SegReqFromBytes, T: SegReqFromBytes,
{ {
@ -282,8 +282,8 @@ where
} }
} }
impl<'a> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'_, u32> { impl<'a, 'b> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'b, u32> {
fn eq(&self, other: &SegmentRequests<'a>) -> bool { fn eq(&self, other: &SegmentRequests) -> bool {
match other { match other {
SegmentRequests::U32Pairs(pairs) => self.compare_pairs(pairs), SegmentRequests::U32Pairs(pairs) => self.compare_pairs(pairs),
SegmentRequests::U64Pairs(pairs) => { SegmentRequests::U64Pairs(pairs) => {
@ -296,8 +296,8 @@ impl<'a> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'_, u32> {
} }
} }
impl<'a> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'_, u64> { impl<'a, 'b> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'b, u64> {
fn eq(&self, other: &SegmentRequests<'a>) -> bool { fn eq(&self, other: &SegmentRequests) -> bool {
match other { match other {
SegmentRequests::U32Pairs(pairs) => { SegmentRequests::U32Pairs(pairs) => {
if pairs.is_empty() && self.seq_req_raw.is_empty() { if pairs.is_empty() && self.seq_req_raw.is_empty() {
@ -310,7 +310,7 @@ impl<'a> PartialEq<SegmentRequests<'a>> for SegmentRequestIter<'_, u64> {
} }
} }
impl<T> SegmentRequestIter<'_, T> impl<'a, T> SegmentRequestIter<'a, T>
where where
T: SegReqFromBytes + PartialEq, T: SegReqFromBytes + PartialEq,
{ {
@ -374,11 +374,11 @@ impl CfdpPdu for NakPduReader<'_> {
} }
impl<'seg_reqs> NakPduReader<'seg_reqs> { impl<'seg_reqs> NakPduReader<'seg_reqs> {
pub fn new(buf: &'seg_reqs [u8]) -> Result<NakPduReader<'seg_reqs>, PduError> { pub fn new(buf: &'seg_reqs [u8]) -> Result<NakPduReader, PduError> {
Self::from_bytes(buf) Self::from_bytes(buf)
} }
pub fn from_bytes(buf: &'seg_reqs [u8]) -> Result<NakPduReader<'seg_reqs>, PduError> { pub fn from_bytes(buf: &'seg_reqs [u8]) -> Result<NakPduReader, PduError> {
let (pdu_header, mut current_idx) = PduHeader::from_bytes(buf)?; let (pdu_header, mut current_idx) = PduHeader::from_bytes(buf)?;
let full_len_without_crc = pdu_header.verify_length_and_checksum(buf)?; let full_len_without_crc = pdu_header.verify_length_and_checksum(buf)?;
// Minimum length of 9: 1 byte directive field and start and end of scope for normal file // Minimum length of 9: 1 byte directive field and start and end of scope for normal file
@ -474,7 +474,7 @@ impl<'seg_reqs> NakPduReader<'seg_reqs> {
} }
} }
impl<'a> PartialEq<NakPduCreator<'a>> for NakPduReader<'_> { impl<'a, 'b> PartialEq<NakPduCreator<'a>> for NakPduReader<'b> {
fn eq(&self, other: &NakPduCreator<'a>) -> bool { fn eq(&self, other: &NakPduCreator<'a>) -> bool {
if self.pdu_header() != other.pdu_header() if self.pdu_header() != other.pdu_header()
|| self.end_of_scope() != other.end_of_scope() || self.end_of_scope() != other.end_of_scope()
@ -751,7 +751,7 @@ mod tests {
if let PduError::InvalidStartOrEndOfScopeValue = error { if let PduError::InvalidStartOrEndOfScopeValue = error {
assert_eq!( assert_eq!(
error.to_string(), error.to_string(),
"invalid start or end of scope value for NAK PDU" "invalid start or end of scope for NAK PDU"
); );
} else { } else {
panic!("unexpected error {error}"); panic!("unexpected error {error}");
@ -796,7 +796,7 @@ mod tests {
nak_vec[nak_pdu.len_written() - 1] -= 1; nak_vec[nak_pdu.len_written() - 1] -= 1;
let nak_pdu_deser = NakPduReader::new(&nak_vec); let nak_pdu_deser = NakPduReader::new(&nak_vec);
assert!(nak_pdu_deser.is_err()); assert!(nak_pdu_deser.is_err());
if let Err(PduError::Checksum(raw)) = nak_pdu_deser { if let Err(PduError::ChecksumError(raw)) = nak_pdu_deser {
assert_eq!( assert_eq!(
raw, raw,
u16::from_be_bytes(nak_vec[nak_pdu.len_written() - 2..].try_into().unwrap()) u16::from_be_bytes(nak_vec[nak_pdu.len_written() - 2..].try_into().unwrap())
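Most of the nak.rs signature changes above only toggle between named and elided lifetimes, which are equivalent to the compiler. A tiny illustration with a hypothetical type:

// The '_ placeholder in an impl header introduces a fresh lifetime parameter,
// so these two impls are interchangeable:
//   impl<'a, T> Iterator for SegmentRequestIter<'a, T> { ... }
//   impl<T> Iterator for SegmentRequestIter<'_, T> { ... }

struct Reader<'buf> {
    data: &'buf [u8],
}

impl<'buf> Reader<'buf> {
    // Fully spelled-out return lifetime...
    fn new_explicit(data: &'buf [u8]) -> Result<Reader<'buf>, ()> {
        Ok(Reader { data })
    }

    // ...and the elided form; with a single input lifetime both resolve to
    // the same signature.
    fn new_elided(data: &'buf [u8]) -> Result<Reader, ()> {
        Ok(Reader { data })
    }
}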

View File

@ -15,7 +15,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::{InvalidTlvTypeFieldError, TlvLvDataTooLargeError}; use super::TlvLvDataTooLarge;
pub mod msg_to_user; pub mod msg_to_user;
@ -153,14 +153,14 @@ pub struct Tlv<'data> {
} }
impl<'data> Tlv<'data> { impl<'data> Tlv<'data> {
pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Tlv, TlvLvDataTooLargeError> { pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Tlv, TlvLvDataTooLarge> {
Ok(Tlv { Ok(Tlv {
tlv_type_field: TlvTypeField::Standard(tlv_type), tlv_type_field: TlvTypeField::Standard(tlv_type),
lv: Lv::new(data)?, lv: Lv::new(data)?,
}) })
} }
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Tlv, TlvLvDataTooLargeError> { pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Tlv, TlvLvDataTooLarge> {
Ok(Tlv { Ok(Tlv {
tlv_type_field: TlvTypeField::Custom(tlv_type), tlv_type_field: TlvTypeField::Custom(tlv_type),
lv: Lv::new(data)?, lv: Lv::new(data)?,
@ -179,7 +179,7 @@ impl<'data> Tlv<'data> {
/// bytestream with the exact size of the expected TLV. This function will take care /// bytestream with the exact size of the expected TLV. This function will take care
/// of parsing the length byte, and the length of the parsed TLV can be retrieved using /// of parsing the length byte, and the length of the parsed TLV can be retrieved using
/// [Self::len_full]. /// [Self::len_full].
pub fn from_bytes(buf: &'data [u8]) -> Result<Tlv<'data>, ByteConversionError> { pub fn from_bytes(buf: &'data [u8]) -> Result<Tlv<'data>, TlvLvError> {
generic_len_check_deserialization(buf, MIN_TLV_LEN)?; generic_len_check_deserialization(buf, MIN_TLV_LEN)?;
let mut tlv = Self { let mut tlv = Self {
tlv_type_field: TlvTypeField::from(buf[0]), tlv_type_field: TlvTypeField::from(buf[0]),
@ -239,6 +239,8 @@ impl GenericTlv for Tlv<'_> {
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
pub mod alloc_mod { pub mod alloc_mod {
use crate::cfdp::TlvLvDataTooLarge;
use super::*; use super::*;
/// Owned variant of [Tlv] which is consequently [Clone]able and does not have a lifetime /// Owned variant of [Tlv] which is consequently [Clone]able and does not have a lifetime
@ -252,18 +254,24 @@ pub mod alloc_mod {
} }
impl TlvOwned { impl TlvOwned {
pub fn new(tlv_type: TlvType, data: &[u8]) -> Self { pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Self, TlvLvDataTooLarge> {
Self { if data.len() > u8::MAX as usize {
return Err(TlvLvDataTooLarge(data.len()));
}
Ok(Self {
tlv_type_field: TlvTypeField::Standard(tlv_type), tlv_type_field: TlvTypeField::Standard(tlv_type),
data: data.to_vec(), data: data.to_vec(),
} })
} }
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Self { pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Self, TlvLvDataTooLarge> {
Self { if data.len() > u8::MAX as usize {
return Err(TlvLvDataTooLarge(data.len()));
}
Ok(Self {
tlv_type_field: TlvTypeField::Custom(tlv_type), tlv_type_field: TlvTypeField::Custom(tlv_type),
data: data.to_vec(), data: data.to_vec(),
} })
} }
/// Creates a TLV with an empty value field. /// Creates a TLV with an empty value field.
@ -335,7 +343,7 @@ impl EntityIdTlv {
Self { entity_id } Self { entity_id }
} }
fn len_check(buf: &[u8]) -> Result<(), ByteConversionError> { fn check_min_len(buf: &[u8]) -> Result<(), ByteConversionError> {
if buf.len() < 2 { if buf.len() < 2 {
return Err(ByteConversionError::ToSliceTooSmall { return Err(ByteConversionError::ToSliceTooSmall {
found: buf.len(), found: buf.len(),
@ -358,7 +366,7 @@ impl EntityIdTlv {
} }
pub fn from_bytes(buf: &[u8]) -> Result<Self, TlvLvError> { pub fn from_bytes(buf: &[u8]) -> Result<Self, TlvLvError> {
Self::len_check(buf)?; Self::check_min_len(buf)?;
verify_tlv_type(buf[0], TlvType::EntityId)?; verify_tlv_type(buf[0], TlvType::EntityId)?;
let len = buf[1]; let len = buf[1];
if len != 1 && len != 2 && len != 4 && len != 8 { if len != 1 && len != 2 && len != 4 && len != 8 {
@ -369,19 +377,31 @@ impl EntityIdTlv {
Ok(Self { entity_id }) Ok(Self { entity_id })
} }
/// Convert to a generic [Tlv], which also erases the programmatic type information. /// Convert to a generic [Tlv], which also erases the type information.
pub fn to_tlv(self, buf: &mut [u8]) -> Result<Tlv, ByteConversionError> { pub fn to_tlv(self, buf: &mut [u8]) -> Result<Tlv, ByteConversionError> {
Self::len_check(buf)?; Self::check_min_len(buf)?;
self.entity_id self.entity_id
.write_to_be_bytes(&mut buf[2..2 + self.entity_id.size()])?; .write_to_be_bytes(&mut buf[2..2 + self.entity_id.size()])?;
// Can't fail. if buf.len() < self.len_value() {
return Err(ByteConversionError::ToSliceTooSmall {
found: buf.len(),
expected: self.len_value(),
});
}
// We performed all checks necessary to ensure this call never panics.
Ok(Tlv::new(TlvType::EntityId, &buf[2..2 + self.entity_id.size()]).unwrap()) Ok(Tlv::new(TlvType::EntityId, &buf[2..2 + self.entity_id.size()]).unwrap())
} }
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
// Unwrap is okay here, entity ID should never be larger than maximum allowed size.
TlvOwned::new(TlvType::EntityId, &self.entity_id.to_vec()).unwrap()
}
} }
impl WritableTlv for EntityIdTlv { impl WritableTlv for EntityIdTlv {
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> { fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
Self::len_check(buf)?; Self::check_min_len(buf)?;
buf[0] = TlvType::EntityId as u8; buf[0] = TlvType::EntityId as u8;
buf[1] = self.entity_id.size() as u8; buf[1] = self.entity_id.size() as u8;
Ok(2 + self.entity_id.write_to_be_bytes(&mut buf[2..])?) Ok(2 + self.entity_id.write_to_be_bytes(&mut buf[2..])?)
@ -398,26 +418,24 @@ impl GenericTlv for EntityIdTlv {
} }
} }
impl TryFrom<Tlv<'_>> for EntityIdTlv { impl<'data> TryFrom<Tlv<'data>> for EntityIdTlv {
type Error = TlvLvError; type Error = TlvLvError;
fn try_from(value: Tlv) -> Result<Self, TlvLvError> { fn try_from(value: Tlv) -> Result<Self, Self::Error> {
match value.tlv_type_field { match value.tlv_type_field {
TlvTypeField::Standard(tlv_type) => { TlvTypeField::Standard(tlv_type) => {
if tlv_type != TlvType::EntityId { if tlv_type != TlvType::EntityId {
return Err(InvalidTlvTypeFieldError { return Err(TlvLvError::InvalidTlvTypeField {
found: tlv_type as u8, found: tlv_type as u8,
expected: Some(TlvType::EntityId as u8), expected: Some(TlvType::EntityId as u8),
} });
.into());
} }
} }
TlvTypeField::Custom(val) => { TlvTypeField::Custom(val) => {
return Err(InvalidTlvTypeFieldError { return Err(TlvLvError::InvalidTlvTypeField {
found: val, found: val,
expected: Some(TlvType::EntityId as u8), expected: Some(TlvType::EntityId as u8),
} });
.into());
} }
} }
let len_value = value.value().len(); let len_value = value.value().len();
@ -622,6 +640,12 @@ impl<'first_name, 'second_name> FilestoreRequestTlv<'first_name, 'second_name> {
}, },
}) })
} }
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
// The API should ensure the data field is never too large, so unwrapping here is okay.
TlvOwned::new(TlvType::FilestoreRequest, &self.to_vec()[2..]).unwrap()
}
} }
impl WritableTlv for FilestoreRequestTlv<'_, '_> { impl WritableTlv for FilestoreRequestTlv<'_, '_> {
@ -807,6 +831,12 @@ impl<'first_name, 'second_name, 'fs_msg> FilestoreResponseTlv<'first_name, 'seco
filestore_message, filestore_message,
}) })
} }
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
// The API should ensure the data field is never too large, so unwrap is okay here.
TlvOwned::new(TlvType::FilestoreResponse, &self.to_vec()[2..]).unwrap()
}
} }
impl WritableTlv for FilestoreResponseTlv<'_, '_, '_> { impl WritableTlv for FilestoreResponseTlv<'_, '_, '_> {
@ -848,16 +878,13 @@ impl GenericTlv for FilestoreResponseTlv<'_, '_, '_> {
} }
} }
pub(crate) fn verify_tlv_type( pub(crate) fn verify_tlv_type(raw_type: u8, expected_tlv_type: TlvType) -> Result<(), TlvLvError> {
raw_type: u8, let tlv_type = TlvType::try_from(raw_type).map_err(|_| TlvLvError::InvalidTlvTypeField {
expected_tlv_type: TlvType,
) -> Result<(), InvalidTlvTypeFieldError> {
let tlv_type = TlvType::try_from(raw_type).map_err(|_| InvalidTlvTypeFieldError {
found: raw_type, found: raw_type,
expected: Some(expected_tlv_type.into()), expected: Some(expected_tlv_type.into()),
})?; })?;
if tlv_type != expected_tlv_type { if tlv_type != expected_tlv_type {
return Err(InvalidTlvTypeFieldError { return Err(TlvLvError::InvalidTlvTypeField {
found: tlv_type as u8, found: tlv_type as u8,
expected: Some(expected_tlv_type as u8), expected: Some(expected_tlv_type as u8),
}); });
@ -1052,15 +1079,11 @@ mod tests {
let tlv_res = Tlv::new(TlvType::MsgToUser, &buf_too_large); let tlv_res = Tlv::new(TlvType::MsgToUser, &buf_too_large);
assert!(tlv_res.is_err()); assert!(tlv_res.is_err());
let error = tlv_res.unwrap_err(); let error = tlv_res.unwrap_err();
match error { assert_eq!(error.0, u8::MAX as usize + 1);
TlvLvDataTooLargeError(size) => { assert_eq!(
assert_eq!(size, u8::MAX as usize + 1); error.to_string(),
assert_eq!( "data with size 256 larger than allowed 255 bytes"
error.to_string(), );
"data with size 256 larger than allowed 255 bytes"
);
}
}
} }
#[test] #[test]
@ -1369,8 +1392,7 @@ mod tests {
let error = EntityIdTlv::try_from(msg_to_user_tlv); let error = EntityIdTlv::try_from(msg_to_user_tlv);
assert!(error.is_err()); assert!(error.is_err());
let error = error.unwrap_err(); let error = error.unwrap_err();
if let TlvLvError::InvalidTlvTypeField(InvalidTlvTypeFieldError { found, expected }) = error if let TlvLvError::InvalidTlvTypeField { found, expected } = error {
{
assert_eq!(found, TlvType::MsgToUser as u8); assert_eq!(found, TlvType::MsgToUser as u8);
assert_eq!(expected, Some(TlvType::EntityId as u8)); assert_eq!(expected, Some(TlvType::EntityId as u8));
assert_eq!( assert_eq!(
@ -1435,7 +1457,7 @@ mod tests {
let entity_id = UbfU8::new(5); let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4]; let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok()); assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = TlvOwned::new(TlvType::EntityId, &buf[0..1]); let tlv_res = TlvOwned::new(TlvType::EntityId, &buf[0..1]).expect("creating TLV failed");
assert_eq!( assert_eq!(
tlv_res.tlv_type_field(), tlv_res.tlv_type_field(),
TlvTypeField::Standard(TlvType::EntityId) TlvTypeField::Standard(TlvType::EntityId)
@ -1462,7 +1484,7 @@ mod tests {
#[test] #[test]
fn test_owned_tlv_custom_type() { fn test_owned_tlv_custom_type() {
let tlv_res = TlvOwned::new_with_custom_type(32, &[]); let tlv_res = TlvOwned::new_with_custom_type(32, &[]).unwrap();
assert_eq!(tlv_res.tlv_type_field(), TlvTypeField::Custom(32)); assert_eq!(tlv_res.tlv_type_field(), TlvTypeField::Custom(32));
assert_eq!(tlv_res.len_full(), 2); assert_eq!(tlv_res.len_full(), 2);
assert_eq!(tlv_res.value().len(), 0); assert_eq!(tlv_res.value().len(), 0);
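The test changes above follow from the API difference visible in `alloc_mod`: the right-hand `TlvOwned::new` validates the value length and returns a `Result`, while the left-hand version is infallible. A short usage sketch against the fallible signature; with the infallible variant the `Result` handling is simply dropped:

use spacepackets::cfdp::tlv::{TlvOwned, TlvType};

fn tlv_owned_demo() {
    // A four byte value fits into the single TLV length byte.
    let msg_to_user: [u8; 4] = [1, 2, 3, 4];
    let tlv = TlvOwned::new(TlvType::MsgToUser, &msg_to_user).expect("TLV too large");
    // Full length is the 2 byte type/length header plus the value.
    assert_eq!(tlv.len_full(), 2 + msg_to_user.len());

    // A value longer than u8::MAX bytes is rejected.
    let too_large = [0u8; 256];
    assert!(TlvOwned::new(TlvType::MsgToUser, &too_large).is_err());
}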

View File

@ -2,10 +2,7 @@
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use super::TlvOwned; use super::TlvOwned;
use super::{GenericTlv, ReadableTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv}; use super::{GenericTlv, ReadableTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv};
use crate::{ use crate::{cfdp::TlvLvDataTooLarge, ByteConversionError};
cfdp::{InvalidTlvTypeFieldError, TlvLvDataTooLargeError},
ByteConversionError,
};
use delegate::delegate; use delegate::delegate;
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
@ -15,7 +12,7 @@ pub struct MsgToUserTlv<'data> {
impl<'data> MsgToUserTlv<'data> { impl<'data> MsgToUserTlv<'data> {
/// Create a new message to user TLV where the type field is set correctly. /// Create a new message to user TLV where the type field is set correctly.
pub fn new(value: &'data [u8]) -> Result<MsgToUserTlv<'data>, TlvLvDataTooLargeError> { pub fn new(value: &'data [u8]) -> Result<MsgToUserTlv<'data>, TlvLvDataTooLarge> {
Ok(Self { Ok(Self {
tlv: Tlv::new(TlvType::MsgToUser, value)?, tlv: Tlv::new(TlvType::MsgToUser, value)?,
}) })
@ -65,19 +62,17 @@ impl<'data> MsgToUserTlv<'data> {
match msg_to_user.tlv.tlv_type_field() { match msg_to_user.tlv.tlv_type_field() {
TlvTypeField::Standard(tlv_type) => { TlvTypeField::Standard(tlv_type) => {
if tlv_type != TlvType::MsgToUser { if tlv_type != TlvType::MsgToUser {
return Err(InvalidTlvTypeFieldError { return Err(TlvLvError::InvalidTlvTypeField {
found: tlv_type as u8, found: tlv_type as u8,
expected: Some(TlvType::MsgToUser as u8), expected: Some(TlvType::MsgToUser as u8),
} });
.into());
} }
} }
TlvTypeField::Custom(raw) => { TlvTypeField::Custom(raw) => {
return Err(InvalidTlvTypeFieldError { return Err(TlvLvError::InvalidTlvTypeField {
found: raw, found: raw,
expected: Some(TlvType::MsgToUser as u8), expected: Some(TlvType::MsgToUser as u8),
} });
.into());
} }
} }
Ok(msg_to_user) Ok(msg_to_user)
@ -210,9 +205,9 @@ mod tests {
fn test_reserved_msg_deserialization_invalid_type() { fn test_reserved_msg_deserialization_invalid_type() {
let trash: [u8; 5] = [TlvType::FlowLabel as u8, 3, 1, 2, 3]; let trash: [u8; 5] = [TlvType::FlowLabel as u8, 3, 1, 2, 3];
let error = MsgToUserTlv::from_bytes(&trash).unwrap_err(); let error = MsgToUserTlv::from_bytes(&trash).unwrap_err();
if let TlvLvError::InvalidTlvTypeField(inner) = error { if let TlvLvError::InvalidTlvTypeField { found, expected } = error {
assert_eq!(inner.found, TlvType::FlowLabel as u8); assert_eq!(found, TlvType::FlowLabel as u8);
assert_eq!(inner.expected, Some(TlvType::MsgToUser as u8)); assert_eq!(expected, Some(TlvType::MsgToUser as u8));
} else { } else {
panic!("Wrong error type returned: {:?}", error); panic!("Wrong error type returned: {:?}", error);
} }

View File

@ -6,11 +6,13 @@
use crate::{ByteConversionError, CcsdsPacket, CRC_CCITT_FALSE}; use crate::{ByteConversionError, CcsdsPacket, CRC_CCITT_FALSE};
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
use core::fmt::Debug; use core::fmt::{Debug, Display, Formatter};
use core::mem::size_of; use core::mem::size_of;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::error::Error;
pub mod event; pub mod event;
pub mod hk; pub mod hk;
@ -146,19 +148,50 @@ pub enum PfcReal {
DoubleMilStd = 4, DoubleMilStd = 4,
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))] #[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum PusError { pub enum PusError {
#[error("PUS version {0:?} not supported")]
VersionNotSupported(PusVersion), VersionNotSupported(PusVersion),
#[error("checksum verification for crc16 {0:#06x} failed")]
ChecksumFailure(u16), ChecksumFailure(u16),
/// CRC16 needs to be calculated first /// CRC16 needs to be calculated first
//#[error("crc16 was not calculated")] CrcCalculationMissing,
//CrcCalculationMissing, ByteConversion(ByteConversionError),
#[error("pus error: {0}")] }
ByteConversion(#[from] ByteConversionError),
impl Display for PusError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
PusError::VersionNotSupported(v) => {
write!(f, "PUS version {v:?} not supported")
}
PusError::ChecksumFailure(crc) => {
write!(f, "checksum verification for crc16 {crc:#06x} failed")
}
PusError::CrcCalculationMissing => {
write!(f, "crc16 was not calculated")
}
PusError::ByteConversion(e) => {
write!(f, "pus error: {e}")
}
}
}
}
#[cfg(feature = "std")]
impl Error for PusError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
if let PusError::ByteConversion(e) = self {
return Some(e);
}
None
}
}
impl From<ByteConversionError> for PusError {
fn from(e: ByteConversionError) -> Self {
PusError::ByteConversion(e)
}
} }
/// Generic trait to describe common attributes for both PUS Telecommands (TC) and PUS Telemetry /// Generic trait to describe common attributes for both PUS Telecommands (TC) and PUS Telemetry

View File

@ -45,7 +45,7 @@ use delegate::delegate;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy::{FromBytes, IntoBytes}; use zerocopy::AsBytes;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
@ -86,9 +86,9 @@ pub trait GenericPusTcSecondaryHeader {
pub mod zc { pub mod zc {
use crate::ecss::tc::GenericPusTcSecondaryHeader; use crate::ecss::tc::GenericPusTcSecondaryHeader;
use crate::ecss::{PusError, PusVersion}; use crate::ecss::{PusError, PusVersion};
use zerocopy::{FromBytes, Immutable, IntoBytes, NetworkEndian, Unaligned, U16}; use zerocopy::{AsBytes, FromBytes, FromZeroes, NetworkEndian, Unaligned, U16};
#[derive(FromBytes, IntoBytes, Immutable, Unaligned)] #[derive(FromZeroes, FromBytes, AsBytes, Unaligned)]
#[repr(C)] #[repr(C)]
pub struct PusTcSecondaryHeader { pub struct PusTcSecondaryHeader {
version_ack: u8, version_ack: u8,
@ -115,7 +115,7 @@ pub mod zc {
impl GenericPusTcSecondaryHeader for PusTcSecondaryHeader { impl GenericPusTcSecondaryHeader for PusTcSecondaryHeader {
#[inline] #[inline]
fn pus_version(&self) -> PusVersion { fn pus_version(&self) -> PusVersion {
PusVersion::try_from((self.version_ack >> 4) & 0b1111).unwrap_or(PusVersion::Invalid) PusVersion::try_from(self.version_ack >> 4 & 0b1111).unwrap_or(PusVersion::Invalid)
} }
#[inline] #[inline]
@ -138,6 +138,16 @@ pub mod zc {
self.source_id.get() self.source_id.get()
} }
} }
impl PusTcSecondaryHeader {
pub fn write_to_bytes(&self, slice: &mut [u8]) -> Option<()> {
self.write_to(slice)
}
pub fn from_bytes(slice: &[u8]) -> Option<Self> {
Self::read_from(slice)
}
}
} }
#[derive(PartialEq, Eq, Copy, Clone, Debug)] #[derive(PartialEq, Eq, Copy, Clone, Debug)]
@ -382,8 +392,8 @@ impl WritablePusPacket for PusTcCreator<'_> {
curr_idx += CCSDS_HEADER_LEN; curr_idx += CCSDS_HEADER_LEN;
let sec_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap(); let sec_header = zc::PusTcSecondaryHeader::try_from(self.sec_header).unwrap();
sec_header sec_header
.write_to(&mut slice[curr_idx..curr_idx + tc_header_len]) .write_to_bytes(&mut slice[curr_idx..curr_idx + tc_header_len])
.map_err(|_| ByteConversionError::ZeroCopyToError)?; .ok_or(ByteConversionError::ZeroCopyToError)?;
curr_idx += tc_header_len; curr_idx += tc_header_len;
slice[curr_idx..curr_idx + self.app_data.len()].copy_from_slice(self.app_data); slice[curr_idx..curr_idx + self.app_data.len()].copy_from_slice(self.app_data);
@ -492,10 +502,10 @@ impl<'raw_data> PusTcReader<'raw_data> {
} }
.into()); .into());
} }
let sec_header = zc::PusTcSecondaryHeader::read_from_bytes( let sec_header = zc::PusTcSecondaryHeader::from_bytes(
&slice[current_idx..current_idx + PUC_TC_SECONDARY_HEADER_LEN], &slice[current_idx..current_idx + PUC_TC_SECONDARY_HEADER_LEN],
) )
.map_err(|_| ByteConversionError::ZeroCopyFromError)?; .ok_or(ByteConversionError::ZeroCopyFromError)?;
current_idx += PUC_TC_SECONDARY_HEADER_LEN; current_idx += PUC_TC_SECONDARY_HEADER_LEN;
let raw_data = &slice[0..total_len]; let raw_data = &slice[0..total_len];
let pus_tc = Self { let pus_tc = Self {
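The hunks above track the zerocopy 0.7 to 0.8 migration: `AsBytes`/`FromZeroes`/`read_from` become `IntoBytes`/`Immutable`/`read_from_bytes`, and `Option` return values become `Result`. A self-contained sketch of the 0.8-style pattern used on the left-hand side, mirroring the import line shown above; `DemoSecHeader` is illustrative, not the crate's real secondary header:

use zerocopy::{FromBytes, Immutable, IntoBytes, NetworkEndian, Unaligned, U16};

// Illustrative unaligned, padding-free header layout.
#[derive(FromBytes, IntoBytes, Immutable, Unaligned)]
#[repr(C)]
struct DemoSecHeader {
    version_ack: u8,
    service: u8,
    subservice: u8,
    source_id: U16<NetworkEndian>,
}

fn zerocopy_roundtrip() {
    let header = DemoSecHeader {
        version_ack: 0x2F,
        service: 17,
        subservice: 1,
        source_id: U16::new(0x0102),
    };
    let mut buf = [0u8; 5];
    // zerocopy 0.8: write_to returns a Result instead of 0.7's Option.
    header.write_to(&mut buf[..]).expect("target slice size mismatch");
    // zerocopy 0.8: read_from_bytes replaces 0.7's read_from.
    let read_back = DemoSecHeader::read_from_bytes(&buf[..]).expect("source slice size mismatch");
    assert_eq!(read_back.source_id.get(), 0x0102);
}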

View File

@ -54,7 +54,7 @@ use crate::{
use core::mem::size_of; use core::mem::size_of;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy::{FromBytes, IntoBytes}; use zerocopy::AsBytes;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::vec::Vec; use alloc::vec::Vec;
@ -83,9 +83,9 @@ pub trait GenericPusTmSecondaryHeader {
pub mod zc { pub mod zc {
use super::GenericPusTmSecondaryHeader; use super::GenericPusTmSecondaryHeader;
use crate::ecss::{PusError, PusVersion}; use crate::ecss::{PusError, PusVersion};
use zerocopy::{FromBytes, Immutable, IntoBytes, NetworkEndian, Unaligned, U16}; use zerocopy::{AsBytes, FromBytes, FromZeroes, NetworkEndian, Unaligned, U16};
#[derive(FromBytes, IntoBytes, Immutable, Unaligned)] #[derive(FromBytes, FromZeroes, AsBytes, Unaligned)]
#[repr(C)] #[repr(C)]
pub struct PusTmSecHeaderWithoutTimestamp { pub struct PusTmSecHeaderWithoutTimestamp {
pus_version_and_sc_time_ref_status: u8, pus_version_and_sc_time_ref_status: u8,
@ -117,10 +117,20 @@ pub mod zc {
} }
} }
impl PusTmSecHeaderWithoutTimestamp {
pub fn write_to_bytes(&self, slice: &mut [u8]) -> Option<()> {
self.write_to(slice)
}
pub fn from_bytes(slice: &[u8]) -> Option<Self> {
Self::read_from(slice)
}
}
impl GenericPusTmSecondaryHeader for PusTmSecHeaderWithoutTimestamp { impl GenericPusTmSecondaryHeader for PusTmSecHeaderWithoutTimestamp {
#[inline] #[inline]
fn pus_version(&self) -> PusVersion { fn pus_version(&self) -> PusVersion {
PusVersion::try_from((self.pus_version_and_sc_time_ref_status >> 4) & 0b1111) PusVersion::try_from(self.pus_version_and_sc_time_ref_status >> 4 & 0b1111)
.unwrap_or(PusVersion::Invalid) .unwrap_or(PusVersion::Invalid)
} }
@ -403,8 +413,8 @@ impl<'time, 'src_data> PusTmCreator<'time, 'src_data> {
let sec_header_len = size_of::<zc::PusTmSecHeaderWithoutTimestamp>(); let sec_header_len = size_of::<zc::PusTmSecHeaderWithoutTimestamp>();
let sec_header = zc::PusTmSecHeaderWithoutTimestamp::try_from(self.sec_header).unwrap(); let sec_header = zc::PusTmSecHeaderWithoutTimestamp::try_from(self.sec_header).unwrap();
sec_header sec_header
.write_to(&mut slice[curr_idx..curr_idx + sec_header_len]) .write_to_bytes(&mut slice[curr_idx..curr_idx + sec_header_len])
.map_err(|_| ByteConversionError::ZeroCopyToError)?; .ok_or(ByteConversionError::ZeroCopyToError)?;
curr_idx += sec_header_len; curr_idx += sec_header_len;
slice[curr_idx..curr_idx + self.sec_header.timestamp.len()] slice[curr_idx..curr_idx + self.sec_header.timestamp.len()]
.copy_from_slice(self.sec_header.timestamp); .copy_from_slice(self.sec_header.timestamp);
@ -561,10 +571,10 @@ impl<'raw_data> PusTmReader<'raw_data> {
} }
.into()); .into());
} }
let sec_header_zc = zc::PusTmSecHeaderWithoutTimestamp::read_from_bytes( let sec_header_zc = zc::PusTmSecHeaderWithoutTimestamp::from_bytes(
&slice[current_idx..current_idx + PUS_TM_MIN_SEC_HEADER_LEN], &slice[current_idx..current_idx + PUS_TM_MIN_SEC_HEADER_LEN],
) )
.map_err(|_| ByteConversionError::ZeroCopyFromError)?; .ok_or(ByteConversionError::ZeroCopyFromError)?;
current_idx += PUS_TM_MIN_SEC_HEADER_LEN; current_idx += PUS_TM_MIN_SEC_HEADER_LEN;
let zc_sec_header_wrapper = zc::PusTmSecHeader { let zc_sec_header_wrapper = zc::PusTmSecHeader {
zc_header: sec_header_zc, zc_header: sec_header_zc,
@ -700,7 +710,7 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
if raw_tm_len < CCSDS_HEADER_LEN + PUS_TM_MIN_SEC_HEADER_LEN + timestamp_len { if raw_tm_len < CCSDS_HEADER_LEN + PUS_TM_MIN_SEC_HEADER_LEN + timestamp_len {
return None; return None;
} }
let sp_header = crate::zc::SpHeader::read_from_bytes(&raw_tm[0..CCSDS_HEADER_LEN]).unwrap(); let sp_header = crate::zc::SpHeader::from_bytes(&raw_tm[0..CCSDS_HEADER_LEN]).unwrap();
if raw_tm_len < sp_header.total_len() { if raw_tm_len < sp_header.total_len() {
return None; return None;
} }
@ -741,7 +751,7 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
#[inline] #[inline]
pub fn sp_header(&self) -> crate::zc::SpHeader { pub fn sp_header(&self) -> crate::zc::SpHeader {
// Valid minimum length of packet was checked before. // Valid minimum length of packet was checked before.
crate::zc::SpHeader::read_from_bytes(&self.raw_tm[0..CCSDS_HEADER_LEN]).unwrap() crate::zc::SpHeader::from_bytes(&self.raw_tm[0..CCSDS_HEADER_LEN]).unwrap()
} }
/// Helper API to generate the portion of the secondary header without a timestamp from the /// Helper API to generate the portion of the secondary header without a timestamp from the
@ -749,7 +759,7 @@ impl<'raw> PusTmZeroCopyWriter<'raw> {
#[inline] #[inline]
pub fn sec_header_without_timestamp(&self) -> PusTmSecHeaderWithoutTimestamp { pub fn sec_header_without_timestamp(&self) -> PusTmSecHeaderWithoutTimestamp {
// Valid minimum length of packet was checked before. // Valid minimum length of packet was checked before.
PusTmSecHeaderWithoutTimestamp::read_from_bytes( PusTmSecHeaderWithoutTimestamp::from_bytes(
&self.raw_tm[CCSDS_HEADER_LEN..CCSDS_HEADER_LEN + PUS_TM_MIN_SEC_HEADER_LEN], &self.raw_tm[CCSDS_HEADER_LEN..CCSDS_HEADER_LEN + PUS_TM_MIN_SEC_HEADER_LEN],
) )
.unwrap() .unwrap()
@ -1111,7 +1121,7 @@ mod tests {
} }
fn verify_ping_reply_generic( fn verify_ping_reply_generic(
tm: &(impl GenericPusTmSecondaryHeader + PusPacket), tm: &(impl CcsdsPacket + GenericPusTmSecondaryHeader + PusPacket),
has_user_data: bool, has_user_data: bool,
exp_full_len: usize, exp_full_len: usize,
) { ) {

View File

@@ -61,10 +61,15 @@ extern crate alloc;
#[cfg(any(feature = "std", test))]
extern crate std;
- use core::{fmt::Debug, hash::Hash};
+ use core::{
+ fmt::{Debug, Display, Formatter},
+ hash::Hash,
+ };
use crc::{Crc, CRC_16_IBM_3740};
use delegate::delegate;
- use zerocopy::{FromBytes, IntoBytes};
+ #[cfg(feature = "std")]
+ use std::error::Error;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
@@ -73,6 +78,7 @@ pub mod cfdp;
pub mod ecss;
pub mod seq_count;
pub mod time;
+ pub mod uslp;
pub mod util;
mod private {
@@ -88,24 +94,55 @@ pub const MAX_APID: u16 = 2u16.pow(11) - 1;
pub const MAX_SEQ_COUNT: u16 = 2u16.pow(14) - 1;
/// Generic error type when converting to and from raw byte slices.
- #[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum ByteConversionError {
/// The passed slice is too small. Returns the passed slice length and expected minimum size
- #[error("target slice with size {found} is too small, expected size of at least {expected}")]
- ToSliceTooSmall { found: usize, expected: usize },
+ ToSliceTooSmall {
+ found: usize,
+ expected: usize,
+ },
/// The provider buffer is too small. Returns the passed slice length and expected minimum size
- #[error("source slice with size {found} too small, expected at least {expected} bytes")]
- FromSliceTooSmall { found: usize, expected: usize },
+ FromSliceTooSmall {
+ found: usize,
+ expected: usize,
+ },
/// The [zerocopy] library failed to write to bytes
- #[error("zerocopy serialization error")]
ZeroCopyToError,
- /// The [zerocopy] library failed to read from bytes
- #[error("zerocopy deserialization error")]
ZeroCopyFromError,
}
+ impl Display for ByteConversionError {
+ fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
+ match self {
+ ByteConversionError::ToSliceTooSmall { found, expected } => {
+ write!(
+ f,
+ "target slice with size {} is too small, expected size of at least {}",
+ found, expected
+ )
+ }
+ ByteConversionError::FromSliceTooSmall { found, expected } => {
+ write!(
+ f,
+ "source slice with size {} too small, expected at least {} bytes",
+ found, expected
+ )
+ }
+ ByteConversionError::ZeroCopyToError => {
+ write!(f, "zerocopy serialization error")
+ }
+ ByteConversionError::ZeroCopyFromError => {
+ write!(f, "zerocopy deserialization error")
+ }
+ }
+ }
+ }
+ #[cfg(feature = "std")]
+ impl Error for ByteConversionError {}
/// CCSDS packet type enumeration.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
@@ -697,8 +734,8 @@ impl SpHeader {
expected: CCSDS_HEADER_LEN,
});
}
- let zc_header = zc::SpHeader::read_from_bytes(&buf[0..CCSDS_HEADER_LEN])
- .map_err(|_| ByteConversionError::ZeroCopyFromError)?;
+ let zc_header = zc::SpHeader::from_bytes(&buf[0..CCSDS_HEADER_LEN])
+ .ok_or(ByteConversionError::ZeroCopyFromError)?;
Ok((Self::from(zc_header), &buf[CCSDS_HEADER_LEN..]))
}
@@ -716,8 +753,8 @@ impl SpHeader {
}
let zc_header: zc::SpHeader = zc::SpHeader::from(*self);
zc_header
- .write_to(&mut buf[0..CCSDS_HEADER_LEN])
- .map_err(|_| ByteConversionError::ZeroCopyToError)?;
+ .to_bytes(&mut buf[0..CCSDS_HEADER_LEN])
+ .ok_or(ByteConversionError::ZeroCopyToError)?;
Ok(&mut buf[CCSDS_HEADER_LEN..])
}
@@ -779,9 +816,9 @@ sph_from_other!(SpHeader, crate::zc::SpHeader);
pub mod zc {
use crate::{CcsdsPacket, CcsdsPrimaryHeader, PacketId, PacketSequenceCtrl, VERSION_MASK};
use zerocopy::byteorder::NetworkEndian;
- use zerocopy::{FromBytes, Immutable, IntoBytes, Unaligned, U16};
+ use zerocopy::{AsBytes, FromBytes, FromZeroes, Unaligned, U16};
- #[derive(FromBytes, IntoBytes, Immutable, Unaligned, Debug)]
+ #[derive(FromBytes, FromZeroes, AsBytes, Unaligned, Debug)]
#[repr(C)]
pub struct SpHeader {
version_packet_id: U16<NetworkEndian>,
@@ -806,6 +843,14 @@ pub mod zc {
data_len: U16::from(data_len),
}
}
+ pub fn from_bytes(slice: &[u8]) -> Option<Self> {
+ SpHeader::read_from(slice)
+ }
+ pub fn to_bytes(&self, slice: &mut [u8]) -> Option<()> {
+ self.write_to(slice)
+ }
}
impl CcsdsPacket for SpHeader {
@@ -874,7 +919,6 @@ pub(crate) mod tests {
use postcard::{from_bytes, to_allocvec};
#[cfg(feature = "serde")]
use serde::{de::DeserializeOwned, Serialize};
- use zerocopy::FromBytes;
const CONST_SP: SpHeader = SpHeader::new(
PacketId::new_for_tc(true, 0x36),
@@ -1154,7 +1198,7 @@ pub(crate) mod tests {
#[test]
fn test_zc_sph() {
- use zerocopy::IntoBytes;
+ use zerocopy::AsBytes;
let sp_header = SpHeader::new_for_unseg_tc_checked(0x7FF, pow(2, 14) - 1, 0)
.expect("Error creating SP header");
@@ -1174,7 +1218,7 @@ pub(crate) mod tests {
assert_eq!(slice[5], 0x00);
let mut slice = [0; 6];
- sp_header_zc.write_to(slice.as_mut_slice()).unwrap();
+ sp_header_zc.write_to(slice.as_mut_slice());
assert_eq!(slice.len(), 6);
assert_eq!(slice[0], 0x17);
assert_eq!(slice[1], 0xFF);
@@ -1185,7 +1229,7 @@ pub(crate) mod tests {
let mut test_vec = vec![0_u8; 6];
let slice = test_vec.as_mut_slice();
- sp_header_zc.write_to(slice).unwrap();
+ sp_header_zc.write_to(slice);
let slice = test_vec.as_slice();
assert_eq!(slice.len(), 6);
assert_eq!(slice[0], 0x17);
@@ -1195,8 +1239,8 @@ pub(crate) mod tests {
assert_eq!(slice[4], 0x00);
assert_eq!(slice[5], 0x00);
- let sp_header = zc::SpHeader::read_from_bytes(slice);
- assert!(sp_header.is_ok());
+ let sp_header = zc::SpHeader::from_bytes(slice);
+ assert!(sp_header.is_some());
let sp_header = sp_header.unwrap();
assert_eq!(sp_header.ccsds_version(), 0b000);
assert_eq!(sp_header.packet_id_raw(), 0x17FF);
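
The hunks above swap the fallible zerocopy `read_from_bytes`/`write_to` calls for the `Option`-returning `from_bytes`/`to_bytes` wrappers added to `zc::SpHeader`, which is why the call sites now use `ok_or` instead of `map_err`. A minimal round-trip sketch of these wrappers from a downstream crate (the byte values are only illustrative):

let raw: [u8; 6] = [0x17, 0xFF, 0xC0, 0x00, 0x00, 0x00];
let header = spacepackets::zc::SpHeader::from_bytes(&raw).expect("parsing CCSDS header failed");
let mut buf = [0u8; 6];
header.to_bytes(&mut buf).expect("writing CCSDS header failed");
assert_eq!(raw, buf);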


@@ -7,13 +7,17 @@
use crate::private::Sealed;
use crate::ByteConversionError;
use core::cmp::Ordering;
- use core::fmt::Debug;
+ use core::fmt::{Debug, Display, Formatter};
use core::ops::{Add, AddAssign};
use core::time::Duration;
+ use delegate::delegate;
#[cfg(feature = "std")]
use super::StdTimestampError;
#[cfg(feature = "std")]
+ use std::error::Error;
+ #[cfg(feature = "std")]
use std::time::{SystemTime, SystemTimeError};
#[cfg(feature = "chrono")]
@@ -89,19 +93,49 @@ pub enum SubmillisPrecision {
Reserved = 0b11,
}
- #[derive(Debug, PartialEq, Eq, Copy, Clone, thiserror::Error)]
+ #[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum CdsError {
/// CCSDS days value exceeds maximum allowed size or is negative
- #[error("invalid ccsds days {0}")]
InvalidCcsdsDays(i64),
/// There are distinct constructors depending on the days field width detected in the preamble
/// field. This error will be returned if there is a missmatch.
- #[error("wrong constructor for length of day {0:?} detected in preamble")]
InvalidCtorForDaysOfLenInPreamble(LengthOfDaySegment),
- #[error("date before CCSDS epoch: {0}")]
- DateBeforeCcsdsEpoch(#[from] DateBeforeCcsdsEpochError),
+ DateBeforeCcsdsEpoch(DateBeforeCcsdsEpochError),
}
+ impl Display for CdsError {
+ fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
+ match self {
+ CdsError::InvalidCcsdsDays(days) => {
+ write!(f, "invalid ccsds days {days}")
+ }
+ CdsError::InvalidCtorForDaysOfLenInPreamble(length_of_day) => {
+ write!(
+ f,
+ "wrong constructor for length of day {length_of_day:?} detected in preamble",
+ )
+ }
+ CdsError::DateBeforeCcsdsEpoch(e) => write!(f, "date before CCSDS epoch: {e}"),
+ }
+ }
+ }
+ #[cfg(feature = "std")]
+ impl Error for CdsError {
+ fn source(&self) -> Option<&(dyn Error + 'static)> {
+ match self {
+ CdsError::DateBeforeCcsdsEpoch(e) => Some(e),
+ _ => None,
+ }
+ }
+ }
+ impl From<DateBeforeCcsdsEpochError> for CdsError {
+ fn from(value: DateBeforeCcsdsEpochError) -> Self {
+ Self::DateBeforeCcsdsEpoch(value)
+ }
+ }
pub fn length_of_day_segment_from_pfield(pfield: u8) -> LengthOfDaySegment {
@@ -266,23 +300,20 @@ impl CdsConverter for ConversionFromUnix {
self.unix_days_seconds
}
}
/// Helper struct which generates fields for the CDS time provider from a datetime.
- #[cfg(feature = "chrono")]
struct ConversionFromChronoDatetime {
unix_conversion: ConversionFromUnix,
submillis_prec: SubmillisPrecision,
submillis: u32,
}
- #[cfg(feature = "chrono")]
impl CdsCommon for ConversionFromChronoDatetime {
#[inline]
fn submillis_precision(&self) -> SubmillisPrecision {
self.submillis_prec
}
- delegate::delegate! {
+ delegate! {
to self.unix_conversion {
#[inline]
fn ms_of_day(&self) -> u32;
@@ -297,9 +328,8 @@ impl CdsCommon for ConversionFromChronoDatetime {
}
}
- #[cfg(feature = "chrono")]
impl CdsConverter for ConversionFromChronoDatetime {
- delegate::delegate! {to self.unix_conversion {
+ delegate! {to self.unix_conversion {
#[inline]
fn unix_days_seconds(&self) -> i64;
}}
@@ -336,6 +366,7 @@ impl ConversionFromChronoDatetime {
Self::new_generic(dt, SubmillisPrecision::Picoseconds)
}
+ #[cfg(feature = "chrono")]
fn new_generic(
dt: &chrono::DateTime<chrono::Utc>,
prec: SubmillisPrecision,
@@ -417,7 +448,7 @@ impl CdsCommon for ConversionFromNow {
fn submillis_precision(&self) -> SubmillisPrecision {
self.submillis_prec
}
- delegate::delegate! {
+ delegate! {
to self.unix_conversion {
fn ms_of_day(&self) -> u32;
fn ccsds_days_as_u32(&self) -> u32;
@@ -431,7 +462,7 @@ impl CdsCommon for ConversionFromNow {
#[cfg(feature = "std")]
impl CdsConverter for ConversionFromNow {
- delegate::delegate! {to self.unix_conversion { fn unix_days_seconds(&self) -> i64; }}
+ delegate! {to self.unix_conversion { fn unix_days_seconds(&self) -> i64; }}
}
#[cfg(feature = "alloc")]
@@ -569,7 +600,7 @@ impl<ProvidesDaysLen: ProvidesDaysLength> CdsTime<ProvidesDaysLen> {
));
}
let pfield = buf[0];
- match CcsdsTimeCode::try_from((pfield >> 4) & 0b111) {
+ match CcsdsTimeCode::try_from(pfield >> 4 & 0b111) {
Ok(cds_type) => match cds_type {
CcsdsTimeCode::Cds => (),
_ => {
@@ -582,7 +613,7 @@
_ => {
return Err(TimestampError::InvalidTimeCode {
expected: CcsdsTimeCode::Cds,
- found: (pfield >> 4) & 0b111,
+ found: pfield >> 4 & 0b111,
});
}
};


@@ -63,12 +63,20 @@ pub fn ccsds_time_code_from_p_field(pfield: u8) -> Result<CcsdsTimeCode, u8> {
CcsdsTimeCode::try_from(raw_bits).map_err(|_| raw_bits)
}
- #[derive(Debug, PartialEq, Eq, Copy, Clone, thiserror::Error)]
+ #[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
- #[error("date before ccsds epoch: {0:?}")]
pub struct DateBeforeCcsdsEpochError(UnixTime);
+ impl Display for DateBeforeCcsdsEpochError {
+ fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
+ write!(f, "date before ccsds epoch: {:?}", self.0)
+ }
+ }
+ #[cfg(feature = "std")]
+ impl Error for DateBeforeCcsdsEpochError {}
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]

src/uslp/mod.rs (new file, 685 lines)

@@ -0,0 +1,685 @@
//! # Support of the CCSDS Unified Space Data Link Protocol (USLP)
use crate::{ByteConversionError, CRC_CCITT_FALSE};
/// Only this version is supported by the library
pub const USLP_VERSION_NUMBER: u8 = 0b1100;
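// Illustrative sketch (not part of the module API above): the version number occupies the four
// most significant bits of the first primary header byte, so a received frame can be pre-checked
// cheaply before full parsing. PrimaryHeader::from_bytes performs the same check and returns
// UslpError::InvalidVersionNumber on a mismatch.
fn frame_has_uslp_version(raw_frame: &[u8]) -> bool {
!raw_frame.is_empty() && (raw_frame[0] >> 4) == USLP_VERSION_NUMBER
}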
/// Identifies the association of the data contained in the transfer frame.
#[derive(
Debug, Copy, Clone, PartialEq, Eq, num_enum::TryFromPrimitive, num_enum::IntoPrimitive,
)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)]
pub enum SourceOrDestField {
/// SCID refers to the source of the transfer frame.
Source = 0,
/// SCID refers to the destination of the transfer frame.
Dest = 1,
}
#[derive(
Debug, Copy, Clone, PartialEq, Eq, num_enum::TryFromPrimitive, num_enum::IntoPrimitive,
)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)]
pub enum BypassSequenceControlFlag {
/// Acceptance of this frame on the receiving end is subject to normal frame acceptance
/// checks of FARM.
SequenceControlledQoS = 0,
/// Frame Acceptance Checks of FARM by the receiving end shall be bypassed.
ExpeditedQoS = 1,
}
#[derive(
Debug, Copy, Clone, PartialEq, Eq, num_enum::TryFromPrimitive, num_enum::IntoPrimitive,
)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)]
pub enum ProtocolControlCommandFlag {
TfdfContainsUserData = 0,
TfdfContainsProtocolInfo = 1,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum UslpError {
ByteConversion(ByteConversionError),
HeaderIsTruncated,
InvalidProtocolId(u8),
InvalidConstructionRule(u8),
InvalidVersionNumber(u8),
InvalidVcid(u8),
InvalidMapId(u8),
ChecksumFailure(u16),
}
impl From<ByteConversionError> for UslpError {
fn from(value: ByteConversionError) -> Self {
Self::ByteConversion(value)
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct InvalidValueForLen {
value: u64,
len: u8,
}
#[derive(Debug, Copy, Clone, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct PrimaryHeader {
pub spacecraft_id: u16,
pub source_or_dest_field: SourceOrDestField,
pub vc_id: u8,
pub map_id: u8,
frame_len_field: u16,
pub sequence_control_flag: BypassSequenceControlFlag,
pub protocol_control_command_flag: ProtocolControlCommandFlag,
pub ocf_flag: bool,
vc_frame_count_len: u8,
vc_frame_count: u64,
}
impl PrimaryHeader {
pub fn new(
spacecraft_id: u16,
source_or_dest_field: SourceOrDestField,
vc_id: u8,
map_id: u8,
frame_len: u16,
) -> Result<Self, UslpError> {
if vc_id > 0b111111 {
return Err(UslpError::InvalidVcid(vc_id));
}
if map_id > 0b1111 {
return Err(UslpError::InvalidMapId(map_id));
}
Ok(Self {
spacecraft_id,
source_or_dest_field,
vc_id,
map_id,
frame_len_field: frame_len.saturating_sub(1),
sequence_control_flag: BypassSequenceControlFlag::SequenceControlledQoS,
protocol_control_command_flag: ProtocolControlCommandFlag::TfdfContainsUserData,
ocf_flag: false,
vc_frame_count_len: 0,
vc_frame_count: 0,
})
}
pub fn set_vc_frame_count(
&mut self,
count_len: u8,
count: u64,
) -> Result<(), InvalidValueForLen> {
// The VC frame count length field is only 3 bits wide, so lengths greater than 7 can not be
// encoded; rejecting them early also keeps the 2^(8 * len) computation from overflowing.
if count_len > 7 || count > 2_u64.pow(count_len as u32 * 8) - 1 {
return Err(InvalidValueForLen {
value: count,
len: count_len,
});
}
self.vc_frame_count_len = count_len;
self.vc_frame_count = count;
Ok(())
}
#[inline(always)]
pub fn vc_frame_count(&self) -> u64 {
self.vc_frame_count
}
#[inline(always)]
pub fn vc_frame_count_len(&self) -> u8 {
self.vc_frame_count_len
}
pub fn from_bytes(buf: &[u8]) -> Result<Self, UslpError> {
if buf.len() < 4 {
return Err(ByteConversionError::FromSliceTooSmall {
found: buf.len(),
expected: 4,
}
.into());
}
// Can only deal with regular frames for now.
if (buf[3] & 0b1) == 1 {
return Err(UslpError::HeaderIsTruncated);
}
// We could check this above, but this is a better error for the case where the user
// tries to read a truncated frame.
if buf.len() < 7 {
return Err(ByteConversionError::FromSliceTooSmall {
found: buf.len(),
expected: 7,
}
.into());
}
let version_number = (buf[0] >> 4) & 0b1111;
if version_number != USLP_VERSION_NUMBER {
return Err(UslpError::InvalidVersionNumber(version_number));
}
let source_or_dest_field = match (buf[2] >> 3) & 1 {
0 => SourceOrDestField::Source,
1 => SourceOrDestField::Dest,
_ => unreachable!(),
};
let vc_frame_count_len = buf[6] & 0b111;
if buf.len() < 7 + vc_frame_count_len as usize {
return Err(ByteConversionError::FromSliceTooSmall {
found: buf.len(),
expected: 7 + vc_frame_count_len as usize,
}
.into());
}
let vc_frame_count = match vc_frame_count_len {
1 => buf[7] as u64,
2 => u16::from_be_bytes(buf[7..9].try_into().unwrap()) as u64,
4 => u32::from_be_bytes(buf[7..11].try_into().unwrap()) as u64,
len => {
let mut vcf_count = 0u64;
let mut end = len;
for byte in buf[7..7 + len as usize].iter() {
vcf_count |= (*byte as u64) << ((end - 1) * 8);
end -= 1;
}
vcf_count
}
};
Ok(Self {
spacecraft_id: (((buf[0] as u16) & 0b1111) << 12)
| ((buf[1] as u16) << 4)
| ((buf[2] as u16) >> 4) & 0b1111,
source_or_dest_field,
vc_id: ((buf[2] & 0b111) << 3) | (buf[3] >> 5) & 0b111,
map_id: (buf[3] >> 1) & 0b1111,
frame_len_field: ((buf[4] as u16) << 8) | buf[5] as u16,
sequence_control_flag: ((buf[6] >> 7) & 0b1).try_into().unwrap(),
protocol_control_command_flag: ((buf[6] >> 6) & 0b1).try_into().unwrap(),
ocf_flag: ((buf[6] >> 3) & 0b1) != 0,
vc_frame_count_len,
vc_frame_count,
})
}
pub fn write_to_be_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
if buf.len() < self.len_header() {
return Err(ByteConversionError::ToSliceTooSmall {
found: buf.len(),
expected: self.len_header(),
});
}
buf[0] = (USLP_VERSION_NUMBER << 4) | ((self.spacecraft_id >> 12) as u8) & 0b1111;
buf[1] = (self.spacecraft_id >> 4) as u8;
buf[2] = (((self.spacecraft_id & 0b1111) as u8) << 4)
| ((self.source_or_dest_field as u8) << 3)
| (self.vc_id >> 3) & 0b111;
buf[3] = ((self.vc_id & 0b111) << 5) | (self.map_id << 1);
buf[4..6].copy_from_slice(&self.frame_len_field.to_be_bytes());
buf[6] = ((self.sequence_control_flag as u8) << 7)
| ((self.protocol_control_command_flag as u8) << 6)
| ((self.ocf_flag as u8) << 3)
| self.vc_frame_count_len;
let mut packet_idx = 7;
for idx in (0..self.vc_frame_count_len).rev() {
buf[packet_idx] = ((self.vc_frame_count >> (idx * 8)) & 0xff) as u8;
packet_idx += 1;
}
Ok(self.len_header())
}
#[inline(always)]
pub fn set_frame_len(&mut self, frame_len: usize) {
// 4.1.2.7.2
// The field contains a length count C that equals one fewer than the total octets
// in the transfer frame.
self.frame_len_field = frame_len.saturating_sub(1) as u16;
}
#[inline(always)]
pub fn len_header(&self) -> usize {
7 + self.vc_frame_count_len as usize
}
#[inline(always)]
pub fn len_frame(&self) -> usize {
// 4.1.2.7.2
// The field contains a length count C that equals one fewer than the total octets
// in the transfer frame.
self.frame_len_field as usize + 1
}
}
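// Usage sketch for the primary header API above; spacecraft ID, channel and length values are
// only illustrative. The length field stores the total frame length minus one, so a header
// constructed for a 16-byte frame reports len_frame() == 16.
fn primary_header_example() -> Result<(), UslpError> {
let mut header = PrimaryHeader::new(0x42, SourceOrDestField::Dest, 0b1010, 0b0001, 16)?;
header.set_vc_frame_count(2, 1234).expect("count fits into two bytes");
// 7 byte base header plus the 2 byte virtual channel frame count.
assert_eq!(header.len_header(), 9);
assert_eq!(header.len_frame(), 16);
let mut buf = [0u8; 16];
header.write_to_be_bytes(&mut buf)?;
// Round-tripping through the parser recovers an equal header.
assert_eq!(PrimaryHeader::from_bytes(&buf)?, header);
Ok(())
}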
/// Custom implementation which skips the comparison of the VC frame count length field.
/// Only the actual VC frame count values are compared.
impl PartialEq for PrimaryHeader {
fn eq(&self, other: &Self) -> bool {
self.spacecraft_id == other.spacecraft_id
&& self.source_or_dest_field == other.source_or_dest_field
&& self.vc_id == other.vc_id
&& self.map_id == other.map_id
&& self.frame_len_field == other.frame_len_field
&& self.sequence_control_flag == other.sequence_control_flag
&& self.protocol_control_command_flag == other.protocol_control_command_flag
&& self.ocf_flag == other.ocf_flag
&& self.vc_frame_count == other.vc_frame_count
}
}
#[derive(
Debug, Copy, Clone, PartialEq, Eq, num_enum::TryFromPrimitive, num_enum::IntoPrimitive,
)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)]
#[non_exhaustive]
pub enum UslpProtocolId {
SpacePacketsOrEncapsulation = 0b00000,
/// COP-1 control commands within the TFDZ.
Cop1ControlCommands = 0b00001,
/// COP-P control commands within the TFDZ.
CopPControlCommands = 0b00010,
/// SDLS control commands within the TFDZ.
Sdls = 0b00011,
UserDefinedOctetStream = 0b00100,
/// Proximity-1 Supervisory Protocol Data Units (SPDUs) within the TFDZ.
Spdu = 0b00111,
/// Entire fixed-length TFDZ contains idle data.
Idle = 0b11111,
}
#[derive(
Debug, Copy, Clone, PartialEq, Eq, num_enum::TryFromPrimitive, num_enum::IntoPrimitive,
)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)]
pub enum ConstructionRule {
/// Indicates a fixed-length TFDZ whose contents are CCSDS packets concatenated together,
/// which may span transfer frame boundaries. The First Header Pointer (FHP) is required for
/// packet extraction.
PacketSpanningMultipleFrames = 0b000,
StartOfMapaSduOrVcaSdu = 0b001,
ContinuingPortionOfMapaSdu = 0b010,
OctetStream = 0b011,
StartingSegment = 0b100,
ContinuingSegment = 0b101,
LastSegment = 0b110,
NoSegmentation = 0b111,
}
impl ConstructionRule {
pub const fn applicable_to_fixed_len_tfdz(&self) -> bool {
match self {
ConstructionRule::PacketSpanningMultipleFrames => true,
ConstructionRule::StartOfMapaSduOrVcaSdu => true,
ConstructionRule::ContinuingPortionOfMapaSdu => true,
ConstructionRule::OctetStream => false,
ConstructionRule::StartingSegment => false,
ConstructionRule::ContinuingSegment => false,
ConstructionRule::LastSegment => false,
ConstructionRule::NoSegmentation => false,
}
}
}
pub struct TransferFrameDataFieldHeader {
/// Construction rule for the TFDZ.
construction_rule: ConstructionRule,
uslp_protocol_id: UslpProtocolId,
/// First header or last valid octet pointer. Only present if the construction rule indicates
/// a fixed-length TFDZ.
fhp_or_lvo: Option<u16>,
}
impl TransferFrameDataFieldHeader {
pub fn len_header(&self) -> usize {
if self.construction_rule.applicable_to_fixed_len_tfdz() {
3
} else {
1
}
}
pub fn construction_rule(&self) -> ConstructionRule {
self.construction_rule
}
pub fn uslp_protocol_id(&self) -> UslpProtocolId {
self.uslp_protocol_id
}
pub fn fhp_or_lvo(&self) -> Option<u16> {
self.fhp_or_lvo
}
pub fn from_bytes(buf: &[u8]) -> Result<Self, UslpError> {
if buf.is_empty() {
return Err(ByteConversionError::FromSliceTooSmall {
found: 0,
expected: 1,
}
.into());
}
let construction_rule = ConstructionRule::try_from((buf[0] >> 5) & 0b111)
.map_err(|e| UslpError::InvalidConstructionRule(e.number))?;
let mut fhp_or_lvo = None;
if construction_rule.applicable_to_fixed_len_tfdz() {
if buf.len() < 3 {
return Err(ByteConversionError::FromSliceTooSmall {
found: buf.len(),
expected: 3,
}
.into());
}
fhp_or_lvo = Some(u16::from_be_bytes(buf[1..3].try_into().unwrap()));
}
Ok(Self {
construction_rule,
uslp_protocol_id: UslpProtocolId::try_from(buf[0] & 0b11111)
.map_err(|e| UslpError::InvalidProtocolId(e.number))?,
fhp_or_lvo,
})
}
}
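// Sketch of the TFDH layout handled above: the first byte packs the construction rule into
// bits 7..5 and the USLP protocol ID into bits 4..0; for the non-fixed-length rules no FHP/LVO
// field follows. The values below are only illustrative.
fn tfdh_example() -> Result<(), UslpError> {
let raw = [((ConstructionRule::NoSegmentation as u8) << 5)
| (UslpProtocolId::SpacePacketsOrEncapsulation as u8)];
let tfdh = TransferFrameDataFieldHeader::from_bytes(&raw)?;
assert_eq!(tfdh.len_header(), 1);
assert_eq!(tfdh.construction_rule(), ConstructionRule::NoSegmentation);
assert!(tfdh.fhp_or_lvo().is_none());
Ok(())
}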
/// Simple USLP transfer frame reader.
///
/// Currently, only insert zone lengths of 0 are supported.
pub struct TransferFrameReader<'buf> {
primary_header: PrimaryHeader,
data_field_header: TransferFrameDataFieldHeader,
data: &'buf [u8],
operational_control_field: Option<u32>,
}
impl<'buf> TransferFrameReader<'buf> {
/// This function assumes an insert zone length of 0.
pub fn from_bytes(
buf: &'buf [u8],
has_fecf: bool,
) -> Result<TransferFrameReader<'buf>, UslpError> {
let primary_header = PrimaryHeader::from_bytes(buf)?;
if primary_header.len_frame() > buf.len() {
return Err(ByteConversionError::FromSliceTooSmall {
expected: primary_header.len_frame(),
found: buf.len(),
}
.into());
}
let data_field_header =
TransferFrameDataFieldHeader::from_bytes(&buf[primary_header.len_header()..])?;
let data_idx = primary_header.len_header() + data_field_header.len_header();
let frame_len = primary_header.len_frame();
let mut operational_control_field = None;
let mut data_len = frame_len - data_idx;
if has_fecf {
data_len -= 2;
}
if primary_header.ocf_flag {
data_len -= 4;
operational_control_field = Some(u32::from_be_bytes(
buf[data_idx + data_len..data_idx + data_len + 4]
.try_into()
.unwrap(),
));
}
let data_end = data_idx + data_len;
// The FECF is a CRC16 over the full frame, so running the checksum over the complete frame
// including the FECF must yield 0. Only perform this check if a FECF is actually present.
if has_fecf {
let mut digest = CRC_CCITT_FALSE.digest();
digest.update(&buf[0..frame_len]);
if digest.finalize() != 0 {
return Err(UslpError::ChecksumFailure(u16::from_be_bytes(
buf[frame_len - 2..frame_len].try_into().unwrap(),
)));
}
}
Ok(Self {
primary_header,
data_field_header,
data: &buf[data_idx..data_end],
operational_control_field,
})
}
pub fn len_frame(&self) -> usize {
self.primary_header.len_frame()
}
pub fn primary_header(&self) -> &PrimaryHeader {
&self.primary_header
}
pub fn data_field_header(&self) -> &TransferFrameDataFieldHeader {
&self.data_field_header
}
pub fn data(&self) -> &'buf [u8] {
self.data
}
pub fn operational_control_field(&self) -> &Option<u32> {
&self.operational_control_field
}
}
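// Receive-side sketch (hypothetical handler, assuming incoming frames carry a FECF): parse a
// frame and dispatch on the protocol ID signalled in the data field header.
fn handle_frame(raw: &[u8]) -> Result<(), UslpError> {
let frame = TransferFrameReader::from_bytes(raw, true)?;
match frame.data_field_header().uslp_protocol_id() {
UslpProtocolId::SpacePacketsOrEncapsulation => {
// frame.data() now contains concatenated space packets.
}
UslpProtocolId::Idle => (),
_ => (),
}
if let Some(ocf) = frame.operational_control_field() {
// A CLCW for COP-1 would, for example, be transported here.
let _ = ocf;
}
Ok(())
}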
#[cfg(test)]
mod tests {
use std::println;
use super::*;
fn common_basic_check(buf: &[u8]) {
assert_eq!(buf[0] >> 4, USLP_VERSION_NUMBER);
// First four bits SCID.
assert_eq!(buf[0] & 0b1111, 0b1010);
// Next eight bits SCID.
assert_eq!(buf[1], 0b01011100);
// Last four bits SCID.
assert_eq!(buf[2] >> 4, 0b0011);
assert_eq!((buf[2] >> 3) & 0b1, SourceOrDestField::Dest as u8);
// First three bits VCID.
assert_eq!(buf[2] & 0b111, 0b110);
// Last three bits VCID.
assert_eq!(buf[3] >> 5, 0b101);
// MAP ID
assert_eq!((buf[3] >> 1) & 0b1111, 0b1010);
// End of primary header flag
assert_eq!(buf[3] & 0b1, 0);
// The length field stores the total frame length minus one (see 4.1.2.7.2).
assert_eq!(u16::from_be_bytes(buf[4..6].try_into().unwrap()), 0x2344);
}
#[test]
fn test_basic_0() {
let mut buf: [u8; 8] = [0; 8];
// Should be all zeros after writing.
buf[6] = 0xff;
let primary_header = PrimaryHeader::new(
0b10100101_11000011,
SourceOrDestField::Dest,
0b110101,
0b1010,
0x2345,
)
.unwrap();
// Virtual channel count 0.
assert_eq!(primary_header.write_to_be_bytes(&mut buf).unwrap(), 7);
common_basic_check(&buf);
// Bypass / Sequence Control Flag.
assert_eq!(
(buf[6] >> 7) & 0b1,
BypassSequenceControlFlag::SequenceControlledQoS as u8
);
// Protocol Control Command Flag.
assert_eq!(
(buf[6] >> 6) & 0b1,
ProtocolControlCommandFlag::TfdfContainsUserData as u8
);
// OCF flag.
assert_eq!((buf[6] >> 3) & 0b1, false as u8);
// VCF count length.
assert_eq!(buf[6] & 0b111, 0);
}
#[test]
fn test_basic_1() {
let mut buf: [u8; 16] = [0; 16];
// Should be all zeros after writing.
buf[6] = 0xff;
let mut primary_header = PrimaryHeader::new(
0b10100101_11000011,
SourceOrDestField::Dest,
0b110101,
0b1010,
0x2345,
)
.unwrap();
primary_header.sequence_control_flag = BypassSequenceControlFlag::ExpeditedQoS;
primary_header.protocol_control_command_flag =
ProtocolControlCommandFlag::TfdfContainsProtocolInfo;
primary_header.ocf_flag = true;
primary_header.set_vc_frame_count(4, 0x12345678).unwrap();
// Virtual channel count 4.
assert_eq!(primary_header.write_to_be_bytes(&mut buf).unwrap(), 11);
common_basic_check(&buf);
// Bypass / Sequence Control Flag.
assert_eq!(
(buf[6] >> 7) & 0b1,
BypassSequenceControlFlag::ExpeditedQoS as u8
);
// Protocol Control Command Flag.
assert_eq!(
(buf[6] >> 6) & 0b1,
ProtocolControlCommandFlag::TfdfContainsProtocolInfo as u8
);
// OCF flag.
assert_eq!((buf[6] >> 3) & 0b1, true as u8);
// VCF count length.
assert_eq!(buf[6] & 0b111, 4);
assert_eq!(
u32::from_be_bytes(buf[7..11].try_into().unwrap()),
0x12345678
);
}
#[test]
fn test_reading_0() {
let mut buf: [u8; 8] = [0; 8];
let primary_header = PrimaryHeader::new(
0b10100101_11000011,
SourceOrDestField::Dest,
0b110101,
0b1010,
0x2345,
)
.unwrap();
assert_eq!(primary_header.write_to_be_bytes(&mut buf).unwrap(), 7);
let parsed_header = PrimaryHeader::from_bytes(&buf).unwrap();
assert_eq!(parsed_header, primary_header);
}
#[test]
fn test_reading_1() {
let mut buf: [u8; 16] = [0; 16];
let mut primary_header = PrimaryHeader::new(
0b10100101_11000011,
SourceOrDestField::Dest,
0b110101,
0b1010,
0x2345,
)
.unwrap();
primary_header.sequence_control_flag = BypassSequenceControlFlag::ExpeditedQoS;
primary_header.protocol_control_command_flag =
ProtocolControlCommandFlag::TfdfContainsProtocolInfo;
primary_header.ocf_flag = true;
primary_header.set_vc_frame_count(4, 0x12345678).unwrap();
assert_eq!(primary_header.write_to_be_bytes(&mut buf).unwrap(), 11);
let parsed_header = PrimaryHeader::from_bytes(&buf).unwrap();
assert_eq!(parsed_header, primary_header);
}
#[test]
fn test_invalid_vcid() {
let error = PrimaryHeader::new(
0b10100101_11000011,
SourceOrDestField::Dest,
0b1101011,
0b1010,
0x2345,
);
assert!(error.is_err());
let error = error.unwrap_err();
assert!(matches!(error, UslpError::InvalidVcid(0b1101011)));
}
#[test]
fn test_invalid_mapid() {
let error = PrimaryHeader::new(
0b10100101_11000011,
SourceOrDestField::Dest,
0b110101,
0b10101,
0x2345,
);
assert!(error.is_err());
let error = error.unwrap_err();
assert!(matches!(error, UslpError::InvalidMapId(0b10101)));
}
#[test]
fn test_invalid_vc_count() {
let mut primary_header = PrimaryHeader::new(
0b10100101_11000011,
SourceOrDestField::Dest,
0b110101,
0b1010,
0x2345,
)
.unwrap();
assert!(matches!(
primary_header.set_vc_frame_count(0, 1).unwrap_err(),
InvalidValueForLen { value: 1, len: 0 }
));
assert!(matches!(
primary_header.set_vc_frame_count(1, 256).unwrap_err(),
InvalidValueForLen { value: 256, len: 1 }
));
}
#[test]
fn test_frame_parser() {
let mut buf: [u8; 32] = [0; 32];
// Build a variable frame manually.
let mut primary_header =
PrimaryHeader::new(0x01, SourceOrDestField::Dest, 0b110101, 0b1010, 0).unwrap();
let header_len = primary_header.len_header();
buf[header_len] = ((ConstructionRule::NoSegmentation as u8) << 5)
| (UslpProtocolId::UserDefinedOctetStream as u8) & 0b11111;
buf[header_len + 1] = 0x42;
// 1 byte TFDH, 1 byte data, 2 bytes CRC.
primary_header.set_frame_len(header_len + 4);
primary_header.write_to_be_bytes(&mut buf).unwrap();
// Calculate and write CRC16.
let mut digest = CRC_CCITT_FALSE.digest();
digest.update(&buf[0..header_len + 2]);
buf[header_len + 2..header_len + 4].copy_from_slice(&digest.finalize().to_be_bytes());
println!("Buffer: {:x?}", buf);
// Now parse the frame.
let frame = TransferFrameReader::from_bytes(&buf, true).unwrap();
assert_eq!(frame.data().len(), 1);
assert_eq!(frame.data()[0], 0x42);
assert_eq!(
frame.data_field_header().uslp_protocol_id,
UslpProtocolId::UserDefinedOctetStream
);
assert_eq!(
frame.data_field_header().construction_rule,
ConstructionRule::NoSegmentation
);
assert!(frame.data_field_header().fhp_or_lvo().is_none());
assert_eq!(frame.len_frame(), 11);
}
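// Worked byte layout for the header constructed in test_basic_0 above, assuming the length
// field stores the total frame length minus one (see 4.1.2.7.2):
// byte 0: 0xCA = version 0b1100 | SCID bits 15..12 (0b1010)
// byte 1: 0x5C = SCID bits 11..4
// byte 2: 0x3E = SCID bits 3..0 (0b0011) | destination flag (1) | VCID bits 5..3 (0b110)
// byte 3: 0xB4 = VCID bits 2..0 (0b101) | MAP ID (0b1010) | end of primary header flag (0)
// bytes 4-5: 0x23 0x44 = frame length field (0x2345 - 1)
// byte 6: 0x00 = sequence controlled QoS, user data, no OCF, VCF count length 0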
}