diff --git a/CHANGELOG.md b/CHANGELOG.md
index 90be3b8..18c7dcd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -12,6 +12,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 - Add `WritablePduPacket` trait which is a common trait of all CFDP PDU implementations.
 - Add `CfdpPdu` trait which exposes fields and attributes common to all CFDP PDUs.
+- Add `GenericTlv` and `WritableTlv` traits as abstractions for the various TLV types.
 
 ## Fixed
 
@@ -20,11 +21,27 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 
 ## Changed
 
+- Split up `FinishedPdu` into `FinishedPduCreator` and `FinishedPduReader` to expose specialized
+  APIs.
+- Split up `MetadataPdu` into `MetadataPduCreator` and `MetadataPduReader` to expose specialized
+  APIs.
+- Cleaned up CUC time implementation. Added `width` and `counter` getter methods.
 - Renamed `SerializablePusPacket` to `WritablePusPacket`.
+- Renamed `UnsignedPfc` to `PfcUnsigned` and `RealPfc` to `PfcReal`.
 - Renamed `WritablePduPacket.written_len` and `SerializablePusPacket.len_packed` to `len_written`.
 - Introduce custom implementation of `PartialEq` for `CommonPduConfig` which only compares the
   values for the source entity ID, destination entity ID and transaction sequence number field
   to allow those fields to have different widths.
+- Removed the `PusError::RawDataTooShort` variant, which is already covered by the
+  `PusError::ByteConversionError` variant.
+- Renamed `TlvLvError::ByteConversionError` to `TlvLvError::ByteConversion`.
+- Renamed `PusError::IncorrectCrc` to `PusError::ChecksumFailure`.
+- Some more struct variant changes for error enumerations.
+
+## Removed
+
+- `PusError::NoRawData` variant.
+- `cfdp::LenInBytes` which was not used.
 
 # [v0.7.0-beta.2] 2023-09-26
diff --git a/coverage.py b/coverage.py
index 73b81e9..cfbe006 100755
--- a/coverage.py
+++ b/coverage.py
@@ -13,7 +13,7 @@ def generate_cov_report(open_report: bool, format: str):
     os.environ["RUSTFLAGS"] = "-Cinstrument-coverage"
     os.environ["LLVM_PROFILE_FILE"] = "target/coverage/%p-%m.profraw"
     _LOGGER.info("Executing tests with coverage")
-    os.system("cargo test")
+    os.system("cargo test --all-features")
     out_path = "./target/debug/coverage"
     if format == "lcov":
diff --git a/mod.rs b/mod.rs
new file mode 100644
index 0000000..e69de29
diff --git a/src/cfdp/lv.rs b/src/cfdp/lv.rs
index de705ad..e84e7a4 100644
--- a/src/cfdp/lv.rs
+++ b/src/cfdp/lv.rs
@@ -165,7 +165,9 @@ impl<'data> Lv<'data> {
 #[cfg(test)]
 pub mod tests {
-    use crate::cfdp::lv::Lv;
+    use super::*;
+    use alloc::string::ToString;
+    use crate::cfdp::TlvLvError;
     use crate::ByteConversionError;
     use std::string::String;
@@ -176,7 +178,7 @@ pub mod tests {
         let lv_res = Lv::new(&lv_data);
         assert!(lv_res.is_ok());
         let lv = lv_res.unwrap();
-        assert!(lv.value().len() > 0);
+        assert!(!lv.value().is_empty());
         let val = lv.value();
         assert_eq!(val[0], 1);
         assert_eq!(val[1], 2);
@@ -259,6 +261,10 @@ pub mod tests {
         let error = lv.unwrap_err();
         if let TlvLvError::DataTooLarge(size) = error {
             assert_eq!(size, u8::MAX as usize + 1);
+            assert_eq!(
+                error.to_string(),
+                "data with size 256 larger than allowed 255 bytes"
+            );
         } else {
             panic!("invalid exception {:?}", error)
         }
diff --git a/src/cfdp/mod.rs b/src/cfdp/mod.rs
index cab3351..33ee333 100644
--- a/src/cfdp/mod.rs
+++ b/src/cfdp/mod.rs
@@ -94,17 +94,6 @@ pub enum FaultHandlerCode {
     AbandonTransaction = 0b0100,
 }
 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
-#[cfg_attr(feature = "serde", derive(Serialize,
Deserialize))] -#[repr(u8)] -pub enum LenInBytes { - ZeroOrNone = 0, - OneByte = 1, - TwoBytes = 2, - ThreeBytes = 4, - FourBytes = 8, -} - #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[repr(u8)] @@ -180,9 +169,12 @@ pub const NULL_CHECKSUM_U32: [u8; 4] = [0; 4]; #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum TlvLvError { DataTooLarge(usize), - ByteConversionError(ByteConversionError), + ByteConversion(ByteConversionError), /// First value: Found value. Second value: Expected value if there is one. - InvalidTlvTypeField((u8, Option)), + InvalidTlvTypeField { + found: u8, + expected: Option, + }, /// Logically invalid value length detected. The value length may not exceed 255 bytes. /// Depending on the concrete TLV type, the value length may also be logically invalid. InvalidValueLength(usize), @@ -195,7 +187,7 @@ pub enum TlvLvError { impl From for TlvLvError { fn from(value: ByteConversionError) -> Self { - Self::ByteConversionError(value) + Self::ByteConversion(value) } } @@ -210,17 +202,17 @@ impl Display for TlvLvError { u8::MAX ) } - TlvLvError::ByteConversionError(e) => { - write!(f, "{}", e) + TlvLvError::ByteConversion(e) => { + write!(f, "tlv or lv byte conversion: {}", e) } - TlvLvError::InvalidTlvTypeField((found, expected)) => { + TlvLvError::InvalidTlvTypeField { found, expected } => { write!( f, - "invalid TLV type field, found {found}, possibly expected {expected:?}" + "invalid TLV type field, found {found}, expected {expected:?}" ) } TlvLvError::InvalidValueLength(len) => { - write!(f, "invalid value length {len} detected") + write!(f, "invalid value length {len}") } TlvLvError::SecondNameMissing => { write!(f, "second name missing for filestore request or response") @@ -236,8 +228,65 @@ impl Display for TlvLvError { impl Error for TlvLvError { fn source(&self) -> Option<&(dyn Error + 'static)> { match self { - TlvLvError::ByteConversionError(e) => Some(e), + TlvLvError::ByteConversion(e) => Some(e), _ => None, } } } + +#[cfg(test)] +mod tests { + use super::*; + #[cfg(feature = "serde")] + use crate::tests::generic_serde_test; + + #[test] + fn test_crc_from_bool() { + assert_eq!(CrcFlag::from(false), CrcFlag::NoCrc); + } + + #[test] + fn test_crc_flag_to_bool() { + let is_true: bool = CrcFlag::WithCrc.into(); + assert!(is_true); + let is_false: bool = CrcFlag::NoCrc.into(); + assert!(!is_false); + } + + #[test] + fn test_default_checksum_type() { + let checksum = ChecksumType::default(); + assert_eq!(checksum, ChecksumType::NullChecksum); + } + + #[test] + fn test_fault_handler_code_from_u8() { + let fault_handler_code_raw = FaultHandlerCode::NoticeOfSuspension as u8; + let fault_handler_code = FaultHandlerCode::try_from(fault_handler_code_raw).unwrap(); + assert_eq!(fault_handler_code, FaultHandlerCode::NoticeOfSuspension); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_impl_pdu_type() { + generic_serde_test(PduType::FileData); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_impl_direction() { + generic_serde_test(Direction::TowardsReceiver); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_impl_transmission_mode() { + generic_serde_test(TransmissionMode::Unacknowledged); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_fault_handler_code() { + generic_serde_test(FaultHandlerCode::NoticeOfCancellation); + } +} diff --git a/src/cfdp/pdu/ack.rs b/src/cfdp/pdu/ack.rs index bc74911..997f40b 100644 --- 
a/src/cfdp/pdu/ack.rs +++ b/src/cfdp/pdu/ack.rs @@ -200,6 +200,26 @@ mod tests { }; use super::*; + #[cfg(feature = "serde")] + use crate::tests::generic_serde_test; + + fn verify_state(ack_pdu: &AckPdu, expected_crc_flag: CrcFlag, expected_dir: Direction) { + assert_eq!(ack_pdu.condition_code(), ConditionCode::NoError); + assert_eq!(ack_pdu.transaction_status(), TransactionStatus::Active); + + assert_eq!(ack_pdu.crc_flag(), expected_crc_flag); + assert_eq!(ack_pdu.file_flag(), LargeFileFlag::Normal); + assert_eq!(ack_pdu.pdu_type(), PduType::FileDirective); + assert_eq!( + ack_pdu.file_directive_type(), + Some(FileDirectiveType::AckPdu) + ); + assert_eq!(ack_pdu.transmission_mode(), TransmissionMode::Acknowledged); + assert_eq!(ack_pdu.direction(), expected_dir); + assert_eq!(ack_pdu.source_id(), TEST_SRC_ID.into()); + assert_eq!(ack_pdu.dest_id(), TEST_DEST_ID.into()); + assert_eq!(ack_pdu.transaction_seq_num(), TEST_SEQ_NUM.into()); + } #[test] fn test_basic() { @@ -216,21 +236,7 @@ mod tests { ack_pdu.directive_code_of_acked_pdu(), FileDirectiveType::FinishedPdu ); - assert_eq!(ack_pdu.condition_code(), ConditionCode::NoError); - assert_eq!(ack_pdu.transaction_status(), TransactionStatus::Active); - - assert_eq!(ack_pdu.crc_flag(), CrcFlag::NoCrc); - assert_eq!(ack_pdu.file_flag(), LargeFileFlag::Normal); - assert_eq!(ack_pdu.pdu_type(), PduType::FileDirective); - assert_eq!( - ack_pdu.file_directive_type(), - Some(FileDirectiveType::AckPdu) - ); - assert_eq!(ack_pdu.transmission_mode(), TransmissionMode::Acknowledged); - assert_eq!(ack_pdu.direction(), Direction::TowardsReceiver); - assert_eq!(ack_pdu.source_id(), TEST_SRC_ID.into()); - assert_eq!(ack_pdu.dest_id(), TEST_DEST_ID.into()); - assert_eq!(ack_pdu.transaction_seq_num(), TEST_SEQ_NUM.into()); + verify_state(&ack_pdu, CrcFlag::NoCrc, Direction::TowardsReceiver); } fn generic_serialization_test( @@ -296,4 +302,33 @@ mod tests { AckPdu::from_bytes(&ack_vec).expect("ACK PDU deserialization failed"); assert_eq!(ack_deserialized, ack_pdu); } + + #[test] + fn test_for_eof_pdu() { + let pdu_conf = common_pdu_conf(CrcFlag::WithCrc, LargeFileFlag::Normal); + let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0); + let ack_pdu = AckPdu::new_for_eof_pdu( + pdu_header, + ConditionCode::NoError, + TransactionStatus::Active, + ); + assert_eq!( + ack_pdu.directive_code_of_acked_pdu(), + FileDirectiveType::EofPdu + ); + verify_state(&ack_pdu, CrcFlag::WithCrc, Direction::TowardsSender); + } + + #[test] + #[cfg(feature = "serde")] + fn test_ack_pdu_serialization() { + let pdu_conf = common_pdu_conf(CrcFlag::WithCrc, LargeFileFlag::Normal); + let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0); + let ack_pdu = AckPdu::new_for_eof_pdu( + pdu_header, + ConditionCode::NoError, + TransactionStatus::Active, + ); + generic_serde_test(ack_pdu); + } } diff --git a/src/cfdp/pdu/eof.rs b/src/cfdp/pdu/eof.rs index 2ee5fa4..af30e9c 100644 --- a/src/cfdp/pdu/eof.rs +++ b/src/cfdp/pdu/eof.rs @@ -2,7 +2,7 @@ use crate::cfdp::pdu::{ add_pdu_crc, generic_length_checks_pdu_deserialization, read_fss_field, write_fss_field, FileDirectiveType, PduError, PduHeader, }; -use crate::cfdp::tlv::EntityIdTlv; +use crate::cfdp::tlv::{EntityIdTlv, WritableTlv}; use crate::cfdp::{ConditionCode, CrcFlag, Direction, LargeFileFlag}; use crate::ByteConversionError; #[cfg(feature = "serde")] @@ -147,7 +147,7 @@ impl WritablePduPacket for EofPdu { &mut buf[current_idx..], )?; if let Some(fault_location) = self.fault_location { - current_idx += 
fault_location.write_to_be_bytes(buf)?; + current_idx += fault_location.write_to_bytes(buf)?; } if self.crc_flag() == CrcFlag::WithCrc { current_idx = add_pdu_crc(buf, current_idx); @@ -168,19 +168,16 @@ mod tests { }; use crate::cfdp::pdu::{FileDirectiveType, PduHeader}; use crate::cfdp::{ConditionCode, CrcFlag, LargeFileFlag, PduType, TransmissionMode}; + #[cfg(feature = "serde")] + use crate::tests::generic_serde_test; - #[test] - fn test_basic() { - let pdu_conf = common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Normal); - let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0); - let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12); - assert_eq!(eof_pdu.len_written(), pdu_header.header_len() + 2 + 4 + 4); + fn verify_state(&eof_pdu: &EofPdu, file_flag: LargeFileFlag) { assert_eq!(eof_pdu.file_checksum(), 0x01020304); assert_eq!(eof_pdu.file_size(), 12); assert_eq!(eof_pdu.condition_code(), ConditionCode::NoError); assert_eq!(eof_pdu.crc_flag(), CrcFlag::NoCrc); - assert_eq!(eof_pdu.file_flag(), LargeFileFlag::Normal); + assert_eq!(eof_pdu.file_flag(), file_flag); assert_eq!(eof_pdu.pdu_type(), PduType::FileDirective); assert_eq!( eof_pdu.file_directive_type(), @@ -193,6 +190,15 @@ mod tests { assert_eq!(eof_pdu.transaction_seq_num(), TEST_SEQ_NUM.into()); } + #[test] + fn test_basic() { + let pdu_conf = common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Normal); + let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0); + let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12); + assert_eq!(eof_pdu.len_written(), pdu_header.header_len() + 2 + 4 + 4); + verify_state(&eof_pdu, LargeFileFlag::Normal); + } + #[test] fn test_serialization() { let pdu_conf = common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Normal); @@ -270,4 +276,22 @@ mod tests { panic!("expected crc error"); } } + + #[test] + fn test_with_large_file_flag() { + let pdu_conf = common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Large); + let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0); + let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12); + verify_state(&eof_pdu, LargeFileFlag::Large); + assert_eq!(eof_pdu.len_written(), pdu_header.header_len() + 2 + 8 + 4); + } + + #[test] + #[cfg(feature = "serde")] + fn test_eof_serde() { + let pdu_conf = common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Normal); + let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0); + let eof_pdu = EofPdu::new_no_error(pdu_header, 0x01020304, 12); + generic_serde_test(eof_pdu); + } } diff --git a/src/cfdp/pdu/file_data.rs b/src/cfdp/pdu/file_data.rs index 8575922..63dfdbf 100644 --- a/src/cfdp/pdu/file_data.rs +++ b/src/cfdp/pdu/file_data.rs @@ -248,6 +248,8 @@ mod tests { use crate::cfdp::pdu::tests::{TEST_DEST_ID, TEST_SEQ_NUM, TEST_SRC_ID}; use crate::cfdp::pdu::{CommonPduConfig, PduHeader}; use crate::cfdp::{Direction, SegmentMetadataFlag, SegmentationControl, TransmissionMode}; + #[cfg(feature = "serde")] + use postcard::{from_bytes, to_allocvec}; #[test] fn test_basic() { @@ -445,4 +447,39 @@ mod tests { let fd_pdu_read_back = fd_pdu_read_back.unwrap(); assert_eq!(fd_pdu_read_back, fd_pdu); } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_serialization() { + let common_conf = + CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap(); + let pdu_header = PduHeader::new_for_file_data_default(common_conf, 0); + let file_data: [u8; 4] = [1, 2, 3, 4]; + let fd_pdu = FileDataPdu::new_no_seg_metadata(pdu_header, 10, &file_data); + let output = to_allocvec(&fd_pdu).unwrap(); + let 
output_converted_back: FileDataPdu = from_bytes(&output).unwrap(); + assert_eq!(output_converted_back, fd_pdu); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_serialization_with_seg_metadata() { + let common_conf = + CommonPduConfig::new_with_byte_fields(TEST_SRC_ID, TEST_DEST_ID, TEST_SEQ_NUM).unwrap(); + let pdu_header = PduHeader::new_for_file_data( + common_conf, + 0, + SegmentMetadataFlag::Present, + SegmentationControl::WithRecordBoundaryPreservation, + ); + let file_data: [u8; 4] = [1, 2, 3, 4]; + let seg_metadata: [u8; 4] = [4, 3, 2, 1]; + let segment_meta = + SegmentMetadata::new(RecordContinuationState::StartAndEnd, Some(&seg_metadata)) + .unwrap(); + let fd_pdu = FileDataPdu::new_with_seg_metadata(pdu_header, segment_meta, 10, &file_data); + let output = to_allocvec(&fd_pdu).unwrap(); + let output_converted_back: FileDataPdu = from_bytes(&output).unwrap(); + assert_eq!(output_converted_back, fd_pdu); + } } diff --git a/src/cfdp/pdu/finished.rs b/src/cfdp/pdu/finished.rs index dc410fb..529984c 100644 --- a/src/cfdp/pdu/finished.rs +++ b/src/cfdp/pdu/finished.rs @@ -1,7 +1,9 @@ use crate::cfdp::pdu::{ add_pdu_crc, generic_length_checks_pdu_deserialization, FileDirectiveType, PduError, PduHeader, }; -use crate::cfdp::tlv::{EntityIdTlv, Tlv, TlvType, TlvTypeField}; +use crate::cfdp::tlv::{ + EntityIdTlv, FilestoreResponseTlv, GenericTlv, Tlv, TlvType, TlvTypeField, WritableTlv, +}; use crate::cfdp::{ConditionCode, CrcFlag, Direction, PduType, TlvLvError}; use crate::ByteConversionError; use num_enum::{IntoPrimitive, TryFromPrimitive}; @@ -32,17 +34,17 @@ pub enum FileStatus { /// /// For more information, refer to CFDP chapter 5.2.3. #[derive(Debug, Copy, Clone, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct FinishedPdu<'fs_responses> { +pub struct FinishedPduCreator<'fs_responses> { pdu_header: PduHeader, condition_code: ConditionCode, delivery_code: DeliveryCode, file_status: FileStatus, - fs_responses: Option<&'fs_responses [u8]>, + fs_responses: + &'fs_responses [FilestoreResponseTlv<'fs_responses, 'fs_responses, 'fs_responses>], fault_location: Option, } -impl<'fs_responses> FinishedPdu<'fs_responses> { +impl<'fs_responses> FinishedPduCreator<'fs_responses> { /// Default finished PDU: No error (no fault location field) and no filestore responses. pub fn new_default( pdu_header: PduHeader, @@ -54,7 +56,7 @@ impl<'fs_responses> FinishedPdu<'fs_responses> { ConditionCode::NoError, delivery_code, file_status, - None, + &[], None, ) } @@ -71,7 +73,7 @@ impl<'fs_responses> FinishedPdu<'fs_responses> { condition_code, delivery_code, file_status, - None, + &[], Some(fault_location), ) } @@ -81,7 +83,11 @@ impl<'fs_responses> FinishedPdu<'fs_responses> { condition_code: ConditionCode, delivery_code: DeliveryCode, file_status: FileStatus, - fs_responses: Option<&'fs_responses [u8]>, + fs_responses: &'fs_responses [FilestoreResponseTlv< + 'fs_responses, + 'fs_responses, + 'fs_responses, + >], fault_location: Option, ) -> Self { pdu_header.pdu_type = PduType::FileDirective; @@ -111,7 +117,8 @@ impl<'fs_responses> FinishedPdu<'fs_responses> { self.file_status } - pub fn filestore_responses(&self) -> Option<&'fs_responses [u8]> { + // If there are no filestore responses, an empty slice will be returned. 
+ pub fn filestore_responses(&self) -> &[FilestoreResponseTlv<'_, '_, '_>] { self.fs_responses } @@ -121,8 +128,8 @@ impl<'fs_responses> FinishedPdu<'fs_responses> { fn calc_pdu_datafield_len(&self) -> usize { let mut datafield_len = 2; - if let Some(fs_responses) = self.fs_responses { - datafield_len += fs_responses.len(); + for fs_response in self.fs_responses { + datafield_len += fs_response.len_full(); } if let Some(fault_location) = self.fault_location { datafield_len += fault_location.len_full(); @@ -132,9 +139,103 @@ impl<'fs_responses> FinishedPdu<'fs_responses> { } datafield_len } +} + +impl CfdpPdu for FinishedPduCreator<'_> { + fn pdu_header(&self) -> &PduHeader { + &self.pdu_header + } + + fn file_directive_type(&self) -> Option { + Some(FileDirectiveType::FinishedPdu) + } +} + +impl WritablePduPacket for FinishedPduCreator<'_> { + fn write_to_bytes(&self, buf: &mut [u8]) -> Result { + let expected_len = self.len_written(); + if buf.len() < expected_len { + return Err(ByteConversionError::ToSliceTooSmall { + found: buf.len(), + expected: expected_len, + } + .into()); + } + + let mut current_idx = self.pdu_header.write_to_bytes(buf)?; + buf[current_idx] = FileDirectiveType::FinishedPdu as u8; + current_idx += 1; + buf[current_idx] = ((self.condition_code as u8) << 4) + | ((self.delivery_code as u8) << 2) + | self.file_status as u8; + current_idx += 1; + for fs_responses in self.fs_responses { + current_idx += fs_responses.write_to_bytes(&mut buf[current_idx..])?; + } + if let Some(fault_location) = self.fault_location { + current_idx += fault_location.write_to_bytes(&mut buf[current_idx..])?; + } + if self.crc_flag() == CrcFlag::WithCrc { + current_idx = add_pdu_crc(buf, current_idx); + } + Ok(current_idx) + } + + fn len_written(&self) -> usize { + self.pdu_header.header_len() + self.calc_pdu_datafield_len() + } +} + +/// Helper structure to loop through all filestore responses of a read Finished PDU. It should be +/// noted that iterators in Rust are not fallible, but the TLV creation can fail, for example if +/// the raw TLV data is invalid for some reason. In that case, the iterator will yield [None] +/// because there is no way to recover from this. +/// +/// The user can accumulate the length of all TLVs yielded by the iterator and compare it against +/// the full length of the options to check whether the iterator was able to parse all TLVs +/// successfully. +pub struct FilestoreResponseIterator<'buf> { + responses_buf: &'buf [u8], + current_idx: usize, +} + +impl<'buf> Iterator for FilestoreResponseIterator<'buf> { + type Item = FilestoreResponseTlv<'buf, 'buf, 'buf>; + + fn next(&mut self) -> Option { + if self.current_idx == self.responses_buf.len() { + return None; + } + let tlv = FilestoreResponseTlv::from_bytes(&self.responses_buf[self.current_idx..]); + // There are not really fallible iterators so we can't continue here.. + if tlv.is_err() { + return None; + } + let tlv = tlv.unwrap(); + self.current_idx += tlv.len_full(); + Some(tlv) + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub struct FinishedPduReader<'buf> { + pdu_header: PduHeader, + condition_code: ConditionCode, + delivery_code: DeliveryCode, + file_status: FileStatus, + fs_responses_raw: &'buf [u8], + fault_location: Option, +} + +impl<'buf> FinishedPduReader<'buf> { + /// Generates [Self] from a raw bytestream. + pub fn new(buf: &'buf [u8]) -> Result { + Self::from_bytes(buf) + } /// Generates [Self] from a raw bytestream. 
- pub fn from_bytes(buf: &'fs_responses [u8]) -> Result { + pub fn from_bytes(buf: &'buf [u8]) -> Result { let (pdu_header, mut current_idx) = PduHeader::from_bytes(buf)?; let full_len_without_crc = pdu_header.verify_length_and_checksum(buf)?; let min_expected_len = current_idx + 2; @@ -158,24 +259,51 @@ impl<'fs_responses> FinishedPdu<'fs_responses> { let delivery_code = DeliveryCode::try_from((buf[current_idx] >> 2) & 0b1).unwrap(); let file_status = FileStatus::try_from(buf[current_idx] & 0b11).unwrap(); current_idx += 1; - let (fs_responses, fault_location) = + let (fs_responses_raw, fault_location) = Self::parse_tlv_fields(current_idx, full_len_without_crc, buf)?; Ok(Self { pdu_header, condition_code, delivery_code, file_status, - fs_responses, + fs_responses_raw, fault_location, }) } + pub fn fs_responses_raw(&self) -> &[u8] { + self.fs_responses_raw + } + + pub fn fs_responses_iter(&self) -> FilestoreResponseIterator<'_> { + FilestoreResponseIterator { + responses_buf: self.fs_responses_raw, + current_idx: 0, + } + } + + pub fn condition_code(&self) -> ConditionCode { + self.condition_code + } + + pub fn delivery_code(&self) -> DeliveryCode { + self.delivery_code + } + + pub fn file_status(&self) -> FileStatus { + self.file_status + } + + pub fn fault_location(&self) -> Option { + self.fault_location + } + fn parse_tlv_fields( mut current_idx: usize, full_len_without_crc: usize, - buf: &'fs_responses [u8], - ) -> Result<(Option<&'fs_responses [u8]>, Option), PduError> { - let mut fs_responses = None; + buf: &[u8], + ) -> Result<(&[u8], Option), PduError> { + let mut fs_responses: &[u8] = &[]; let mut fault_location = None; let start_of_fs_responses = current_idx; // There are leftover filestore response(s) and/or a fault location field. @@ -186,12 +314,12 @@ impl<'fs_responses> FinishedPdu<'fs_responses> { if tlv_type == TlvType::FilestoreResponse { current_idx += next_tlv.len_full(); if current_idx == full_len_without_crc { - fs_responses = Some(&buf[start_of_fs_responses..current_idx]); + fs_responses = &buf[start_of_fs_responses..current_idx]; } } else if tlv_type == TlvType::EntityId { // At least one FS response is included. 
- if current_idx > full_len_without_crc { - fs_responses = Some(&buf[start_of_fs_responses..current_idx]); + if current_idx > start_of_fs_responses { + fs_responses = &buf[start_of_fs_responses..current_idx]; } fault_location = Some(EntityIdTlv::from_bytes(&buf[current_idx..])?); current_idx += fault_location.as_ref().unwrap().len_full(); @@ -202,11 +330,19 @@ impl<'fs_responses> FinishedPdu<'fs_responses> { return Err(PduError::FormatError); } } else { - return Err(TlvLvError::InvalidTlvTypeField((tlv_type as u8, None)).into()); + return Err(TlvLvError::InvalidTlvTypeField { + found: tlv_type.into(), + expected: Some(TlvType::FilestoreResponse.into()), + } + .into()); } } TlvTypeField::Custom(raw) => { - return Err(TlvLvError::InvalidTlvTypeField((raw, None)).into()); + return Err(TlvLvError::InvalidTlvTypeField { + found: raw, + expected: None, + } + .into()); } } } @@ -214,7 +350,7 @@ impl<'fs_responses> FinishedPdu<'fs_responses> { } } -impl CfdpPdu for FinishedPdu<'_> { +impl CfdpPdu for FinishedPduReader<'_> { fn pdu_header(&self) -> &PduHeader { &self.pdu_header } @@ -224,49 +360,35 @@ impl CfdpPdu for FinishedPdu<'_> { } } -impl WritablePduPacket for FinishedPdu<'_> { - fn write_to_bytes(&self, buf: &mut [u8]) -> Result { - let expected_len = self.len_written(); - if buf.len() < expected_len { - return Err(ByteConversionError::ToSliceTooSmall { - found: buf.len(), - expected: expected_len, - } - .into()); - } - - let mut current_idx = self.pdu_header.write_to_bytes(buf)?; - buf[current_idx] = FileDirectiveType::FinishedPdu as u8; - current_idx += 1; - buf[current_idx] = ((self.condition_code as u8) << 4) - | ((self.delivery_code as u8) << 2) - | self.file_status as u8; - current_idx += 1; - if let Some(fs_responses) = self.fs_responses { - buf[current_idx..current_idx + fs_responses.len()].copy_from_slice(fs_responses); - current_idx += fs_responses.len(); - } - if let Some(fault_location) = self.fault_location { - current_idx += fault_location.write_to_be_bytes(&mut buf[current_idx..])?; - } - if self.crc_flag() == CrcFlag::WithCrc { - current_idx = add_pdu_crc(buf, current_idx); - } - Ok(current_idx) +impl PartialEq> for FinishedPduReader<'_> { + fn eq(&self, other: &FinishedPduCreator<'_>) -> bool { + self.pdu_header == other.pdu_header + && self.condition_code == other.condition_code + && self.delivery_code == other.delivery_code + && self.file_status == other.file_status + && self.fault_location == other.fault_location + && self + .fs_responses_iter() + .zip(other.filestore_responses().iter()) + .all(|(a, b)| a == *b) } +} - fn len_written(&self) -> usize { - self.pdu_header.header_len() + self.calc_pdu_datafield_len() +impl PartialEq> for FinishedPduCreator<'_> { + fn eq(&self, other: &FinishedPduReader<'_>) -> bool { + other.eq(self) } } #[cfg(test)] mod tests { use super::*; + use crate::cfdp::lv::Lv; use crate::cfdp::pdu::tests::{ common_pdu_conf, verify_raw_header, TEST_DEST_ID, TEST_SEQ_NUM, TEST_SRC_ID, }; use crate::cfdp::pdu::{FileDirectiveType, PduHeader}; + use crate::cfdp::tlv::FilestoreResponseTlv; use crate::cfdp::{ConditionCode, CrcFlag, Direction, LargeFileFlag, TransmissionMode}; fn generic_finished_pdu( @@ -274,9 +396,9 @@ mod tests { fss: LargeFileFlag, delivery_code: DeliveryCode, file_status: FileStatus, - ) -> FinishedPdu<'static> { + ) -> FinishedPduCreator<'static> { let pdu_header = PduHeader::new_no_file_data(common_pdu_conf(crc_flag, fss), 0); - FinishedPdu::new_default(pdu_header, delivery_code, file_status) + 
FinishedPduCreator::new_default(pdu_header, delivery_code, file_status) } #[test] @@ -294,7 +416,7 @@ mod tests { ); assert_eq!(finished_pdu.delivery_code(), DeliveryCode::Complete); assert_eq!(finished_pdu.file_status(), FileStatus::Retained); - assert_eq!(finished_pdu.filestore_responses(), None); + assert_eq!(finished_pdu.filestore_responses(), &[]); assert_eq!(finished_pdu.fault_location(), None); assert_eq!(finished_pdu.pdu_header().pdu_datafield_len, 2); @@ -326,6 +448,7 @@ mod tests { let written = finished_pdu.write_to_bytes(&mut buf); assert!(written.is_ok()); let written = written.unwrap(); + assert_eq!(written, 9); assert_eq!(written, finished_pdu.len_written()); assert_eq!(written, finished_pdu.pdu_header().header_len() + 2); assert_eq!( @@ -387,10 +510,37 @@ mod tests { ); let mut buf: [u8; 64] = [0; 64]; finished_pdu.write_to_bytes(&mut buf).unwrap(); - let read_back = FinishedPdu::from_bytes(&buf); + let read_back = FinishedPduReader::from_bytes(&buf); assert!(read_back.is_ok()); let read_back = read_back.unwrap(); assert_eq!(finished_pdu, read_back); + // Use all getter functions here explicitely once. + assert_eq!(finished_pdu.pdu_header(), read_back.pdu_header()); + assert_eq!(finished_pdu.condition_code(), read_back.condition_code()); + assert_eq!(finished_pdu.fault_location(), read_back.fault_location()); + assert_eq!(finished_pdu.file_status(), read_back.file_status()); + assert_eq!(finished_pdu.delivery_code(), read_back.delivery_code()); + } + + #[test] + fn test_serialization_buf_too_small() { + let finished_pdu = generic_finished_pdu( + CrcFlag::NoCrc, + LargeFileFlag::Normal, + DeliveryCode::Complete, + FileStatus::Retained, + ); + let mut buf: [u8; 8] = [0; 8]; + let error = finished_pdu.write_to_bytes(&mut buf); + assert!(error.is_err()); + if let PduError::ByteConversion(ByteConversionError::ToSliceTooSmall { found, expected }) = + error.unwrap_err() + { + assert_eq!(found, 8); + assert_eq!(expected, 9); + } else { + panic!("expected to_slice_too_small error"); + } } #[test] @@ -404,15 +554,125 @@ mod tests { let mut buf: [u8; 64] = [0; 64]; let written = finished_pdu.write_to_bytes(&mut buf).unwrap(); assert_eq!(written, finished_pdu.len_written()); - let finished_pdu_from_raw = FinishedPdu::from_bytes(&buf).unwrap(); - assert_eq!(finished_pdu_from_raw, finished_pdu); + let finished_pdu_from_raw = FinishedPduReader::new(&buf).unwrap(); + assert_eq!(finished_pdu, finished_pdu_from_raw); buf[written - 1] -= 1; let crc: u16 = ((buf[written - 2] as u16) << 8) as u16 | buf[written - 1] as u16; - let error = FinishedPdu::from_bytes(&buf).unwrap_err(); + let error = FinishedPduReader::new(&buf).unwrap_err(); if let PduError::ChecksumError(e) = error { assert_eq!(e, crc); } else { panic!("expected crc error"); } } + + #[test] + fn test_with_fault_location() { + let pdu_header = + PduHeader::new_no_file_data(common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Normal), 0); + let finished_pdu = FinishedPduCreator::new_with_error( + pdu_header, + ConditionCode::NakLimitReached, + DeliveryCode::Incomplete, + FileStatus::DiscardDeliberately, + EntityIdTlv::new(TEST_DEST_ID.into()), + ); + let finished_pdu_vec = finished_pdu.to_vec().unwrap(); + assert_eq!(finished_pdu_vec.len(), 12); + assert_eq!(finished_pdu_vec[9], TlvType::EntityId.into()); + assert_eq!(finished_pdu_vec[10], 1); + assert_eq!(finished_pdu_vec[11], TEST_DEST_ID.value()); + assert_eq!( + finished_pdu.fault_location().unwrap().entity_id(), + &TEST_DEST_ID.into() + ); + } + + #[test] + fn 
test_deserialization_with_fault_location() { + let pdu_header = + PduHeader::new_no_file_data(common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Normal), 0); + let entity_id_tlv = EntityIdTlv::new(TEST_DEST_ID.into()); + let finished_pdu = FinishedPduCreator::new_with_error( + pdu_header, + ConditionCode::NakLimitReached, + DeliveryCode::Incomplete, + FileStatus::DiscardDeliberately, + entity_id_tlv, + ); + let finished_pdu_vec = finished_pdu.to_vec().unwrap(); + let finished_pdu_deserialized = FinishedPduReader::from_bytes(&finished_pdu_vec).unwrap(); + assert_eq!(finished_pdu, finished_pdu_deserialized); + } + + #[test] + fn test_deserialization_with_fs_responses() { + let entity_id_tlv = EntityIdTlv::new(TEST_DEST_ID.into()); + let first_name = "first.txt"; + let first_name_lv = Lv::new_from_str(first_name).unwrap(); + let fs_response_0 = FilestoreResponseTlv::new_no_filestore_message( + crate::cfdp::tlv::FilestoreActionCode::CreateFile, + 0, + first_name_lv, + None, + ) + .unwrap(); + let fs_response_1 = FilestoreResponseTlv::new_no_filestore_message( + crate::cfdp::tlv::FilestoreActionCode::DeleteFile, + 0, + first_name_lv, + None, + ) + .unwrap(); + let fs_responses = &[fs_response_0, fs_response_1]; + + let pdu_header = + PduHeader::new_no_file_data(common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Normal), 0); + let finished_pdu = FinishedPduCreator::new_generic( + pdu_header, + ConditionCode::NakLimitReached, + DeliveryCode::Incomplete, + FileStatus::DiscardDeliberately, + fs_responses, + Some(entity_id_tlv), + ); + let finished_pdu_vec = finished_pdu.to_vec().unwrap(); + let finished_pdu_deserialized = FinishedPduReader::from_bytes(&finished_pdu_vec).unwrap(); + assert_eq!(finished_pdu_deserialized, finished_pdu); + } + + #[test] + fn test_deserialization_with_fs_responses_and_fault_location() { + let first_name = "first.txt"; + let first_name_lv = Lv::new_from_str(first_name).unwrap(); + let fs_response_0 = FilestoreResponseTlv::new_no_filestore_message( + crate::cfdp::tlv::FilestoreActionCode::CreateFile, + 0, + first_name_lv, + None, + ) + .unwrap(); + let fs_response_1 = FilestoreResponseTlv::new_no_filestore_message( + crate::cfdp::tlv::FilestoreActionCode::DeleteFile, + 0, + first_name_lv, + None, + ) + .unwrap(); + let fs_responses = &[fs_response_0, fs_response_1]; + + let pdu_header = + PduHeader::new_no_file_data(common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Normal), 0); + let finished_pdu = FinishedPduCreator::new_generic( + pdu_header, + ConditionCode::NakLimitReached, + DeliveryCode::Incomplete, + FileStatus::DiscardDeliberately, + fs_responses, + None, + ); + let finished_pdu_vec = finished_pdu.to_vec().unwrap(); + let finished_pdu_deserialized = FinishedPduReader::from_bytes(&finished_pdu_vec).unwrap(); + assert_eq!(finished_pdu_deserialized, finished_pdu); + } } diff --git a/src/cfdp/pdu/metadata.rs b/src/cfdp/pdu/metadata.rs index 5ed70d0..920c63c 100644 --- a/src/cfdp/pdu/metadata.rs +++ b/src/cfdp/pdu/metadata.rs @@ -3,7 +3,7 @@ use crate::cfdp::pdu::{ add_pdu_crc, generic_length_checks_pdu_deserialization, read_fss_field, write_fss_field, FileDirectiveType, PduError, PduHeader, }; -use crate::cfdp::tlv::Tlv; +use crate::cfdp::tlv::{Tlv, WritableTlv}; use crate::cfdp::{ChecksumType, CrcFlag, Direction, LargeFileFlag, PduType}; use crate::ByteConversionError; #[cfg(feature = "alloc")] @@ -340,6 +340,8 @@ impl CfdpPdu for MetadataPduReader<'_> { #[cfg(test)] pub mod tests { + use alloc::string::ToString; + use crate::cfdp::lv::Lv; use crate::cfdp::pdu::metadata::{ 
build_metadata_opts_from_slice, build_metadata_opts_from_vec, MetadataGenericParams, @@ -348,7 +350,7 @@ pub mod tests { use crate::cfdp::pdu::tests::{ common_pdu_conf, verify_raw_header, TEST_DEST_ID, TEST_SEQ_NUM, TEST_SRC_ID, }; - use crate::cfdp::pdu::{CfdpPdu, WritablePduPacket}; + use crate::cfdp::pdu::{CfdpPdu, PduError, WritablePduPacket}; use crate::cfdp::pdu::{FileDirectiveType, PduHeader}; use crate::cfdp::tlv::{Tlv, TlvType}; use crate::cfdp::{ @@ -600,6 +602,45 @@ pub mod tests { assert_eq!(accumulated_len, pdu_read_back.options().len()); } + #[test] + fn test_invalid_directive_code() { + let (_, _, metadata_pdu) = generic_metadata_pdu(CrcFlag::NoCrc, LargeFileFlag::Large, &[]); + let mut metadata_vec = metadata_pdu.to_vec().unwrap(); + metadata_vec[7] = 0xff; + let metadata_error = MetadataPduReader::from_bytes(&metadata_vec); + assert!(metadata_error.is_err()); + let error = metadata_error.unwrap_err(); + if let PduError::InvalidDirectiveType { found, expected } = error { + assert_eq!(found, 0xff); + assert_eq!(expected, Some(FileDirectiveType::MetadataPdu)); + assert_eq!( + error.to_string(), + "invalid directive type value 255, expected Some(MetadataPdu)" + ); + } else { + panic!("Expected InvalidDirectiveType error, got {:?}", error); + } + } + + #[test] + fn test_wrong_directive_code() { + let (_, _, metadata_pdu) = generic_metadata_pdu(CrcFlag::NoCrc, LargeFileFlag::Large, &[]); + let mut metadata_vec = metadata_pdu.to_vec().unwrap(); + metadata_vec[7] = FileDirectiveType::EofPdu as u8; + let metadata_error = MetadataPduReader::from_bytes(&metadata_vec); + assert!(metadata_error.is_err()); + let error = metadata_error.unwrap_err(); + if let PduError::WrongDirectiveType { found, expected } = error { + assert_eq!(found, FileDirectiveType::EofPdu); + assert_eq!(expected, FileDirectiveType::MetadataPdu); + assert_eq!( + error.to_string(), + "found directive type EofPdu, expected MetadataPdu" + ); + } else { + panic!("Expected InvalidDirectiveType error, got {:?}", error); + } + } #[test] fn test_corrects_pdu_header() { let pdu_header = PduHeader::new_for_file_data( diff --git a/src/cfdp/pdu/mod.rs b/src/cfdp/pdu/mod.rs index 3f7d344..2fb8516 100644 --- a/src/cfdp/pdu/mod.rs +++ b/src/cfdp/pdu/mod.rs @@ -32,7 +32,7 @@ pub enum FileDirectiveType { #[derive(Debug, Copy, Clone, PartialEq, Eq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum PduError { - ByteConversionError(ByteConversionError), + ByteConversion(ByteConversionError), /// Found version ID invalid, not equal to [CFDP_VERSION_2]. CfdpVersionMissmatch(u8), /// Invalid length for the entity ID detected. Only the values 1, 2, 4 and 8 are supported. @@ -55,7 +55,6 @@ pub enum PduError { found: u8, expected: Option, }, - InvalidSegmentRequestFormat, InvalidStartOrEndOfScopeValue, /// Invalid condition code. Contains the raw detected value. 
InvalidConditionCode(u8), @@ -80,9 +79,6 @@ impl Display for PduError { "invalid PDU entity ID length {raw_id}, only [1, 2, 4, 8] are allowed" ) } - PduError::InvalidSegmentRequestFormat => { - write!(f, "invalid segment request format for NAK PDU") - } PduError::InvalidStartOrEndOfScopeValue => { write!(f, "invalid start or end of scope for NAK PDU") } @@ -107,7 +103,7 @@ impl Display for PduError { "missmatch of PDU source length {src_id_len} and destination length {dest_id_len}" ) } - PduError::ByteConversionError(e) => { + PduError::ByteConversion(e) => { write!(f, "{}", e) } PduError::FileSizeTooLarge(value) => { @@ -145,7 +141,7 @@ impl Display for PduError { impl Error for PduError { fn source(&self) -> Option<&(dyn Error + 'static)> { match self { - PduError::ByteConversionError(e) => Some(e), + PduError::ByteConversion(e) => Some(e), PduError::TlvLvError(e) => Some(e), _ => None, } @@ -154,7 +150,7 @@ impl Error for PduError { impl From for PduError { fn from(value: ByteConversionError) -> Self { - Self::ByteConversionError(value) + Self::ByteConversion(value) } } @@ -526,7 +522,7 @@ impl PduHeader { /// function. pub fn from_bytes(buf: &[u8]) -> Result<(Self, usize), PduError> { if buf.len() < FIXED_HEADER_LEN { - return Err(PduError::ByteConversionError( + return Err(PduError::ByteConversion( ByteConversionError::FromSliceTooSmall { found: buf.len(), expected: FIXED_HEADER_LEN, @@ -689,6 +685,8 @@ pub(crate) fn add_pdu_crc(buf: &mut [u8], mut current_idx: usize) -> usize { #[cfg(test)] mod tests { + use alloc::string::ToString; + use crate::cfdp::pdu::{CommonPduConfig, PduError, PduHeader, FIXED_HEADER_LEN}; use crate::cfdp::{ CrcFlag, Direction, LargeFileFlag, PduType, SegmentMetadataFlag, SegmentationControl, @@ -853,6 +851,7 @@ mod tests { // 4 byte fixed header plus three bytes src, dest ID and transaction ID assert_eq!(res.unwrap(), 7); verify_raw_header(&pdu_header, &buf); + assert_eq!(pdu_header.pdu_datafield_len(), 5); } #[test] @@ -946,6 +945,10 @@ mod tests { let error = res.unwrap_err(); if let PduError::CfdpVersionMissmatch(raw_version) = error { assert_eq!(raw_version, CFDP_VERSION_2 + 1); + assert_eq!( + error.to_string(), + "cfdp version missmatch, found 2, expected 1" + ); } else { panic!("invalid exception: {}", error); } @@ -957,7 +960,7 @@ mod tests { let res = PduHeader::from_bytes(&buf); assert!(res.is_err()); let error = res.unwrap_err(); - if let PduError::ByteConversionError(ByteConversionError::FromSliceTooSmall { + if let PduError::ByteConversion(ByteConversionError::FromSliceTooSmall { found, expected, }) = error @@ -983,13 +986,17 @@ mod tests { let header = PduHeader::from_bytes(&buf[0..6]); assert!(header.is_err()); let error = header.unwrap_err(); - if let PduError::ByteConversionError(ByteConversionError::FromSliceTooSmall { + if let PduError::ByteConversion(ByteConversionError::FromSliceTooSmall { found, expected, }) = error { assert_eq!(found, 6); assert_eq!(expected, 7); + assert_eq!( + error.to_string(), + "source slice with size 6 too small, expected at least 7 bytes" + ); } } @@ -1017,6 +1024,10 @@ mod tests { let error = pdu_conf_res.unwrap_err(); if let PduError::InvalidEntityLen(len) = error { assert_eq!(len, 3); + assert_eq!( + error.to_string(), + "invalid PDU entity ID length 3, only [1, 2, 4, 8] are allowed" + ); } else { panic!("Invalid exception: {}", error) } @@ -1037,6 +1048,10 @@ mod tests { { assert_eq!(src_id_len, 1); assert_eq!(dest_id_len, 2); + assert_eq!( + error.to_string(), + "missmatch of PDU source length 1 and 
destination length 2" + ); } } @@ -1087,4 +1102,20 @@ mod tests { panic!("invalid exception {:?}", error) } } + + #[test] + fn test_pdu_error_clonable_and_comparable() { + let pdu_error = PduError::InvalidEntityLen(0); + let pdu_error_2 = pdu_error; + assert_eq!(pdu_error, pdu_error_2); + } + + #[test] + fn test_pdu_config_clonable_and_comparable() { + let common_pdu_cfg_0 = + CommonPduConfig::new_with_byte_fields(UbfU8::new(1), UbfU8::new(2), UbfU8::new(3)) + .expect("common config creation failed"); + let common_pdu_cfg_1 = common_pdu_cfg_0; + assert_eq!(common_pdu_cfg_0, common_pdu_cfg_1); + } } diff --git a/src/cfdp/pdu/nak.rs b/src/cfdp/pdu/nak.rs index 22d18f2..3a7a38a 100644 --- a/src/cfdp/pdu/nak.rs +++ b/src/cfdp/pdu/nak.rs @@ -398,7 +398,7 @@ impl<'seg_reqs> NakPduReader<'seg_reqs> { let end_of_scope; if pdu_header.common_pdu_conf().file_flag == LargeFileFlag::Large { if current_idx + 16 > buf.len() { - return Err(PduError::ByteConversionError( + return Err(PduError::ByteConversion( ByteConversionError::FromSliceTooSmall { found: buf.len(), expected: current_idx + 16, @@ -501,6 +501,8 @@ impl<'a, 'b> PartialEq> for NakPduReader<'b> { #[cfg(test)] mod tests { + use alloc::string::ToString; + use crate::cfdp::{ pdu::tests::{common_pdu_conf, verify_raw_header, TEST_DEST_ID, TEST_SEQ_NUM, TEST_SRC_ID}, PduType, TransmissionMode, @@ -734,14 +736,22 @@ mod tests { let pdu_conf = common_pdu_conf(CrcFlag::NoCrc, LargeFileFlag::Normal); let pdu_header = PduHeader::new_no_file_data(pdu_conf, 0); let u32_list = SegmentRequests::U32Pairs(&[(0, 50), (50, 100)]); - if let Err(PduError::InvalidStartOrEndOfScopeValue) = NakPduCreator::new_generic( + //let error = NakPduCreator::new_generic(pdu_header, 100, 300, Some(u32_list)); + let error = NakPduCreator::new_generic( pdu_header, u32::MAX as u64 + 1, u32::MAX as u64 + 2, Some(u32_list), - ) { + ); + assert!(error.is_err()); + let error = error.unwrap_err(); + if let PduError::InvalidStartOrEndOfScopeValue = error { + assert_eq!( + error.to_string(), + "invalid start or end of scope for NAK PDU" + ); } else { - panic!("API call did not fail"); + panic!("unexpected error {error}"); } } @@ -758,7 +768,7 @@ mod tests { assert!(error.is_err()); let e = error.unwrap_err(); match e { - PduError::ByteConversionError(conv_error) => match conv_error { + PduError::ByteConversion(conv_error) => match conv_error { ByteConversionError::ToSliceTooSmall { found, expected } => { assert_eq!(expected, nak_pdu.len_written()); assert_eq!(found, 5); diff --git a/src/cfdp/tlv/mod.rs b/src/cfdp/tlv/mod.rs index 2e779a1..685098d 100644 --- a/src/cfdp/tlv/mod.rs +++ b/src/cfdp/tlv/mod.rs @@ -5,6 +5,10 @@ use crate::cfdp::lv::{ use crate::cfdp::TlvLvError; use crate::util::{UnsignedByteField, UnsignedByteFieldError, UnsignedEnum}; use crate::ByteConversionError; +#[cfg(feature = "alloc")] +use alloc::vec; +#[cfg(feature = "alloc")] +use alloc::vec::Vec; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -13,6 +17,39 @@ pub mod msg_to_user; pub const MIN_TLV_LEN: usize = 2; +pub trait GenericTlv { + fn tlv_type_field(&self) -> TlvTypeField; + + /// Checks whether the type field contains one of the standard types specified in the CFDP + /// standard and is part of the [TlvType] enum. 
+ fn is_standard_tlv(&self) -> bool { + if let TlvTypeField::Standard(_) = self.tlv_type_field() { + return true; + } + false + } + + /// Returns the standard TLV type if the TLV field is not a custom field + fn tlv_type(&self) -> Option { + if let TlvTypeField::Standard(tlv_type) = self.tlv_type_field() { + Some(tlv_type) + } else { + None + } + } +} + +pub trait WritableTlv { + fn write_to_bytes(&self, buf: &mut [u8]) -> Result; + fn len_written(&self) -> usize; + #[cfg(feature = "alloc")] + fn to_vec(&self) -> Vec { + let mut buf = vec![0; self.len_written()]; + self.write_to_bytes(&mut buf).unwrap(); + buf + } +} + #[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[repr(u8)] @@ -95,6 +132,13 @@ impl<'data> Tlv<'data> { }) } + pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result { + Ok(Tlv { + tlv_type_field: TlvTypeField::Custom(tlv_type), + lv: Lv::new(data)?, + }) + } + /// Creates a TLV with an empty value field. pub fn new_empty(tlv_type: TlvType) -> Tlv<'data> { Tlv { @@ -103,53 +147,24 @@ impl<'data> Tlv<'data> { } } - /// Checks whether the type field contains one of the standard types specified in the CFDP - /// standard and is part of the [TlvType] enum. - pub fn is_standard_tlv(&self) -> bool { - if let TlvTypeField::Standard(_) = self.tlv_type_field { - return true; - } - false - } - - /// Returns the standard TLV type if the TLV field is not a custom field - pub fn tlv_type(&self) -> Option { - if let TlvTypeField::Standard(tlv_type) = self.tlv_type_field { - Some(tlv_type) - } else { - None - } - } - - pub fn tlv_type_field(&self) -> TlvTypeField { - self.tlv_type_field - } - - pub fn write_to_bytes(&self, buf: &mut [u8]) -> Result { - generic_len_check_data_serialization(buf, self.value().len(), MIN_TLV_LEN)?; - buf[0] = self.tlv_type_field.into(); - self.lv.write_to_be_bytes_no_len_check(&mut buf[1..]); - Ok(self.len_full()) - } - pub fn value(&self) -> &[u8] { self.lv.value() } + /// Checks whether the value field is empty. + pub fn is_empty(&self) -> bool { + self.value().is_empty() + } + /// Helper method to retrieve the length of the value. Simply calls the [slice::len] method of /// [Self::value] pub fn len_value(&self) -> usize { - self.lv.len_value() + self.value().len() } /// Returns the full raw length, including the length byte. pub fn len_full(&self) -> usize { - self.lv.len_full() + 1 - } - - /// Checks whether the value field is empty. - pub fn is_empty(&self) -> bool { - self.lv.is_empty() + self.len_value() + 2 } /// Creates a TLV give a raw bytestream. 
Please note that is is not necessary to pass the @@ -175,14 +190,34 @@ impl<'data> Tlv<'data> { } } +impl WritableTlv for Tlv<'_> { + fn write_to_bytes(&self, buf: &mut [u8]) -> Result { + generic_len_check_data_serialization(buf, self.value().len(), MIN_TLV_LEN)?; + buf[0] = self.tlv_type_field.into(); + self.lv.write_to_be_bytes_no_len_check(&mut buf[1..]); + Ok(self.len_full()) + } + fn len_written(&self) -> usize { + self.len_full() + } +} + +impl GenericTlv for Tlv<'_> { + fn tlv_type_field(&self) -> TlvTypeField { + self.tlv_type_field + } +} + pub(crate) fn verify_tlv_type(raw_type: u8, expected_tlv_type: TlvType) -> Result<(), TlvLvError> { - let tlv_type = TlvType::try_from(raw_type) - .map_err(|_| TlvLvError::InvalidTlvTypeField((raw_type, Some(expected_tlv_type as u8))))?; + let tlv_type = TlvType::try_from(raw_type).map_err(|_| TlvLvError::InvalidTlvTypeField { + found: raw_type, + expected: Some(expected_tlv_type.into()), + })?; if tlv_type != expected_tlv_type { - return Err(TlvLvError::InvalidTlvTypeField(( - tlv_type as u8, - Some(expected_tlv_type as u8), - ))); + return Err(TlvLvError::InvalidTlvTypeField { + found: tlv_type as u8, + expected: Some(expected_tlv_type as u8), + }); } Ok(()) } @@ -208,6 +243,10 @@ impl EntityIdTlv { Ok(()) } + pub fn entity_id(&self) -> &UnsignedByteField { + &self.entity_id + } + pub fn len_value(&self) -> usize { self.entity_id.size() } @@ -216,13 +255,6 @@ impl EntityIdTlv { 2 + self.entity_id.size() } - pub fn write_to_be_bytes(&self, buf: &mut [u8]) -> Result { - Self::len_check(buf)?; - buf[0] = TlvType::EntityId as u8; - buf[1] = self.entity_id.size() as u8; - self.entity_id.write_to_be_bytes(&mut buf[2..]) - } - pub fn from_bytes(buf: &[u8]) -> Result { Self::len_check(buf)?; verify_tlv_type(buf[0], TlvType::EntityId)?; @@ -241,13 +273,32 @@ impl EntityIdTlv { self.entity_id .write_to_be_bytes(&mut buf[2..2 + self.entity_id.size()])?; Tlv::new(TlvType::EntityId, &buf[2..2 + self.entity_id.size()]).map_err(|e| match e { - TlvLvError::ByteConversionError(e) => e, + TlvLvError::ByteConversion(e) => e, // All other errors are impossible. _ => panic!("unexpected TLV error"), }) } } +impl WritableTlv for EntityIdTlv { + fn write_to_bytes(&self, buf: &mut [u8]) -> Result { + Self::len_check(buf)?; + buf[0] = TlvType::EntityId as u8; + buf[1] = self.entity_id.size() as u8; + Ok(2 + self.entity_id.write_to_be_bytes(&mut buf[2..])?) 
+ } + + fn len_written(&self) -> usize { + self.len_full() + } +} + +impl GenericTlv for EntityIdTlv { + fn tlv_type_field(&self) -> TlvTypeField { + TlvTypeField::Standard(TlvType::EntityId) + } +} + impl<'data> TryFrom> for EntityIdTlv { type Error = TlvLvError; @@ -255,17 +306,17 @@ impl<'data> TryFrom> for EntityIdTlv { match value.tlv_type_field { TlvTypeField::Standard(tlv_type) => { if tlv_type != TlvType::EntityId { - return Err(TlvLvError::InvalidTlvTypeField(( - tlv_type as u8, - Some(TlvType::EntityId as u8), - ))); + return Err(TlvLvError::InvalidTlvTypeField { + found: tlv_type as u8, + expected: Some(TlvType::EntityId as u8), + }); } } TlvTypeField::Custom(val) => { - return Err(TlvLvError::InvalidTlvTypeField(( - val, - Some(TlvType::EntityId as u8), - ))); + return Err(TlvLvError::InvalidTlvTypeField { + found: val, + expected: Some(TlvType::EntityId as u8), + }); } } let len_value = value.value().len(); @@ -285,23 +336,50 @@ impl<'data> TryFrom> for EntityIdTlv { } } +pub fn fs_request_has_second_filename(action_code: FilestoreActionCode) -> bool { + if action_code == FilestoreActionCode::RenameFile + || action_code == FilestoreActionCode::AppendFile + || action_code == FilestoreActionCode::ReplaceFile + { + return true; + } + false +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +struct FilestoreTlvBase<'first_name, 'second_name> { + pub action_code: FilestoreActionCode, + #[cfg_attr(feature = "serde", serde(borrow))] + pub first_name: Lv<'first_name>, + #[cfg_attr(feature = "serde", serde(borrow))] + pub second_name: Option>, +} + +impl FilestoreTlvBase<'_, '_> { + fn base_len_value(&self) -> usize { + let mut len = 1 + self.first_name.len_full(); + if let Some(second_name) = self.second_name { + len += second_name.len_full(); + } + len + } +} + #[derive(Debug, Copy, Clone, PartialEq, Eq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct FilestoreRequestTlv<'first_name, 'second_name> { - action_code: FilestoreActionCode, #[cfg_attr(feature = "serde", serde(borrow))] - first_name: Lv<'first_name>, - #[cfg_attr(feature = "serde", serde(borrow))] - second_name: Option>, + base: FilestoreTlvBase<'first_name, 'second_name>, } impl<'first_name, 'second_name> FilestoreRequestTlv<'first_name, 'second_name> { - pub fn new_create_file(first_name: Lv<'first_name>) -> Result { - Self::new(FilestoreActionCode::CreateFile, first_name, None) + pub fn new_create_file(file_name: Lv<'first_name>) -> Result { + Self::new(FilestoreActionCode::CreateFile, file_name, None) } - pub fn new_delete_file(first_name: Lv<'first_name>) -> Result { - Self::new(FilestoreActionCode::DeleteFile, first_name, None) + pub fn new_delete_file(file_name: Lv<'first_name>) -> Result { + Self::new(FilestoreActionCode::DeleteFile, file_name, None) } pub fn new_rename_file( @@ -363,14 +441,14 @@ impl<'first_name, 'second_name> FilestoreRequestTlv<'first_name, 'second_name> { /// only one is passed. It will also returns [None] if the cumulative length of the first /// name and the second name exceeds 255 bytes. /// - /// This is the case for the rename, append and replace filestore request. + /// Two file paths are required for the rename, append and replace filestore request. 
pub fn new( action_code: FilestoreActionCode, first_name: Lv<'first_name>, second_name: Option>, ) -> Result { let mut base_value_len = first_name.len_full(); - if Self::has_second_filename(action_code) { + if fs_request_has_second_filename(action_code) { if second_name.is_none() { return Err(TlvLvError::SecondNameMissing); } @@ -380,71 +458,34 @@ impl<'first_name, 'second_name> FilestoreRequestTlv<'first_name, 'second_name> { return Err(TlvLvError::InvalidValueLength(base_value_len)); } Ok(Self { - action_code, - first_name, - second_name, + base: FilestoreTlvBase { + action_code, + first_name, + second_name, + }, }) } - pub fn has_second_filename(action_code: FilestoreActionCode) -> bool { - if action_code == FilestoreActionCode::RenameFile - || action_code == FilestoreActionCode::AppendFile - || action_code == FilestoreActionCode::ReplaceFile - { - return true; - } - false - } - pub fn action_code(&self) -> FilestoreActionCode { - self.action_code + self.base.action_code } pub fn first_name(&self) -> Lv<'first_name> { - self.first_name + self.base.first_name } pub fn second_name(&self) -> Option> { - self.second_name + self.base.second_name } pub fn len_value(&self) -> usize { - let mut len = 1 + self.first_name.len_full(); - if let Some(second_name) = self.second_name { - len += second_name.len_full(); - } - len + self.base.base_len_value() } pub fn len_full(&self) -> usize { 2 + self.len_value() } - pub fn write_to_bytes(&self, buf: &mut [u8]) -> Result { - if buf.len() < self.len_full() { - return Err(ByteConversionError::ToSliceTooSmall { - found: buf.len(), - expected: self.len_full(), - }); - } - buf[0] = TlvType::FilestoreRequest as u8; - buf[1] = self.len_value() as u8; - buf[2] = (self.action_code as u8) << 4; - let mut current_idx = 3; - // Length checks were already performed. - self.first_name.write_to_be_bytes_no_len_check( - &mut buf[current_idx..current_idx + self.first_name.len_full()], - ); - current_idx += self.first_name.len_full(); - if let Some(second_name) = self.second_name { - second_name.write_to_be_bytes_no_len_check( - &mut buf[current_idx..current_idx + second_name.len_full()], - ); - current_idx += second_name.len_full(); - } - Ok(current_idx) - } - pub fn from_bytes<'longest: 'first_name + 'second_name>( buf: &'longest [u8], ) -> Result { @@ -465,26 +506,253 @@ impl<'first_name, 'second_name> FilestoreRequestTlv<'first_name, 'second_name> { let mut second_name = None; current_idx += first_name.len_full(); - if Self::has_second_filename(action_code) { + if fs_request_has_second_filename(action_code) { if current_idx >= 2 + len { return Err(TlvLvError::SecondNameMissing); } second_name = Some(Lv::from_bytes(&buf[current_idx..])?); } Ok(Self { + base: FilestoreTlvBase { + action_code, + first_name, + second_name, + }, + }) + } +} + +impl WritableTlv for FilestoreRequestTlv<'_, '_> { + fn write_to_bytes(&self, buf: &mut [u8]) -> Result { + if buf.len() < self.len_full() { + return Err(ByteConversionError::ToSliceTooSmall { + found: buf.len(), + expected: self.len_full(), + }); + } + buf[0] = TlvType::FilestoreRequest as u8; + buf[1] = self.len_value() as u8; + buf[2] = (self.base.action_code as u8) << 4; + let mut current_idx = 3; + // Length checks were already performed. 
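+        // The remainder of the value field is the first file name LV, followed by the
+        // second file name LV for the rename, append and replace requests.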
+ self.base.first_name.write_to_be_bytes_no_len_check( + &mut buf[current_idx..current_idx + self.base.first_name.len_full()], + ); + current_idx += self.base.first_name.len_full(); + if let Some(second_name) = self.base.second_name { + second_name.write_to_be_bytes_no_len_check( + &mut buf[current_idx..current_idx + second_name.len_full()], + ); + current_idx += second_name.len_full(); + } + Ok(current_idx) + } + + fn len_written(&self) -> usize { + self.len_full() + } +} + +impl GenericTlv for FilestoreRequestTlv<'_, '_> { + fn tlv_type_field(&self) -> TlvTypeField { + TlvTypeField::Standard(TlvType::FilestoreRequest) + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub struct FilestoreResponseTlv<'first_name, 'second_name, 'fs_msg> { + #[cfg_attr(feature = "serde", serde(borrow))] + base: FilestoreTlvBase<'first_name, 'second_name>, + status_code: u8, + #[cfg_attr(feature = "serde", serde(borrow))] + filestore_message: Lv<'fs_msg>, +} + +impl<'first_name, 'second_name, 'fs_msg> FilestoreResponseTlv<'first_name, 'second_name, 'fs_msg> { + /// This function will return [None] if the respective action code requires two names but + /// only one is passed. It will also returns [None] if the cumulative length of the first + /// name and the second name exceeds 255 bytes. + /// + /// Two file paths are required for the rename, append and replace filestore request. + pub fn new_no_filestore_message( + action_code: FilestoreActionCode, + status_code: u8, + first_name: Lv<'first_name>, + second_name: Option>, + ) -> Result { + Self::new( action_code, + status_code, first_name, second_name, + Lv::new_empty(), + ) + } + pub fn new( + action_code: FilestoreActionCode, + status_code: u8, + first_name: Lv<'first_name>, + second_name: Option>, + filestore_message: Lv<'fs_msg>, + ) -> Result { + let mut base_value_len = first_name.len_full(); + if Self::has_second_filename(action_code) { + if second_name.is_none() { + return Err(TlvLvError::SecondNameMissing); + } + base_value_len += second_name.as_ref().unwrap().len_full(); + } + if base_value_len > u8::MAX as usize { + return Err(TlvLvError::InvalidValueLength(base_value_len)); + } + Ok(Self { + base: FilestoreTlvBase { + action_code, + first_name, + second_name, + }, + status_code, + filestore_message, }) } + + pub fn has_second_filename(action_code: FilestoreActionCode) -> bool { + if action_code == FilestoreActionCode::RenameFile + || action_code == FilestoreActionCode::AppendFile + || action_code == FilestoreActionCode::ReplaceFile + { + return true; + } + false + } + + pub fn action_code(&self) -> FilestoreActionCode { + self.base.action_code + } + + pub fn status_code(&self) -> u8 { + self.status_code + } + + pub fn first_name(&self) -> Lv<'first_name> { + self.base.first_name + } + + pub fn second_name(&self) -> Option> { + self.base.second_name + } + + pub fn len_value(&self) -> usize { + self.base.base_len_value() + self.filestore_message.len_full() + } + + pub fn len_full(&self) -> usize { + 2 + self.len_value() + } + + pub fn from_bytes<'buf: 'first_name + 'second_name + 'fs_msg>( + buf: &'buf [u8], + ) -> Result { + if buf.len() < 2 { + return Err(ByteConversionError::FromSliceTooSmall { + found: buf.len(), + expected: 2, + } + .into()); + } + verify_tlv_type(buf[0], TlvType::FilestoreResponse)?; + let len = buf[1] as usize; + let mut current_idx = 2; + let len_check = |current_idx: &mut usize, add_len: usize| -> Result<(), TlvLvError> { + if *current_idx + add_len > 
buf.len() { + return Err(ByteConversionError::FromSliceTooSmall { + found: buf.len(), + expected: *current_idx, + } + .into()); + } + Ok(()) + }; + len_check(&mut current_idx, len)?; + let action_code = FilestoreActionCode::try_from((buf[2] >> 4) & 0b1111) + .map_err(|_| TlvLvError::InvalidFilestoreActionCode((buf[2] >> 4) & 0b1111))?; + let status_code = buf[2] & 0b1111; + current_idx += 1; + let first_name = Lv::from_bytes(&buf[current_idx..])?; + len_check(&mut current_idx, first_name.len_full())?; + current_idx += first_name.len_full(); + + let mut second_name = None; + if Self::has_second_filename(action_code) { + if current_idx >= 2 + len { + return Err(TlvLvError::SecondNameMissing); + } + let second_name_lv = Lv::from_bytes(&buf[current_idx..])?; + current_idx += second_name_lv.len_full(); + second_name = Some(second_name_lv); + } + let filestore_message = Lv::from_bytes(&buf[current_idx..])?; + len_check(&mut current_idx, filestore_message.len_full())?; + Ok(Self { + base: FilestoreTlvBase { + action_code, + first_name, + second_name, + }, + status_code, + filestore_message, + }) + } +} + +impl WritableTlv for FilestoreResponseTlv<'_, '_, '_> { + fn write_to_bytes(&self, buf: &mut [u8]) -> Result { + if buf.len() < self.len_full() { + return Err(ByteConversionError::ToSliceTooSmall { + found: buf.len(), + expected: self.len_full(), + }); + } + buf[0] = TlvType::FilestoreResponse as u8; + buf[1] = self.len_value() as u8; + buf[2] = ((self.base.action_code as u8) << 4) | (self.status_code & 0b1111); + let mut current_idx = 3; + // Length checks were already performed. + self.base.first_name.write_to_be_bytes_no_len_check( + &mut buf[current_idx..current_idx + self.base.first_name.len_full()], + ); + current_idx += self.base.first_name.len_full(); + if let Some(second_name) = self.base.second_name { + current_idx += second_name.write_to_be_bytes_no_len_check( + &mut buf[current_idx..current_idx + second_name.len_full()], + ); + } + current_idx += self.filestore_message.write_to_be_bytes_no_len_check( + &mut buf[current_idx..current_idx + self.filestore_message.len_full()], + ); + Ok(current_idx) + } + + fn len_written(&self) -> usize { + self.len_full() + } +} + +impl GenericTlv for FilestoreResponseTlv<'_, '_, '_> { + fn tlv_type_field(&self) -> TlvTypeField { + TlvTypeField::Standard(TlvType::FilestoreResponse) + } } #[cfg(test)] mod tests { + use super::*; use crate::cfdp::lv::Lv; use crate::cfdp::tlv::{FilestoreActionCode, FilestoreRequestTlv, Tlv, TlvType, TlvTypeField}; use crate::cfdp::TlvLvError; - use crate::util::{UbfU8, UnsignedEnum}; + use crate::util::{UbfU16, UbfU8, UnsignedEnum}; + use alloc::string::ToString; const TLV_TEST_STR_0: &str = "hello.txt"; const TLV_TEST_STR_1: &str = "hello2.txt"; @@ -544,6 +812,45 @@ mod tests { assert_eq!(tlv_from_raw.value()[0], 5); } + #[test] + fn test_entity_id_tlv() { + let entity_id = UbfU16::new(0x0102); + let entity_id_tlv = EntityIdTlv::new(entity_id.into()); + let mut buf: [u8; 16] = [0; 16]; + let written_len = entity_id_tlv.write_to_bytes(&mut buf).unwrap(); + assert_eq!(written_len, entity_id_tlv.len_full()); + assert_eq!(entity_id_tlv.len_value(), 2); + assert!(entity_id_tlv.is_standard_tlv()); + assert_eq!(entity_id_tlv.tlv_type().unwrap(), TlvType::EntityId); + assert_eq!(buf[0], TlvType::EntityId as u8); + assert_eq!(buf[1], 2); + assert_eq!(u16::from_be_bytes(buf[2..4].try_into().unwrap()), 0x0102); + let entity_id_as_vec = entity_id_tlv.to_vec(); + assert_eq!(entity_id_as_vec, buf[0..written_len].to_vec()); + } + + 
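    // Editor's illustrative sketch (not part of the original patch): the new WritableTlv and
    // GenericTlv abstractions allow common checks to be expressed once for all TLV flavours.
    // This helper only relies on methods visible in this patch (write_to_bytes, len_written,
    // tlv_type_field) and assumes both traits are in scope via `super::*`.
    fn generic_tlv_roundtrip_check(tlv: &(impl WritableTlv + GenericTlv)) {
        let mut buf: [u8; 64] = [0; 64];
        let written = tlv.write_to_bytes(&mut buf).expect("writing TLV failed");
        // The number of bytes written must always equal the advertised written length.
        assert_eq!(written, tlv.len_written());
        // For standard TLVs, the first serialized byte mirrors the TLV type field.
        if let TlvTypeField::Standard(tlv_type) = tlv.tlv_type_field() {
            assert_eq!(buf[0], tlv_type as u8);
        }
    }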
#[test] + fn test_entity_id_from_generic_tlv() { + let entity_id = UbfU16::new(0x0102); + let entity_id_tlv = EntityIdTlv::new(entity_id.into()); + let mut buf: [u8; 16] = [0; 16]; + let entity_id_as_tlv: Tlv = entity_id_tlv.to_tlv(&mut buf).unwrap(); + let entity_id_converted_back: EntityIdTlv = entity_id_as_tlv.try_into().unwrap(); + assert_eq!(entity_id_converted_back, entity_id_tlv); + } + + #[test] + fn test_entity_id_from_raw() { + let entity_id = UbfU16::new(0x0102); + let entity_id_tlv = EntityIdTlv::new(entity_id.into()); + let mut buf: [u8; 16] = [0; 16]; + let _ = entity_id_tlv.write_to_bytes(&mut buf).unwrap(); + let entity_tlv_from_raw = + EntityIdTlv::from_bytes(&buf).expect("creating entity ID TLV failed"); + assert_eq!(entity_tlv_from_raw, entity_id_tlv); + assert_eq!(entity_tlv_from_raw.entity_id(), &entity_id.into()); + } + #[test] fn test_empty() { let tlv_empty = Tlv::new_empty(TlvType::MsgToUser); @@ -584,6 +891,41 @@ mod tests { assert!(tlv_empty.value().is_empty()); } + #[test] + fn test_write_buf_too_small() { + let mut buf: [u8; 2] = [0; 2]; + let fs_request = + FilestoreRequestTlv::new_create_file(Lv::new_from_str(TLV_TEST_STR_0).unwrap()) + .unwrap(); + let error = fs_request.write_to_bytes(&mut buf); + assert!(error.is_err()); + let error = error.unwrap_err(); + if let ByteConversionError::ToSliceTooSmall { found, expected } = error { + assert_eq!(found, 2); + assert_eq!(expected, 13); + } else { + panic!("unexpected error {:?}", error); + } + } + + #[test] + fn test_read_from_buf_too_small() { + let buf: [u8; 1] = [0; 1]; + let error = FilestoreRequestTlv::from_bytes(&buf); + assert!(error.is_err()); + let error = error.unwrap_err(); + if let TlvLvError::ByteConversion(ByteConversionError::FromSliceTooSmall { + found, + expected, + }) = error + { + assert_eq!(found, 1); + assert_eq!(expected, 2); + } else { + panic!("unexpected error {:?}", error); + } + } + #[test] fn test_buf_too_large() { let buf_too_large: [u8; u8::MAX as usize + 1] = [0; u8::MAX as usize + 1]; @@ -592,6 +934,10 @@ mod tests { let error = tlv_res.unwrap_err(); if let TlvLvError::DataTooLarge(size) = error { assert_eq!(size, u8::MAX as usize + 1); + assert_eq!( + error.to_string(), + "data with size 256 larger than allowed 255 bytes" + ); } else { panic!("unexpected error {:?}", error); } @@ -607,6 +953,7 @@ mod tests { assert!(tlv.is_ok()); let tlv = tlv.unwrap(); assert_eq!(tlv.tlv_type_field(), TlvTypeField::Custom(3)); + assert!(!tlv.is_standard_tlv()); assert_eq!(tlv.value().len(), 1); assert_eq!(tlv.len_full(), 3); } @@ -614,7 +961,7 @@ mod tests { fn generic_fs_request_test_one_file( action_code: FilestoreActionCode, ) -> FilestoreRequestTlv<'static, 'static> { - assert!(!FilestoreRequestTlv::has_second_filename(action_code)); + assert!(!fs_request_has_second_filename(action_code)); let first_name = Lv::new_from_str(TLV_TEST_STR_0).unwrap(); let fs_request = match action_code { FilestoreActionCode::CreateFile => FilestoreRequestTlv::new_create_file(first_name), @@ -644,7 +991,7 @@ mod tests { fn generic_fs_request_test_two_files( action_code: FilestoreActionCode, ) -> FilestoreRequestTlv<'static, 'static> { - assert!(FilestoreRequestTlv::has_second_filename(action_code)); + assert!(fs_request_has_second_filename(action_code)); let first_name = Lv::new_from_str(TLV_TEST_STR_0).unwrap(); let second_name = Lv::new_from_str(TLV_TEST_STR_1).unwrap(); let fs_request = match action_code { @@ -665,7 +1012,12 @@ mod tests { fs_request.len_value(), 1 + first_name.len_full() + 
second_name.len_full() ); + assert_eq!( + fs_request.tlv_type_field(), + TlvTypeField::Standard(TlvType::FilestoreRequest) + ); assert_eq!(fs_request.len_full(), fs_request.len_value() + 2); + assert_eq!(fs_request.len_written(), fs_request.len_full()); assert_eq!(fs_request.action_code(), action_code); assert_eq!(fs_request.first_name(), first_name); assert!(fs_request.second_name().is_some()); @@ -795,4 +1147,150 @@ mod tests { let req_conv_back = req_conv_back.unwrap(); assert_eq!(req_conv_back, req); } + + #[test] + fn test_fs_response_state_one_path() { + let lv_0 = Lv::new_from_str(TLV_TEST_STR_0).unwrap(); + let response = FilestoreResponseTlv::new_no_filestore_message( + FilestoreActionCode::CreateFile, + 0b0001, + lv_0, + None, + ) + .expect("creating response failed"); + assert_eq!(response.status_code(), 0b0001); + assert_eq!(response.action_code(), FilestoreActionCode::CreateFile); + assert_eq!(response.first_name(), lv_0); + assert!(response.second_name().is_none()); + } + #[test] + fn test_fs_response_state_two_paths() { + let lv_0 = Lv::new_from_str(TLV_TEST_STR_0).unwrap(); + let lv_1 = Lv::new_from_str(TLV_TEST_STR_1).unwrap(); + let response = FilestoreResponseTlv::new_no_filestore_message( + FilestoreActionCode::RenameFile, + 0b0001, + lv_0, + Some(lv_1), + ) + .expect("creating response failed"); + assert_eq!(response.status_code(), 0b0001); + assert_eq!(response.action_code(), FilestoreActionCode::RenameFile); + assert_eq!(response.first_name(), lv_0); + assert!(response.second_name().is_some()); + assert!(response.second_name().unwrap() == lv_1); + assert_eq!( + response.len_full(), + 2 + 1 + lv_0.len_full() + lv_1.len_full() + 1 + ); + } + + #[test] + fn test_fs_response_serialization() { + let lv_0 = Lv::new_from_str(TLV_TEST_STR_0).unwrap(); + let response = FilestoreResponseTlv::new_no_filestore_message( + FilestoreActionCode::CreateFile, + 0b0001, + lv_0, + None, + ) + .expect("creating response failed"); + let mut buf: [u8; 32] = [0; 32]; + let written_len = response.write_to_bytes(&mut buf).unwrap(); + assert_eq!(written_len, 2 + 1 + lv_0.len_full() + 1); + assert_eq!(buf[0], TlvType::FilestoreResponse as u8); + assert_eq!(buf[1], written_len as u8 - 2); + assert_eq!( + (buf[2] >> 4) & 0b1111, + FilestoreActionCode::CreateFile as u8 + ); + assert_eq!(buf[2] & 0b1111, 0b0001); + let lv_read_back = Lv::from_bytes(&buf[3..]).unwrap(); + assert_eq!(lv_0, lv_read_back); + let current_idx = 3 + lv_0.len_full(); + let fs_msg_empty = Lv::from_bytes(&buf[current_idx..]).unwrap(); + assert!(fs_msg_empty.is_empty()); + } + + #[test] + fn test_fs_response_deserialization() { + let lv_0 = Lv::new_from_str(TLV_TEST_STR_0).unwrap(); + let response = FilestoreResponseTlv::new_no_filestore_message( + FilestoreActionCode::CreateFile, + 0b0001, + lv_0, + None, + ) + .expect("creating response failed"); + let mut buf: [u8; 32] = [0; 32]; + response.write_to_bytes(&mut buf).unwrap(); + let response_read_back = FilestoreResponseTlv::from_bytes(&buf).unwrap(); + assert_eq!(response_read_back, response); + } + + #[test] + fn test_entity_it_tlv_to_tlv() { + let entity_id = UbfU16::new(0x0102); + let entity_id_tlv = EntityIdTlv::new(entity_id.into()); + let mut binding = [0; 16]; + let tlv = entity_id_tlv.to_tlv(&mut binding).unwrap(); + assert_eq!( + tlv.tlv_type_field(), + TlvTypeField::Standard(TlvType::EntityId) + ); + assert_eq!(tlv.len_full(), 4); + assert_eq!(tlv.len_value(), 2); + assert_eq!(tlv.value(), &[0x01, 0x02]); + } + + #[test] + fn test_invalid_tlv_conversion() { + let 
msg_to_user_tlv = Tlv::new_empty(TlvType::MsgToUser); + let error = EntityIdTlv::try_from(msg_to_user_tlv); + assert!(error.is_err()); + let error = error.unwrap_err(); + if let TlvLvError::InvalidTlvTypeField { found, expected } = error { + assert_eq!(found, TlvType::MsgToUser as u8); + assert_eq!(expected, Some(TlvType::EntityId as u8)); + assert_eq!( + error.to_string(), + "invalid TLV type field, found 2, expected Some(6)" + ); + } else { + panic!("unexpected error"); + } + } + + #[test] + fn test_entity_id_invalid_value_len() { + let entity_id = UbfU16::new(0x0102); + let entity_id_tlv = EntityIdTlv::new(entity_id.into()); + let mut buf: [u8; 32] = [0; 32]; + entity_id_tlv.write_to_bytes(&mut buf).unwrap(); + buf[1] = 12; + let error = EntityIdTlv::from_bytes(&buf); + assert!(error.is_err()); + let error = error.unwrap_err(); + if let TlvLvError::InvalidValueLength(len) = error { + assert_eq!(len, 12); + assert_eq!(error.to_string(), "invalid value length 12"); + } else { + panic!("unexpected error"); + } + } + + #[test] + fn test_custom_tlv() { + let custom_tlv = Tlv::new_with_custom_type(20, &[]).unwrap(); + assert!(custom_tlv.tlv_type().is_none()); + if let TlvTypeField::Custom(val) = custom_tlv.tlv_type_field() { + assert_eq!(val, 20); + } else { + panic!("unexpected type field"); + } + let tlv_as_vec = custom_tlv.to_vec(); + assert_eq!(tlv_as_vec.len(), 2); + assert_eq!(tlv_as_vec[0], 20); + assert_eq!(tlv_as_vec[1], 0); + } } diff --git a/src/cfdp/tlv/msg_to_user.rs b/src/cfdp/tlv/msg_to_user.rs index 205d2d8..17cdb0f 100644 --- a/src/cfdp/tlv/msg_to_user.rs +++ b/src/cfdp/tlv/msg_to_user.rs @@ -1,5 +1,5 @@ //! Abstractions for the Message to User CFDP TLV subtype. -use super::{Tlv, TlvLvError, TlvType, TlvTypeField}; +use super::{GenericTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv}; use crate::ByteConversionError; use delegate::delegate; @@ -18,8 +18,6 @@ impl<'data> MsgToUserTlv<'data> { delegate! { to self.tlv { - pub fn tlv_type_field(&self) -> TlvTypeField; - pub fn write_to_bytes(&self, buf: &mut [u8]) -> Result; pub fn value(&self) -> &[u8]; /// Helper method to retrieve the length of the value. 
Simply calls the [slice::len] method of /// [Self::value] @@ -59,29 +57,48 @@ impl<'data> MsgToUserTlv<'data> { let msg_to_user = Self { tlv: Tlv::from_bytes(buf)?, }; - match msg_to_user.tlv_type_field() { + match msg_to_user.tlv.tlv_type_field() { TlvTypeField::Standard(tlv_type) => { if tlv_type != TlvType::MsgToUser { - return Err(TlvLvError::InvalidTlvTypeField(( - tlv_type as u8, - Some(TlvType::MsgToUser as u8), - ))); + return Err(TlvLvError::InvalidTlvTypeField { + found: tlv_type as u8, + expected: Some(TlvType::MsgToUser as u8), + }); } } TlvTypeField::Custom(raw) => { - return Err(TlvLvError::InvalidTlvTypeField(( - raw, - Some(TlvType::MsgToUser as u8), - ))); + return Err(TlvLvError::InvalidTlvTypeField { + found: raw, + expected: Some(TlvType::MsgToUser as u8), + }); } } Ok(msg_to_user) } } +impl WritableTlv for MsgToUserTlv<'_> { + fn len_written(&self) -> usize { + self.len_full() + } + + delegate!( + to self.tlv { + fn write_to_bytes(&self, buf: &mut [u8]) -> Result; + } + ); +} + +impl GenericTlv for MsgToUserTlv<'_> { + fn tlv_type_field(&self) -> TlvTypeField { + self.tlv.tlv_type_field() + } +} + #[cfg(test)] mod tests { use super::*; + #[test] fn test_basic() { let custom_value: [u8; 4] = [1, 2, 3, 4]; @@ -90,6 +107,10 @@ mod tests { let msg_to_user = msg_to_user.unwrap(); assert!(msg_to_user.is_standard_tlv()); assert_eq!(msg_to_user.tlv_type().unwrap(), TlvType::MsgToUser); + assert_eq!( + msg_to_user.tlv_type_field(), + TlvTypeField::Standard(TlvType::MsgToUser) + ); assert_eq!(msg_to_user.value(), custom_value); assert_eq!(msg_to_user.value().len(), 4); assert_eq!(msg_to_user.len_value(), 4); @@ -99,6 +120,48 @@ mod tests { assert!(!msg_to_user.is_reserved_cfdp_msg()); } + #[test] + fn test_reserved_msg_serialization() { + let custom_value: [u8; 4] = [1, 2, 3, 4]; + let msg_to_user = MsgToUserTlv::new(&custom_value).unwrap(); + let mut buf: [u8; 6] = [0; 6]; + msg_to_user.write_to_bytes(&mut buf).unwrap(); + assert_eq!( + buf, + [ + TlvType::MsgToUser as u8, + custom_value.len() as u8, + 1, + 2, + 3, + 4 + ] + ); + } + + #[test] + fn test_reserved_msg_deserialization() { + let custom_value: [u8; 3] = [1, 2, 3]; + let msg_to_user = MsgToUserTlv::new(&custom_value).unwrap(); + let msg_to_user_vec = msg_to_user.to_vec(); + let msg_to_user_from_bytes = MsgToUserTlv::from_bytes(&msg_to_user_vec).unwrap(); + assert!(!msg_to_user.is_reserved_cfdp_msg()); + assert_eq!(msg_to_user_from_bytes, msg_to_user); + assert_eq!(msg_to_user_from_bytes.value(), msg_to_user.value()); + assert_eq!(msg_to_user_from_bytes.tlv_type(), msg_to_user.tlv_type()); + } + #[test] + fn test_reserved_msg_deserialization_invalid_type() { + let trash: [u8; 5] = [TlvType::FlowLabel as u8, 3, 1, 2, 3]; + let error = MsgToUserTlv::from_bytes(&trash).unwrap_err(); + if let TlvLvError::InvalidTlvTypeField { found, expected } = error { + assert_eq!(found, TlvType::FlowLabel as u8); + assert_eq!(expected, Some(TlvType::MsgToUser as u8)); + } else { + panic!("Wrong error type returned: {:?}", error); + } + } + #[test] fn test_reserved_msg() { let reserved_str = "cfdp"; diff --git a/src/ecss/hk.rs b/src/ecss/hk.rs index 5bf7ce5..5af3547 100644 --- a/src/ecss/hk.rs +++ b/src/ecss/hk.rs @@ -29,3 +29,33 @@ pub enum Subservice { TcGenerateOneShotDiag = 28, TcModifyDiagCollectionInterval = 32, } + +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn test_try_from_u8() { + let hk_report_subservice_raw = 25; + let hk_report: Subservice = Subservice::try_from(hk_report_subservice_raw).unwrap(); + 
assert_eq!(hk_report, Subservice::TmHkPacket); + } + + #[test] + fn test_into_u8() { + let hk_report_raw: u8 = Subservice::TmHkPacket.into(); + assert_eq!(hk_report_raw, 25); + } + + #[test] + fn test_partial_eq() { + let hk_report_raw = Subservice::TmHkPacket; + assert_ne!(hk_report_raw, Subservice::TcGenerateOneShotHk); + assert_eq!(hk_report_raw, Subservice::TmHkPacket); + } + #[test] + fn test_copy_clone() { + let hk_report = Subservice::TmHkPacket; + let hk_report_copy = hk_report; + assert_eq!(hk_report, hk_report_copy); + } +} diff --git a/src/ecss/mod.rs b/src/ecss/mod.rs index ab03582..19d44a6 100644 --- a/src/ecss/mod.rs +++ b/src/ecss/mod.rs @@ -74,6 +74,7 @@ pub enum PusServiceId { /// All PUS versions. Only PUS C is supported by this library. #[derive(PartialEq, Eq, Copy, Clone, Debug)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[non_exhaustive] pub enum PusVersion { EsaPus = 0, PusA = 1, @@ -95,8 +96,9 @@ impl TryFrom for PusVersion { } /// ECSS Packet Type Codes (PTC)s. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Eq, PartialEq, IntoPrimitive, TryFromPrimitive)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[repr(u8)] pub enum PacketTypeCodes { Boolean = 1, Enumerated = 2, @@ -115,9 +117,10 @@ pub enum PacketTypeCodes { pub type Ptc = PacketTypeCodes; /// ECSS Packet Field Codes (PFC)s for the unsigned [Ptc]. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Eq, PartialEq, IntoPrimitive, TryFromPrimitive)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub enum UnsignedPfc { +#[repr(u8)] +pub enum PfcUnsigned { OneByte = 4, TwelveBits = 8, TwoBytes = 12, @@ -131,9 +134,10 @@ pub enum UnsignedPfc { } /// ECSS Packet Field Codes (PFC)s for the real (floating point) [Ptc]. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Eq, PartialEq, IntoPrimitive, TryFromPrimitive)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub enum RealPfc { +#[repr(u8)] +pub enum PfcReal { /// 4 octets simple precision format (IEEE) Float = 1, /// 8 octets simple precision format (IEEE) @@ -148,9 +152,7 @@ pub enum RealPfc { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum PusError { VersionNotSupported(PusVersion), - IncorrectCrc(u16), - RawDataTooShort(usize), - NoRawData, + ChecksumFailure(u16), /// CRC16 needs to be calculated first CrcCalculationMissing, ByteConversion(ByteConversionError), @@ -162,23 +164,14 @@ impl Display for PusError { PusError::VersionNotSupported(v) => { write!(f, "PUS version {v:?} not supported") } - PusError::IncorrectCrc(crc) => { - write!(f, "crc16 {crc:#04x} is incorrect") - } - PusError::RawDataTooShort(size) => { - write!( - f, - "deserialization error, provided raw data with size {size} too short" - ) - } - PusError::NoRawData => { - write!(f, "no raw data provided") + PusError::ChecksumFailure(crc) => { + write!(f, "checksum verification for crc16 {crc:#06x} failed") } PusError::CrcCalculationMissing => { write!(f, "crc16 was not calculated") } PusError::ByteConversion(e) => { - write!(f, "low level byte conversion error: {e}") + write!(f, "pus error: {e}") } } } @@ -214,7 +207,11 @@ pub trait PusPacket: CcsdsPacket { pub(crate) fn crc_from_raw_data(raw_data: &[u8]) -> Result { if raw_data.len() < 2 { - return Err(PusError::RawDataTooShort(raw_data.len())); + return Err(ByteConversionError::FromSliceTooSmall { + found: raw_data.len(), + expected: 2, + } + .into()); } Ok(u16::from_be_bytes( raw_data[raw_data.len() - 2..raw_data.len()] @@ -250,11 +247,14 @@ pub(crate) fn crc_procedure( pub(crate) fn user_data_from_raw( current_idx: usize, total_len: usize, - raw_data_len: usize, slice: &[u8], ) -> Result<&[u8], PusError> { match current_idx { - _ if current_idx > total_len - 2 => Err(PusError::RawDataTooShort(raw_data_len)), + _ if current_idx > total_len - 2 => Err(ByteConversionError::FromSliceTooSmall { + found: total_len - 2, + expected: current_idx, + } + .into()), _ => Ok(&slice[current_idx..total_len - 2]), } } @@ -265,7 +265,7 @@ pub(crate) fn verify_crc16_ccitt_false_from_raw_to_pus_error( ) -> Result<(), PusError> { verify_crc16_ccitt_false_from_raw(raw_data) .then(|| ()) - .ok_or(PusError::IncorrectCrc(crc16)) + .ok_or(PusError::ChecksumFailure(crc16)) } pub(crate) fn verify_crc16_ccitt_false_from_raw(raw_data: &[u8]) -> bool { @@ -315,11 +315,11 @@ pub trait EcssEnumerationExt: EcssEnumeration + Debug + Copy + Clone + PartialEq #[derive(Debug, Copy, Clone, Eq, PartialEq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct GenericEcssEnumWrapper { +pub struct GenericEcssEnumWrapper { field: GenericUnsignedByteField, } -impl GenericEcssEnumWrapper { +impl GenericEcssEnumWrapper { pub const fn ptc() -> PacketTypeCodes { PacketTypeCodes::Enumerated } @@ -331,7 +331,7 @@ impl GenericEcssEnumWrapper { } } -impl UnsignedEnum for GenericEcssEnumWrapper { +impl UnsignedEnum for GenericEcssEnumWrapper { fn size(&self) -> usize { (self.pfc() / 8) as usize } @@ -341,7 +341,7 @@ impl UnsignedEnum for GenericEcssEnumWrapper { } } -impl EcssEnumeration for GenericEcssEnumWrapper { +impl EcssEnumeration for GenericEcssEnumWrapper { fn pfc(&self) -> u8 { size_of::() as u8 * 8_u8 } @@ -376,13 +376,22 @@ pub trait WritablePusPacket { #[cfg(test)] mod tests { + use 
alloc::string::ToString; + use crate::ecss::{EcssEnumU16, EcssEnumU32, EcssEnumU8, UnsignedEnum}; use crate::ByteConversionError; + use super::*; + #[cfg(feature = "serde")] + use crate::tests::generic_serde_test; + #[test] fn test_enum_u8() { let mut buf = [0, 0, 0]; let my_enum = EcssEnumU8::new(1); + assert_eq!(EcssEnumU8::ptc(), Ptc::Enumerated); + assert_eq!(my_enum.size(), 1); + assert_eq!(my_enum.pfc(), 8); my_enum .write_to_be_bytes(&mut buf[1..2]) .expect("To byte conversion of u8 failed"); @@ -396,6 +405,8 @@ mod tests { my_enum .write_to_be_bytes(&mut buf[1..3]) .expect("To byte conversion of u8 failed"); + assert_eq!(my_enum.size(), 2); + assert_eq!(my_enum.pfc(), 16); assert_eq!(buf[1], 0x1f); assert_eq!(buf[2], 0x2f); } @@ -448,4 +459,78 @@ mod tests { } } } + + #[test] + fn test_pus_error_display() { + let unsupport_version = PusError::VersionNotSupported(super::PusVersion::EsaPus); + let write_str = unsupport_version.to_string(); + assert_eq!(write_str, "PUS version EsaPus not supported") + } + + #[test] + fn test_service_id_from_u8() { + let verification_id_raw = 1; + let verification_id = PusServiceId::try_from(verification_id_raw).unwrap(); + assert_eq!(verification_id, PusServiceId::Verification); + } + + #[test] + fn test_ptc_from_u8() { + let ptc_raw = Ptc::AbsoluteTime as u8; + let ptc = Ptc::try_from(ptc_raw).unwrap(); + assert_eq!(ptc, Ptc::AbsoluteTime); + } + + #[test] + fn test_unsigned_pfc_from_u8() { + let pfc_raw = PfcUnsigned::OneByte as u8; + let pfc = PfcUnsigned::try_from(pfc_raw).unwrap(); + assert_eq!(pfc, PfcUnsigned::OneByte); + } + + #[test] + fn test_real_pfc_from_u8() { + let pfc_raw = PfcReal::Double as u8; + let pfc = PfcReal::try_from(pfc_raw).unwrap(); + assert_eq!(pfc, PfcReal::Double); + } + + #[test] + fn test_pus_error_eq_impl() { + assert_eq!( + PusError::VersionNotSupported(PusVersion::EsaPus), + PusError::VersionNotSupported(PusVersion::EsaPus) + ); + } + + #[test] + fn test_pus_error_clonable() { + let pus_error = PusError::ChecksumFailure(0x0101); + let cloned = pus_error; + assert_eq!(pus_error, cloned); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_pus_service_id() { + generic_serde_test(PusServiceId::Verification); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_ptc() { + generic_serde_test(Ptc::AbsoluteTime); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_pfc_unsigned() { + generic_serde_test(PfcUnsigned::EightBytes); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_pfc_real() { + generic_serde_test(PfcReal::Double); + } } diff --git a/src/ecss/scheduling.rs b/src/ecss/scheduling.rs index c6c36ea..d9b50b1 100644 --- a/src/ecss/scheduling.rs +++ b/src/ecss/scheduling.rs @@ -76,6 +76,8 @@ pub enum TimeWindowType { #[cfg(test)] mod tests { use super::*; + #[cfg(feature = "serde")] + use crate::tests::generic_serde_test; #[test] fn test_bool_conv_0() { @@ -102,4 +104,22 @@ mod tests { let subservice: Subservice = 22u8.try_into().unwrap(); assert_eq!(subservice, Subservice::TcCreateScheduleGroup); } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_subservice_id() { + generic_serde_test(Subservice::TcEnableScheduling); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_sched_status() { + generic_serde_test(SchedStatus::Enabled); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_time_window_type() { + generic_serde_test(TimeWindowType::SelectAll); + } } diff --git a/src/ecss/tc.rs b/src/ecss/tc.rs index 6e7bc53..7e7c938 100644 --- a/src/ecss/tc.rs +++ 
b/src/ecss/tc.rs @@ -40,6 +40,7 @@ use crate::{ByteConversionError, CcsdsPacket, PacketType, SequenceFlags, CCSDS_H use crate::{SpHeader, CRC_CCITT_FALSE}; use core::mem::size_of; use delegate::delegate; +use num_enum::{IntoPrimitive, TryFromPrimitive}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; use zerocopy::AsBytes; @@ -58,7 +59,9 @@ const PUS_VERSION: PusVersion = PusVersion::PusC; /// Marker trait for PUS telecommand structures. pub trait IsPusTelecommand {} -#[derive(Copy, Clone, PartialEq, Debug)] +#[derive(Debug, Eq, PartialEq, Copy, Clone, IntoPrimitive, TryFromPrimitive)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[repr(u8)] enum AckOpts { Acceptance = 0b1000, Start = 0b0100, @@ -405,14 +408,22 @@ pub mod legacy_tc { pub fn from_bytes(slice: &'raw_data [u8]) -> Result<(Self, usize), PusError> { let raw_data_len = slice.len(); if raw_data_len < PUS_TC_MIN_LEN_WITHOUT_APP_DATA { - return Err(PusError::RawDataTooShort(raw_data_len)); + return Err(ByteConversionError::FromSliceTooSmall { + found: raw_data_len, + expected: PUS_TC_MIN_LEN_WITHOUT_APP_DATA, + } + .into()); } let mut current_idx = 0; let (sp_header, _) = SpHeader::from_be_bytes(&slice[0..CCSDS_HEADER_LEN])?; current_idx += CCSDS_HEADER_LEN; let total_len = sp_header.total_len(); if raw_data_len < total_len || total_len < PUS_TC_MIN_LEN_WITHOUT_APP_DATA { - return Err(PusError::RawDataTooShort(raw_data_len)); + return Err(ByteConversionError::FromSliceTooSmall { + found: raw_data_len, + expected: total_len, + } + .into()); } let sec_header = zc::PusTcSecondaryHeader::from_bytes( &slice[current_idx..current_idx + PUC_TC_SECONDARY_HEADER_LEN], @@ -424,7 +435,7 @@ pub mod legacy_tc { sp_header, sec_header: PusTcSecondaryHeader::try_from(sec_header).unwrap(), raw_data: Some(raw_data), - app_data: user_data_from_raw(current_idx, total_len, raw_data_len, slice)?, + app_data: user_data_from_raw(current_idx, total_len, slice)?, calc_crc_on_serialization: false, crc16: Some(crc_from_raw_data(raw_data)?), }; @@ -748,14 +759,29 @@ impl<'raw_data> PusTcReader<'raw_data> { pub fn new(slice: &'raw_data [u8]) -> Result<(Self, usize), PusError> { let raw_data_len = slice.len(); if raw_data_len < PUS_TC_MIN_LEN_WITHOUT_APP_DATA { - return Err(PusError::RawDataTooShort(raw_data_len)); + return Err(ByteConversionError::FromSliceTooSmall { + found: raw_data_len, + expected: PUS_TC_MIN_LEN_WITHOUT_APP_DATA, + } + .into()); } let mut current_idx = 0; let (sp_header, _) = SpHeader::from_be_bytes(&slice[0..CCSDS_HEADER_LEN])?; current_idx += CCSDS_HEADER_LEN; let total_len = sp_header.total_len(); - if raw_data_len < total_len || total_len < PUS_TC_MIN_LEN_WITHOUT_APP_DATA { - return Err(PusError::RawDataTooShort(raw_data_len)); + if raw_data_len < total_len { + return Err(ByteConversionError::FromSliceTooSmall { + found: raw_data_len, + expected: total_len, + } + .into()); + } + if total_len < PUS_TC_MIN_LEN_WITHOUT_APP_DATA { + return Err(ByteConversionError::FromSliceTooSmall { + found: total_len, + expected: PUS_TC_MIN_LEN_WITHOUT_APP_DATA, + } + .into()); } let sec_header = zc::PusTcSecondaryHeader::from_bytes( &slice[current_idx..current_idx + PUC_TC_SECONDARY_HEADER_LEN], @@ -767,7 +793,7 @@ impl<'raw_data> PusTcReader<'raw_data> { sp_header, sec_header: PusTcSecondaryHeader::try_from(sec_header).unwrap(), raw_data, - app_data: user_data_from_raw(current_idx, total_len, raw_data_len, slice)?, + app_data: user_data_from_raw(current_idx, total_len, slice)?, crc16: crc_from_raw_data(raw_data)?, }; 
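        // Editor's note (illustrative, not part of the original patch): the CRC16/CCITT-FALSE
        // checksum covers every byte of the packet except the two trailing CRC bytes themselves
        // (compare verify_raw_ping_reply in the TM tests, which digests buf[0..len - 2]). With
        // this patch a mismatch surfaces as PusError::ChecksumFailure(crc) instead of the
        // removed IncorrectCrc variant.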
verify_crc16_ccitt_false_from_raw_to_pus_error(raw_data, pus_tc.crc16)?; @@ -847,14 +873,17 @@ impl PartialEq> for PusTcCreator<'_> { #[cfg(all(test, feature = "std"))] mod tests { - use crate::ecss::tc::{ - GenericPusTcSecondaryHeader, PusTcCreator, PusTcReader, PusTcSecondaryHeader, ACK_ALL, - }; + use std::error::Error; + + use super::*; use crate::ecss::PusVersion::PusC; use crate::ecss::{PusError, PusPacket, WritablePusPacket}; use crate::{ByteConversionError, SpHeader}; use crate::{CcsdsPacket, SequenceFlags}; + use alloc::string::ToString; use alloc::vec::Vec; + #[cfg(feature = "serde")] + use postcard::{from_bytes, to_allocvec}; fn base_ping_tc_full_ctor() -> PusTcCreator<'static> { let mut sph = SpHeader::tc_unseg(0x02, 0x34, 0).unwrap(); @@ -886,6 +915,10 @@ mod tests { .write_to_bytes(test_buf.as_mut_slice()) .expect("Error writing TC to buffer"); assert_eq!(size, 13); + assert_eq!( + pus_tc.crc16().unwrap(), + u16::from_be_bytes(test_buf[size - 2..size].try_into().unwrap()) + ); } #[test] @@ -940,11 +973,31 @@ mod tests { assert_eq!(size, 16); verify_test_tc_with_reader(&tc_from_raw, true, 16); let user_data = tc_from_raw.user_data(); + assert_eq!(tc_from_raw.user_data(), tc_from_raw.app_data()); + assert_eq!(tc_from_raw.raw_data(), &test_buf[..size]); + assert_eq!( + tc_from_raw.crc16().unwrap(), + u16::from_be_bytes(test_buf[size - 2..size].try_into().unwrap()) + ); assert_eq!(user_data[0], 1); assert_eq!(user_data[1], 2); assert_eq!(user_data[2], 3); } + #[test] + fn test_reader_eq() { + let pus_tc = base_ping_tc_simple_ctor_with_app_data(&[1, 2, 3]); + let mut test_buf: [u8; 32] = [0; 32]; + pus_tc + .write_to_bytes(test_buf.as_mut_slice()) + .expect("Error writing TC to buffer"); + let (tc_from_raw_0, _) = + PusTcReader::new(&test_buf).expect("Creating PUS TC struct from raw buffer failed"); + let (tc_from_raw_1, _) = + PusTcReader::new(&test_buf).expect("Creating PUS TC struct from raw buffer failed"); + assert_eq!(tc_from_raw_0, tc_from_raw_1); + } + #[test] fn test_vec_ser_deser() { let pus_tc = base_ping_tc_simple_ctor(); @@ -965,10 +1018,19 @@ mod tests { .write_to_bytes(test_buf.as_mut_slice()) .expect("Error writing TC to buffer"); test_buf[12] = 0; + test_buf[11] = 0; let res = PusTcReader::new(&test_buf); assert!(res.is_err()); let err = res.unwrap_err(); - assert!(matches!(err, PusError::IncorrectCrc { .. 
})); + if let PusError::ChecksumFailure(crc) = err { + assert_eq!(crc, 0); + assert_eq!( + err.to_string(), + "checksum verification for crc16 0x0000 failed" + ); + } else { + panic!("unexpected error {err}"); + } } #[test] @@ -1004,15 +1066,21 @@ mod tests { let res = pus_tc.write_to_bytes(test_buf.as_mut_slice()); assert!(res.is_err()); let err = res.unwrap_err(); - match err { - PusError::ByteConversion(err) => match err { - ByteConversionError::ToSliceTooSmall { found, expected } => { - assert_eq!(expected, pus_tc.len_written()); - assert_eq!(found, 12); + if let PusError::ByteConversion(e) = err { + assert_eq!( + e, + ByteConversionError::ToSliceTooSmall { + found: 12, + expected: 13 } - _ => panic!("Unexpected error"), - }, - _ => panic!("Unexpected error"), + ); + assert_eq!( + err.to_string(), + "pus error: target slice with size 12 is too small, expected size of at least 13" + ); + assert_eq!(err.source().unwrap().to_string(), e.to_string()); + } else { + panic!("unexpected error {err}"); } } @@ -1081,13 +1149,20 @@ mod tests { fn verify_test_tc_generic(tc: &(impl CcsdsPacket + PusPacket + GenericPusTcSecondaryHeader)) { assert_eq!(PusPacket::service(tc), 17); + assert_eq!(GenericPusTcSecondaryHeader::service(tc), 17); assert_eq!(PusPacket::subservice(tc), 1); + assert_eq!(GenericPusTcSecondaryHeader::subservice(tc), 1); assert!(tc.sec_header_flag()); assert_eq!(PusPacket::pus_version(tc), PusC); assert_eq!(tc.seq_count(), 0x34); assert_eq!(tc.source_id(), 0); assert_eq!(tc.apid(), 0x02); assert_eq!(tc.ack_flags(), ACK_ALL); + assert_eq!(PusPacket::pus_version(tc), PusVersion::PusC); + assert_eq!( + GenericPusTcSecondaryHeader::pus_version(tc), + PusVersion::PusC + ); } fn verify_test_tc_raw(slice: &impl AsRef<[u8]>) { // Reference comparison implementation: @@ -1139,4 +1214,59 @@ mod tests { assert_eq!(pus_tc, PusTcReader::new(&buf).unwrap().0); assert_eq!(PusTcReader::new(&buf).unwrap().0, pus_tc); } + + #[test] + fn test_ack_opts_from_raw() { + let ack_opts_raw = AckOpts::Start as u8; + let ack_opts = AckOpts::try_from(ack_opts_raw).unwrap(); + assert_eq!(ack_opts, AckOpts::Start); + } + + #[test] + fn test_reader_buf_too_small() { + let app_data = &[1, 2, 3, 4]; + let pus_tc = base_ping_tc_simple_ctor_with_app_data(app_data); + let mut buf = [0; 32]; + let written_len = pus_tc.write_to_bytes(&mut buf).unwrap(); + let error = PusTcReader::new(&buf[0..PUS_TC_MIN_LEN_WITHOUT_APP_DATA + 1]); + assert!(error.is_err()); + let error = error.unwrap_err(); + if let PusError::ByteConversion(ByteConversionError::FromSliceTooSmall { + found, + expected, + }) = error + { + assert_eq!(found, PUS_TC_MIN_LEN_WITHOUT_APP_DATA + 1); + assert_eq!(expected, written_len); + } else { + panic!("unexpected error {error}") + } + } + + #[test] + fn test_reader_input_too_small() { + let buf: [u8; 5] = [0; 5]; + let error = PusTcReader::new(&buf); + assert!(error.is_err()); + let error = error.unwrap_err(); + if let PusError::ByteConversion(ByteConversionError::FromSliceTooSmall { + found, + expected, + }) = error + { + assert_eq!(found, 5); + assert_eq!(expected, PUS_TC_MIN_LEN_WITHOUT_APP_DATA); + } else { + panic!("unexpected error {error}") + } + } + + #[test] + #[cfg(feature = "serde")] + fn test_serialization_tc_serde() { + let pus_tc = base_ping_tc_simple_ctor(); + let output = to_allocvec(&pus_tc).unwrap(); + let output_converted_back: PusTcCreator = from_bytes(&output).unwrap(); + assert_eq!(output_converted_back, pus_tc); + } } diff --git a/src/ecss/tm.rs b/src/ecss/tm.rs index a12db61..40ee9c8 
100644 --- a/src/ecss/tm.rs +++ b/src/ecss/tm.rs @@ -381,14 +381,29 @@ pub mod legacy_tm { ) -> Result<(Self, usize), PusError> { let raw_data_len = slice.len(); if raw_data_len < PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA { - return Err(PusError::RawDataTooShort(raw_data_len)); + return Err(ByteConversionError::FromSliceTooSmall { + found: raw_data_len, + expected: PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA, + } + .into()); } let mut current_idx = 0; let (sp_header, _) = SpHeader::from_be_bytes(&slice[0..CCSDS_HEADER_LEN])?; current_idx += 6; let total_len = sp_header.total_len(); - if raw_data_len < total_len || total_len < PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA { - return Err(PusError::RawDataTooShort(raw_data_len)); + if raw_data_len < total_len { + return Err(ByteConversionError::FromSliceTooSmall { + found: raw_data_len, + expected: PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA, + } + .into()); + } + if total_len < PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA { + return Err(ByteConversionError::FromSliceTooSmall { + found: total_len, + expected: PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA, + } + .into()); } let sec_header_zc = zc::PusTmSecHeaderWithoutTimestamp::from_bytes( &slice[current_idx..current_idx + PUC_TM_MIN_SEC_HEADER_LEN], @@ -405,7 +420,7 @@ pub mod legacy_tm { sp_header, sec_header: PusTmSecondaryHeader::try_from(zc_sec_header_wrapper).unwrap(), raw_data: Some(&slice[0..total_len]), - source_data: user_data_from_raw(current_idx, total_len, raw_data_len, slice)?, + source_data: user_data_from_raw(current_idx, total_len, slice)?, calc_crc_on_serialization: false, crc16: Some(crc_from_raw_data(raw_data)?), }; @@ -541,21 +556,21 @@ impl<'raw_data> PusTmCreator<'raw_data> { /// automatically /// * `sec_header` - Information contained in the secondary header, including the service /// and subservice type - /// * `app_data` - Custom application data + /// * `source_data` - Custom application data /// * `set_ccsds_len` - Can be used to automatically update the CCSDS space packet data length /// field. 
If this is not set to true, [PusTm::update_ccsds_data_len] can be called to set /// the correct value to this field manually pub fn new( sp_header: &mut SpHeader, sec_header: PusTmSecondaryHeader<'raw_data>, - source_data: Option<&'raw_data [u8]>, + source_data: &'raw_data [u8], set_ccsds_len: bool, ) -> Self { sp_header.set_packet_type(PacketType::Tm); sp_header.set_sec_header_flag(); let mut pus_tm = Self { sp_header: *sp_header, - source_data: source_data.unwrap_or(&[]), + source_data, sec_header, calc_crc_on_serialization: true, }; @@ -571,7 +586,7 @@ impl<'raw_data> PusTmCreator<'raw_data> { subservice: u8, time_provider: &impl TimeWriter, stamp_buf: &'raw_data mut [u8], - source_data: Option<&'raw_data [u8]>, + source_data: &'raw_data [u8], set_ccsds_len: bool, ) -> Result { let stamp_size = time_provider.write_to_bytes(stamp_buf)?; @@ -579,6 +594,25 @@ impl<'raw_data> PusTmCreator<'raw_data> { PusTmSecondaryHeader::new_simple(service, subservice, &stamp_buf[0..stamp_size]); Ok(Self::new(sp_header, sec_header, source_data, set_ccsds_len)) } + + pub fn new_no_source_data( + sp_header: &mut SpHeader, + sec_header: PusTmSecondaryHeader<'raw_data>, + set_ccsds_len: bool, + ) -> Self { + sp_header.set_packet_type(PacketType::Tm); + sp_header.set_sec_header_flag(); + let mut pus_tm = Self { + sp_header: *sp_header, + source_data: &[], + sec_header, + calc_crc_on_serialization: true, + }; + if set_ccsds_len { + pus_tm.update_ccsds_data_len(); + } + pus_tm + } pub fn timestamp(&self) -> &[u8] { self.sec_header.timestamp } @@ -760,14 +794,29 @@ impl<'raw_data> PusTmReader<'raw_data> { pub fn new(slice: &'raw_data [u8], timestamp_len: usize) -> Result<(Self, usize), PusError> { let raw_data_len = slice.len(); if raw_data_len < PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA { - return Err(PusError::RawDataTooShort(raw_data_len)); + return Err(ByteConversionError::FromSliceTooSmall { + found: raw_data_len, + expected: PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA, + } + .into()); } let mut current_idx = 0; let (sp_header, _) = SpHeader::from_be_bytes(&slice[0..CCSDS_HEADER_LEN])?; current_idx += 6; let total_len = sp_header.total_len(); - if raw_data_len < total_len || total_len < PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA { - return Err(PusError::RawDataTooShort(raw_data_len)); + if raw_data_len < total_len { + return Err(ByteConversionError::FromSliceTooSmall { + found: raw_data_len, + expected: total_len, + } + .into()); + } + if total_len < PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA { + return Err(ByteConversionError::FromSliceTooSmall { + found: total_len, + expected: PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA, + } + .into()); } let sec_header_zc = zc::PusTmSecHeaderWithoutTimestamp::from_bytes( &slice[current_idx..current_idx + PUC_TM_MIN_SEC_HEADER_LEN], @@ -784,7 +833,7 @@ impl<'raw_data> PusTmReader<'raw_data> { sp_header, sec_header: PusTmSecondaryHeader::try_from(zc_sec_header_wrapper).unwrap(), raw_data: &slice[0..total_len], - source_data: user_data_from_raw(current_idx, total_len, raw_data_len, slice)?, + source_data: user_data_from_raw(current_idx, total_len, slice)?, crc16: crc_from_raw_data(raw_data)?, }; verify_crc16_ccitt_false_from_raw_to_pus_error(raw_data, pus_tm.crc16)?; @@ -811,7 +860,10 @@ impl<'raw_data> PusTmReader<'raw_data> { impl PartialEq for PusTmReader<'_> { fn eq(&self, other: &Self) -> bool { - self.raw_data == other.raw_data + self.sec_header == other.sec_header + && self.source_data == other.source_data + && self.sp_header == other.sp_header + && self.crc16 == other.crc16 } } @@ -943,20 +995,27 @@ 
impl<'raw> PusTmZeroCopyWriter<'raw> { #[cfg(test)] mod tests { + use alloc::string::ToString; + use super::*; use crate::ecss::PusVersion::PusC; + use crate::time::cds::TimeProvider; + #[cfg(feature = "serde")] + use crate::time::CcsdsTimeProvider; use crate::SpHeader; + #[cfg(feature = "serde")] + use postcard::{from_bytes, to_allocvec}; fn base_ping_reply_full_ctor(timestamp: &[u8]) -> PusTmCreator { let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); let tm_header = PusTmSecondaryHeader::new_simple(17, 2, timestamp); - PusTmCreator::new(&mut sph, tm_header, None, true) + PusTmCreator::new_no_source_data(&mut sph, tm_header, true) } fn base_hk_reply<'a>(timestamp: &'a [u8], src_data: &'a [u8]) -> PusTmCreator<'a> { let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); let tc_header = PusTmSecondaryHeader::new_simple(3, 5, timestamp); - PusTmCreator::new(&mut sph, tc_header, Some(src_data), true) + PusTmCreator::new(&mut sph, tc_header, src_data, true) } fn dummy_timestamp() -> &'static [u8] { @@ -969,6 +1028,16 @@ mod tests { let pus_tm = base_ping_reply_full_ctor(timestamp); verify_ping_reply(&pus_tm, false, 22, dummy_timestamp()); } + #[test] + fn test_basic_simple_api() { + let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); + let time_provider = TimeProvider::new_with_u16_days(0, 0); + let mut stamp_buf: [u8; 8] = [0; 8]; + let pus_tm = + PusTmCreator::new_simple(&mut sph, 17, 2, &time_provider, &mut stamp_buf, &[], true) + .unwrap(); + verify_ping_reply(&pus_tm, false, 22, &[64, 0, 0, 0, 0, 0, 0]); + } #[test] fn test_serialization_no_source_data() { @@ -979,7 +1048,7 @@ mod tests { .write_to_bytes(&mut buf) .expect("Serialization failed"); assert_eq!(ser_len, 22); - verify_raw_ping_reply(&buf); + verify_raw_ping_reply(pus_tm.crc16().unwrap(), &buf); } #[test] @@ -1032,14 +1101,39 @@ mod tests { assert_eq!(ser_len, 22); let (tm_deserialized, size) = PusTmReader::new(&buf, 7).expect("Deserialization failed"); assert_eq!(ser_len, size); + assert_eq!(tm_deserialized.user_data(), tm_deserialized.source_data()); + assert_eq!(tm_deserialized.raw_data(), &buf[..ser_len]); + assert_eq!(tm_deserialized.crc16().unwrap(), pus_tm.crc16().unwrap()); verify_ping_reply_with_reader(&tm_deserialized, false, 22, dummy_timestamp()); } + #[test] + fn test_deserialization_faulty_crc() { + let timestamp = dummy_timestamp(); + let pus_tm = base_ping_reply_full_ctor(timestamp); + let mut buf: [u8; 32] = [0; 32]; + let ser_len = pus_tm + .write_to_bytes(&mut buf) + .expect("Serialization failed"); + assert_eq!(ser_len, 22); + buf[ser_len - 2] = 0; + buf[ser_len - 1] = 0; + let tm_error = PusTmReader::new(&buf, 7); + assert!(tm_error.is_err()); + let tm_error = tm_error.unwrap_err(); + if let PusError::ChecksumFailure(crc) = tm_error { + assert_eq!(crc, 0); + assert_eq!( + tm_error.to_string(), + "checksum verification for crc16 0x0000 failed" + ); + } + } #[test] fn test_manual_field_update() { let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); let tc_header = PusTmSecondaryHeader::new_simple(17, 2, dummy_timestamp()); - let mut tm = PusTmCreator::new(&mut sph, tc_header, None, false); + let mut tm = PusTmCreator::new_no_source_data(&mut sph, tc_header, false); tm.calc_crc_on_serialization = false; assert_eq!(tm.data_len(), 0x00); let mut buf: [u8; 32] = [0; 32]; @@ -1085,7 +1179,7 @@ mod tests { let res = pus_tm.append_to_vec(&mut vec); assert!(res.is_ok()); assert_eq!(res.unwrap(), 22); - verify_raw_ping_reply(vec.as_slice()); + 
verify_raw_ping_reply(pus_tm.crc16().unwrap(), vec.as_slice()); } #[test] @@ -1101,7 +1195,7 @@ mod tests { assert_eq!(vec.len(), 26); } - fn verify_raw_ping_reply(buf: &[u8]) { + fn verify_raw_ping_reply(crc16: u16, buf: &[u8]) { // Secondary header is set -> 0b0000_1001 , APID occupies last bit of first byte assert_eq!(buf[0], 0x09); // Rest of APID 0x123 @@ -1124,9 +1218,10 @@ mod tests { assert_eq!(&buf[13..20], dummy_timestamp()); let mut digest = CRC_CCITT_FALSE.digest(); digest.update(&buf[0..20]); - let crc16 = digest.finalize(); + let crc16_calced = digest.finalize(); assert_eq!(((crc16 >> 8) & 0xff) as u8, buf[20]); assert_eq!((crc16 & 0xff) as u8, buf[21]); + assert_eq!(crc16, crc16_calced); } fn verify_ping_reply( @@ -1137,6 +1232,7 @@ mod tests { ) { assert_eq!(tm.len_written(), exp_full_len); assert_eq!(tm.timestamp(), exp_timestamp); + assert_eq!(tm.source_data(), tm.user_data()); verify_ping_reply_generic(tm, has_user_data, exp_full_len); } @@ -1158,7 +1254,9 @@ mod tests { ) { assert!(tm.is_tm()); assert_eq!(PusPacket::service(tm), 17); + assert_eq!(GenericPusTmSecondaryHeader::service(tm), 17); assert_eq!(PusPacket::subservice(tm), 2); + assert_eq!(GenericPusTmSecondaryHeader::subservice(tm), 2); assert!(tm.sec_header_flag()); if has_user_data { assert!(!tm.user_data().is_empty()); @@ -1166,6 +1264,11 @@ mod tests { assert_eq!(PusPacket::pus_version(tm), PusC); assert_eq!(tm.apid(), 0x123); assert_eq!(tm.seq_count(), 0x234); + assert_eq!(PusPacket::pus_version(tm), PusVersion::PusC); + assert_eq!( + GenericPusTmSecondaryHeader::pus_version(tm), + PusVersion::PusC + ); assert_eq!(tm.data_len(), exp_full_len as u16 - 7); assert_eq!(tm.dest_id(), 0x0000); assert_eq!(tm.msg_counter(), 0x0000); @@ -1202,6 +1305,10 @@ mod tests { writer.set_msg_count(100); writer.set_seq_count(MAX_SEQ_COUNT); writer.set_apid(MAX_APID); + assert!(!writer.set_apid(MAX_APID + 1)); + assert!(!writer.set_apid(MAX_SEQ_COUNT + 1)); + assert_eq!(writer.service(), 17); + assert_eq!(writer.subservice(), 2); writer.finish(); // This performs all necessary checks, including the CRC check. 
let (tm_read_back, tm_size_read_back) = @@ -1212,4 +1319,95 @@ mod tests { assert_eq!(tm_read_back.seq_count(), MAX_SEQ_COUNT); assert_eq!(tm_read_back.apid(), MAX_APID); } + + #[test] + fn test_sec_header_without_stamp() { + let sec_header = PusTmSecondaryHeader::new_simple_no_timestamp(17, 1); + assert_eq!(sec_header.timestamp, &[]); + } + + #[test] + fn test_reader_partial_eq() { + let timestamp = dummy_timestamp(); + let pus_tm = base_ping_reply_full_ctor(timestamp); + let mut buf = [0; 32]; + pus_tm.write_to_bytes(&mut buf).unwrap(); + let (tm_0, _) = PusTmReader::new(&buf, timestamp.len()).unwrap(); + let (tm_1, _) = PusTmReader::new(&buf, timestamp.len()).unwrap(); + assert_eq!(tm_0, tm_1); + } + #[test] + fn test_reader_buf_too_small_2() { + let timestamp = dummy_timestamp(); + let pus_tm = base_ping_reply_full_ctor(timestamp); + let mut buf = [0; 32]; + let written = pus_tm.write_to_bytes(&mut buf).unwrap(); + let tm_error = PusTmReader::new( + &buf[0..PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA + 1], + timestamp.len(), + ); + assert!(tm_error.is_err()); + let tm_error = tm_error.unwrap_err(); + if let PusError::ByteConversion(ByteConversionError::FromSliceTooSmall { + found, + expected, + }) = tm_error + { + assert_eq!(found, PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA + 1); + assert_eq!(expected, written); + } else { + panic!("unexpected error {tm_error}") + } + } + #[test] + fn test_reader_buf_too_small() { + let timestamp = dummy_timestamp(); + let pus_tm = base_ping_reply_full_ctor(timestamp); + let mut buf = [0; 32]; + pus_tm.write_to_bytes(&mut buf).unwrap(); + let tm_error = PusTmReader::new(&buf[0..5], timestamp.len()); + assert!(tm_error.is_err()); + let tm_error = tm_error.unwrap_err(); + if let PusError::ByteConversion(ByteConversionError::FromSliceTooSmall { + found, + expected, + }) = tm_error + { + assert_eq!(found, 5); + assert_eq!(expected, PUS_TM_MIN_LEN_WITHOUT_SOURCE_DATA); + } else { + panic!("unexpected error {tm_error}") + } + } + + #[test] + #[cfg(feature = "serde")] + fn test_serialization_creator_serde() { + let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); + let time_provider = TimeProvider::new_with_u16_days(0, 0); + let mut stamp_buf: [u8; 8] = [0; 8]; + let pus_tm = + PusTmCreator::new_simple(&mut sph, 17, 2, &time_provider, &mut stamp_buf, &[], true) + .unwrap(); + + let output = to_allocvec(&pus_tm).unwrap(); + let output_converted_back: PusTmCreator = from_bytes(&output).unwrap(); + assert_eq!(output_converted_back, pus_tm); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serialization_reader_serde() { + let mut sph = SpHeader::tm_unseg(0x123, 0x234, 0).unwrap(); + let time_provider = TimeProvider::new_with_u16_days(0, 0); + let mut stamp_buf: [u8; 8] = [0; 8]; + let pus_tm = + PusTmCreator::new_simple(&mut sph, 17, 2, &time_provider, &mut stamp_buf, &[], true) + .unwrap(); + let pus_tm_vec = pus_tm.to_vec().unwrap(); + let (tm_reader, _) = PusTmReader::new(&pus_tm_vec, time_provider.len_as_bytes()).unwrap(); + let output = to_allocvec(&tm_reader).unwrap(); + let output_converted_back: PusTmReader = from_bytes(&output).unwrap(); + assert_eq!(output_converted_back, tm_reader); + } } diff --git a/src/lib.rs b/src/lib.rs index 2b422bd..36e6b2e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -749,7 +749,7 @@ pub mod zc { } #[cfg(all(test, feature = "std"))] -mod tests { +pub(crate) mod tests { use std::collections::HashSet; #[cfg(feature = "serde")] @@ -759,9 +759,13 @@ mod tests { }; use crate::{SequenceFlags, SpHeader}; use alloc::vec; + #[cfg(feature = 
"serde")] + use core::fmt::Debug; use num_traits::pow; #[cfg(feature = "serde")] use postcard::{from_bytes, to_allocvec}; + #[cfg(feature = "serde")] + use serde::{de::DeserializeOwned, Serialize}; const CONST_SP: SpHeader = SpHeader::new( PacketId::const_tc(true, 0x36), @@ -771,10 +775,19 @@ mod tests { const PACKET_ID_TM: PacketId = PacketId::const_tm(true, 0x22); + #[cfg(feature = "serde")] + pub(crate) fn generic_serde_test( + value: T, + ) { + let output: alloc::vec::Vec = to_allocvec(&value).unwrap(); + let output_converted_back: T = from_bytes(&output).unwrap(); + assert_eq!(output_converted_back, value); + } + #[test] fn verify_const_packet_id() { assert_eq!(PACKET_ID_TM.apid(), 0x22); - assert_eq!(PACKET_ID_TM.sec_header_flag, true); + assert!(PACKET_ID_TM.sec_header_flag); assert_eq!(PACKET_ID_TM.ptype, PacketType::Tm); let const_tc_id = PacketId::const_tc(true, 0x23); assert_eq!(const_tc_id.ptype, PacketType::Tc); @@ -880,6 +893,7 @@ mod tests { fn test_invalid_seq_count() { let mut psc = PacketSequenceCtrl::new(SequenceFlags::ContinuationSegment, 77) .expect("PSC creation failed"); + assert_eq!(psc.seq_count(), 77); assert!(!psc.set_seq_count(0xffff)); } diff --git a/src/time/cds.rs b/src/time/cds.rs index 85c7f65..3442819 100644 --- a/src/time/cds.rs +++ b/src/time/cds.rs @@ -64,10 +64,10 @@ pub enum LengthOfDaySegment { #[derive(Debug, Copy, Clone, PartialEq, Eq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum SubmillisPrecision { - Absent, - Microseconds(u16), - Picoseconds(u32), - Reserved, + Absent = 0b00, + Microseconds = 0b01, + Picoseconds = 0b10, + Reserved = 0b11, } #[derive(Debug, PartialEq, Eq, Copy, Clone)] @@ -107,8 +107,8 @@ pub fn length_of_day_segment_from_pfield(pfield: u8) -> LengthOfDaySegment { pub fn precision_from_pfield(pfield: u8) -> SubmillisPrecision { match pfield & 0b11 { - 0b01 => SubmillisPrecision::Microseconds(0), - 0b10 => SubmillisPrecision::Picoseconds(0), + 0b01 => SubmillisPrecision::Microseconds, + 0b10 => SubmillisPrecision::Picoseconds, 0b00 => SubmillisPrecision::Absent, 0b11 => SubmillisPrecision::Reserved, _ => panic!("pfield to SubmillisPrecision failed"), @@ -164,7 +164,8 @@ pub struct TimeProvider { pfield: u8, ccsds_days: DaysLen::FieldType, ms_of_day: u32, - submillis_precision: Option, + // submillis_precision: SubmillisPrecision, + submillis: u32, /// This is not strictly necessary but still cached because it significantly simplifies the /// calculation of [`DateTime`]. unix_stamp: UnixTimestamp, @@ -174,7 +175,8 @@ pub struct TimeProvider { /// /// Also exists to encapsulate properties used by private converters. pub trait CdsCommon { - fn submillis_precision(&self) -> Option; + fn submillis_precision(&self) -> SubmillisPrecision; + fn submillis(&self) -> u32; fn ms_of_day(&self) -> u32; fn ccsds_days_as_u32(&self) -> u32; } @@ -220,8 +222,8 @@ impl ConversionFromUnix { } impl CdsCommon for ConversionFromUnix { - fn submillis_precision(&self) -> Option { - None + fn submillis_precision(&self) -> SubmillisPrecision { + SubmillisPrecision::Absent } fn ms_of_day(&self) -> u32 { @@ -231,6 +233,10 @@ impl CdsCommon for ConversionFromUnix { fn ccsds_days_as_u32(&self) -> u32 { self.ccsds_days } + + fn submillis(&self) -> u32 { + 0 + } } impl CdsConverter for ConversionFromUnix { @@ -241,11 +247,12 @@ impl CdsConverter for ConversionFromUnix { /// Helper struct which generates fields for the CDS time provider from a datetime. 
struct ConversionFromDatetime { unix_conversion: ConversionFromUnix, - submillis_prec: Option, + submillis_prec: SubmillisPrecision, + submillis: u32, } impl CdsCommon for ConversionFromDatetime { - fn submillis_precision(&self) -> Option { + fn submillis_precision(&self) -> SubmillisPrecision { self.submillis_prec } @@ -255,6 +262,10 @@ impl CdsCommon for ConversionFromDatetime { fn ccsds_days_as_u32(&self) -> u32; } } + + fn submillis(&self) -> u32 { + self.submillis + } } impl CdsConverter for ConversionFromDatetime { @@ -276,21 +287,18 @@ fn calc_unix_days_and_secs_of_day(unix_seconds: i64) -> (i64, u32) { impl ConversionFromDatetime { fn new(dt: &DateTime) -> Result { - Self::new_generic(dt, None) + Self::new_generic(dt, SubmillisPrecision::Absent) } fn new_with_submillis_us_prec(dt: &DateTime) -> Result { - Self::new_generic(dt, Some(SubmillisPrecision::Microseconds(0))) + Self::new_generic(dt, SubmillisPrecision::Microseconds) } fn new_with_submillis_ps_prec(dt: &DateTime) -> Result { - Self::new_generic(dt, Some(SubmillisPrecision::Picoseconds(0))) + Self::new_generic(dt, SubmillisPrecision::Picoseconds) } - fn new_generic( - dt: &DateTime, - mut prec: Option, - ) -> Result { + fn new_generic(dt: &DateTime, prec: SubmillisPrecision) -> Result { // The CDS timestamp does not support timestamps before the CCSDS epoch. if dt.year() < 1958 { return Err(TimestampError::DateBeforeCcsdsEpoch(*dt)); @@ -298,24 +306,20 @@ impl ConversionFromDatetime { // The contained values in the conversion should be all positive now let unix_conversion = ConversionFromUnix::new(dt.timestamp(), dt.timestamp_subsec_millis())?; - if let Some(submilli_prec) = prec { - match submilli_prec { - SubmillisPrecision::Microseconds(_) => { - prec = Some(SubmillisPrecision::Microseconds( - (dt.timestamp_subsec_micros() % 1000) as u16, - )); - } - SubmillisPrecision::Picoseconds(_) => { - prec = Some(SubmillisPrecision::Picoseconds( - (dt.timestamp_subsec_nanos() % 10_u32.pow(6)) * 1000, - )); - } - _ => (), + let mut submillis = 0; + match prec { + SubmillisPrecision::Microseconds => { + submillis = dt.timestamp_subsec_micros() % 1000; } + SubmillisPrecision::Picoseconds => { + submillis = (dt.timestamp_subsec_nanos() % 10_u32.pow(6)) * 1000; + } + _ => (), } Ok(Self { unix_conversion, submillis_prec: prec, + submillis, }) } } @@ -323,55 +327,52 @@ impl ConversionFromDatetime { #[cfg(feature = "std")] struct ConversionFromNow { unix_conversion: ConversionFromUnix, - submillis_prec: Option, + submillis_prec: SubmillisPrecision, + submillis: u32, } #[cfg(feature = "std")] impl ConversionFromNow { fn new() -> Result { - Self::new_generic(None) + Self::new_generic(SubmillisPrecision::Absent) } fn new_with_submillis_us_prec() -> Result { - Self::new_generic(Some(SubmillisPrecision::Microseconds(0))) + Self::new_generic(SubmillisPrecision::Microseconds) } fn new_with_submillis_ps_prec() -> Result { - Self::new_generic(Some(SubmillisPrecision::Picoseconds(0))) + Self::new_generic(SubmillisPrecision::Picoseconds) } - fn new_generic(mut prec: Option) -> Result { + fn new_generic(prec: SubmillisPrecision) -> Result { let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH)?; let epoch = now.as_secs(); // This should always return a value with valid (non-negative) CCSDS days, // so it is okay to unwrap let unix_conversion = ConversionFromUnix::new(epoch as i64, now.subsec_millis()).unwrap(); - // Both values should now be positive - if let Some(submilli_prec) = prec { - match submilli_prec { - 
SubmillisPrecision::Microseconds(_) => { - prec = Some(SubmillisPrecision::Microseconds( - (now.subsec_micros() % 1000) as u16, - )); - } - SubmillisPrecision::Picoseconds(_) => { - prec = Some(SubmillisPrecision::Picoseconds( - (now.subsec_nanos() % 10_u32.pow(6)) * 1000, - )); - } - _ => (), + let mut submillis = 0; + + match prec { + SubmillisPrecision::Microseconds => { + submillis = now.subsec_micros() % 1000; } + SubmillisPrecision::Picoseconds => { + submillis = (now.subsec_nanos() % 10_u32.pow(6)) * 1000; + } + _ => (), } Ok(Self { unix_conversion, submillis_prec: prec, + submillis, }) } } #[cfg(feature = "std")] impl CdsCommon for ConversionFromNow { - fn submillis_precision(&self) -> Option { + fn submillis_precision(&self) -> SubmillisPrecision { self.submillis_prec } delegate! { @@ -380,6 +381,10 @@ impl CdsCommon for ConversionFromNow { fn ccsds_days_as_u32(&self) -> u32; } } + + fn submillis(&self) -> u32 { + self.submillis + } } #[cfg(feature = "std")] @@ -403,7 +408,9 @@ impl DynCdsTimeProvider for TimeProvider {} /// # Example /// /// ``` -/// use spacepackets::time::cds::{TimeProvider, LengthOfDaySegment, get_dyn_time_provider_from_bytes}; +/// use spacepackets::time::cds::{ +/// TimeProvider, LengthOfDaySegment, get_dyn_time_provider_from_bytes, SubmillisPrecision, +/// }; /// use spacepackets::time::{TimeWriter, CcsdsTimeCodes, CcsdsTimeProvider}; /// /// let timestamp_now = TimeProvider::new_with_u16_days(24, 24); @@ -418,7 +425,7 @@ impl DynCdsTimeProvider for TimeProvider {} /// assert_eq!(dyn_provider.len_of_day_seg(), LengthOfDaySegment::Short16Bits); /// assert_eq!(dyn_provider.ccsds_days_as_u32(), 24); /// assert_eq!(dyn_provider.ms_of_day(), 24); -/// assert_eq!(dyn_provider.submillis_precision(), None); +/// assert_eq!(dyn_provider.submillis_precision(), SubmillisPrecision::Absent); /// } /// ``` #[cfg(feature = "alloc")] @@ -448,8 +455,8 @@ pub fn get_dyn_time_provider_from_bytes( } impl CdsCommon for TimeProvider { - fn submillis_precision(&self) -> Option { - self.submillis_precision + fn submillis_precision(&self) -> SubmillisPrecision { + precision_from_pfield(self.pfield) } fn ms_of_day(&self) -> u32 { @@ -459,31 +466,42 @@ impl CdsCommon for TimeProvider u32 { self.ccsds_days.into() } + + fn submillis(&self) -> u32 { + self.submillis + } } impl TimeProvider { /// Please note that a precision value of 0 will be converted to [None] (no precision). 
- pub fn set_submillis_precision(&mut self, prec: SubmillisPrecision) { + pub fn set_submillis(&mut self, prec: SubmillisPrecision, value: u32) -> bool { self.pfield &= !(0b11); if let SubmillisPrecision::Absent = prec { - self.submillis_precision = None; - return; + // self.submillis_precision = prec; + self.submillis = 0; + return true; } - self.submillis_precision = Some(prec); + // self.submillis_precision = prec; match prec { - SubmillisPrecision::Microseconds(_) => { - self.pfield |= 0b01; + SubmillisPrecision::Microseconds => { + if value > u16::MAX as u32 { + return false; + } + self.pfield |= SubmillisPrecision::Microseconds as u8; + self.submillis = value; } - SubmillisPrecision::Picoseconds(_) => { - self.pfield |= 0b10; + SubmillisPrecision::Picoseconds => { + self.pfield |= SubmillisPrecision::Picoseconds as u8; + self.submillis = value; } _ => (), } + true } - pub fn clear_submillis_precision(&mut self) { + pub fn clear_submillis(&mut self) { self.pfield &= !(0b11); - self.submillis_precision = None; + self.submillis = 0; } pub fn ccsds_days(&self) -> ProvidesDaysLen::FieldType { @@ -493,20 +511,11 @@ impl TimeProvider { /// Maps the submillisecond precision to a nanosecond value. This will reduce precision when /// using picosecond resolution, but significantly simplifies comparison of timestamps. pub fn precision_as_ns(&self) -> Option { - if let Some(prec) = self.submillis_precision { - match prec { - SubmillisPrecision::Microseconds(us) => { - return Some(us as u32 * 1000); - } - SubmillisPrecision::Picoseconds(ps) => { - return Some(ps / 1000); - } - _ => { - return None; - } - } + match self.submillis_precision() { + SubmillisPrecision::Microseconds => Some(self.submillis * 1000), + SubmillisPrecision::Picoseconds => Some(self.submillis / 1000), + _ => None, } - None } fn generic_raw_read_checks( @@ -624,11 +633,11 @@ impl TimeProvider { i64: From, { let mut provider = Self { - pfield: Self::generate_p_field(days_len, None), + pfield: Self::generate_p_field(days_len, SubmillisPrecision::Absent), ccsds_days, ms_of_day, unix_stamp: Default::default(), - submillis_precision: None, + submillis: 0, }; let unix_days_seconds = ccsds_to_unix_days(i64::from(ccsds_days)) * SECONDS_PER_DAY as i64; provider.setup(unix_days_seconds, ms_of_day); @@ -706,7 +715,7 @@ impl TimeProvider { ccsds_days, ms_of_day: converter.ms_of_day(), unix_stamp: Default::default(), - submillis_precision: converter.submillis_precision(), + submillis: converter.submillis(), }; provider.setup(converter.unix_days_seconds(), converter.ms_of_day()); Ok(provider) @@ -714,32 +723,20 @@ impl TimeProvider { #[cfg(feature = "std")] fn generic_conversion_from_now(&self) -> Result { - Ok(match self.submillis_precision { - None => ConversionFromNow::new()?, - Some(prec) => match prec { - SubmillisPrecision::Microseconds(_) => { - ConversionFromNow::new_with_submillis_us_prec()? - } - SubmillisPrecision::Picoseconds(_) => { - ConversionFromNow::new_with_submillis_ps_prec()? 
- } - _ => ConversionFromNow::new()?, - }, + Ok(match self.submillis_precision() { + SubmillisPrecision::Microseconds => ConversionFromNow::new_with_submillis_us_prec()?, + SubmillisPrecision::Picoseconds => ConversionFromNow::new_with_submillis_ps_prec()?, + _ => ConversionFromNow::new()?, }) } - fn generate_p_field( - day_seg_len: LengthOfDaySegment, - submillis_prec: Option, - ) -> u8 { + fn generate_p_field(day_seg_len: LengthOfDaySegment, submillis_prec: SubmillisPrecision) -> u8 { let mut pfield = P_FIELD_BASE | ((day_seg_len as u8) << 2); - if let Some(submillis_prec) = submillis_prec { - match submillis_prec { - SubmillisPrecision::Microseconds(_) => pfield |= 0b01, - SubmillisPrecision::Picoseconds(_) => pfield |= 0b10, - SubmillisPrecision::Reserved => pfield |= 0b11, - _ => (), - } + match submillis_prec { + SubmillisPrecision::Microseconds => pfield |= SubmillisPrecision::Microseconds as u8, + SubmillisPrecision::Picoseconds => pfield |= SubmillisPrecision::Picoseconds as u8, + SubmillisPrecision::Reserved => pfield |= SubmillisPrecision::Reserved as u8, + _ => (), } pfield } @@ -843,14 +840,18 @@ impl TimeProvider { let ms_of_day: u32 = u32::from_be_bytes(buf[4..8].try_into().unwrap()); let mut provider = Self::new_with_u24_days(cccsds_days, ms_of_day)?; match submillis_precision { - SubmillisPrecision::Microseconds(_) => { - provider.set_submillis_precision(SubmillisPrecision::Microseconds( - u16::from_be_bytes(buf[8..10].try_into().unwrap()), - )) + SubmillisPrecision::Microseconds => { + provider.set_submillis( + SubmillisPrecision::Microseconds, + u16::from_be_bytes(buf[8..10].try_into().unwrap()) as u32, + ); + } + SubmillisPrecision::Picoseconds => { + provider.set_submillis( + SubmillisPrecision::Picoseconds, + u32::from_be_bytes(buf[8..12].try_into().unwrap()), + ); } - SubmillisPrecision::Picoseconds(_) => provider.set_submillis_precision( - SubmillisPrecision::Picoseconds(u32::from_be_bytes(buf[8..12].try_into().unwrap())), - ), _ => (), } Ok(provider) @@ -924,13 +925,20 @@ impl TimeProvider { let ms_of_day: u32 = u32::from_be_bytes(buf[3..7].try_into().unwrap()); let mut provider = Self::new_with_u16_days(ccsds_days, ms_of_day); provider.pfield = buf[0]; + match submillis_precision { - SubmillisPrecision::Microseconds(_) => provider.set_submillis_precision( - SubmillisPrecision::Microseconds(u16::from_be_bytes(buf[7..9].try_into().unwrap())), - ), - SubmillisPrecision::Picoseconds(_) => provider.set_submillis_precision( - SubmillisPrecision::Picoseconds(u32::from_be_bytes(buf[7..11].try_into().unwrap())), - ), + SubmillisPrecision::Microseconds => { + provider.set_submillis( + SubmillisPrecision::Microseconds, + u16::from_be_bytes(buf[7..9].try_into().unwrap()) as u32, + ); + } + SubmillisPrecision::Picoseconds => { + provider.set_submillis( + SubmillisPrecision::Picoseconds, + u32::from_be_bytes(buf[7..11].try_into().unwrap()), + ); + } _ => (), } Ok(provider) @@ -941,7 +949,7 @@ fn add_for_max_ccsds_days_val( time_provider: &TimeProvider, max_days_val: u32, duration: Duration, -) -> (u32, u32, Option) { +) -> (u32, u32, u32) { let mut next_ccsds_days = time_provider.ccsds_days_as_u32(); let mut next_ms_of_day = time_provider.ms_of_day; // Increment CCSDS days by a certain amount while also accounting for overflow. 
@@ -963,47 +971,43 @@ fn add_for_max_ccsds_days_val( increment_days(ccsds_days, 1); } }; - let precision = if let Some(submillis_prec) = time_provider.submillis_precision { - match submillis_prec { - SubmillisPrecision::Microseconds(mut us) => { - let subsec_micros = duration.subsec_micros(); - let subsec_millis = subsec_micros / 1000; - let submilli_micros = (subsec_micros % 1000) as u16; - us += submilli_micros; - if us >= 1000 { - let carryover_us = us - 1000; - increment_ms_of_day(&mut next_ms_of_day, 1, &mut next_ccsds_days); - us = carryover_us; - } - increment_ms_of_day(&mut next_ms_of_day, subsec_millis, &mut next_ccsds_days); - Some(SubmillisPrecision::Microseconds(us)) + let mut submillis = time_provider.submillis(); + match time_provider.submillis_precision() { + SubmillisPrecision::Microseconds => { + let subsec_micros = duration.subsec_micros(); + let subsec_millis = subsec_micros / 1000; + let submilli_micros = subsec_micros % 1000; + submillis += submilli_micros; + if submillis >= 1000 { + let carryover_us = submillis - 1000; + increment_ms_of_day(&mut next_ms_of_day, 1, &mut next_ccsds_days); + submillis = carryover_us; } - SubmillisPrecision::Picoseconds(mut ps) => { - let subsec_nanos = duration.subsec_nanos(); - let subsec_millis = subsec_nanos / 10_u32.pow(6); - // 1 ms as ns is 1e6. - let submilli_nanos = subsec_nanos % 10_u32.pow(6); - // No overflow risk: The maximum value of an u32 is ~4.294e9, and one ms as ps - // is 1e9. The amount ps can now have is always less than 2e9. - ps += submilli_nanos * 1000; - if ps >= 10_u32.pow(9) { - let carry_over_ps = ps - 10_u32.pow(9); - increment_ms_of_day(&mut next_ms_of_day, 1, &mut next_ccsds_days); - ps = carry_over_ps; - } - increment_ms_of_day(&mut next_ms_of_day, subsec_millis, &mut next_ccsds_days); - Some(SubmillisPrecision::Picoseconds(ps)) - } - _ => None, + increment_ms_of_day(&mut next_ms_of_day, subsec_millis, &mut next_ccsds_days); } - } else { - increment_ms_of_day( - &mut next_ms_of_day, - duration.subsec_millis(), - &mut next_ccsds_days, - ); - None - }; + SubmillisPrecision::Picoseconds => { + let subsec_nanos = duration.subsec_nanos(); + let subsec_millis = subsec_nanos / 10_u32.pow(6); + // 1 ms as ns is 1e6. + let submilli_nanos = subsec_nanos % 10_u32.pow(6); + // No overflow risk: The maximum value of an u32 is ~4.294e9, and one ms as ps + // is 1e9. The amount ps can now have is always less than 2e9. + submillis += submilli_nanos * 1000; + if submillis >= 10_u32.pow(9) { + let carry_over_ps = submillis - 10_u32.pow(9); + increment_ms_of_day(&mut next_ms_of_day, 1, &mut next_ccsds_days); + submillis = carry_over_ps; + } + increment_ms_of_day(&mut next_ms_of_day, subsec_millis, &mut next_ccsds_days); + } + _ => { + increment_ms_of_day( + &mut next_ms_of_day, + duration.subsec_millis(), + &mut next_ccsds_days, + ); + } + } // The subsecond millisecond were already handled. 
let full_seconds = duration.as_secs(); let secs_of_day = (full_seconds % SECONDS_PER_DAY as u64) as u32; @@ -1013,7 +1017,7 @@ fn add_for_max_ccsds_days_val( &mut next_ccsds_days, (full_seconds as u32 - secs_of_day) / SECONDS_PER_DAY, ); - (next_ccsds_days, next_ms_of_day, precision) + (next_ccsds_days, next_ms_of_day, submillis) } impl CdsTimestamp for TimeProvider { @@ -1038,9 +1042,7 @@ impl Add for TimeProvider { let (next_ccsds_days, next_ms_of_day, precision) = add_for_max_ccsds_days_val(&self, u16::MAX as u32, duration); let mut provider = Self::new_with_u16_days(next_ccsds_days as u16, next_ms_of_day); - if let Some(prec) = precision { - provider.set_submillis_precision(prec); - } + provider.set_submillis(self.submillis_precision(), precision); provider } } @@ -1052,9 +1054,7 @@ impl Add for &TimeProvider { let (next_ccsds_days, next_ms_of_day, precision) = add_for_max_ccsds_days_val(self, u16::MAX as u32, duration); let mut provider = Self::Output::new_with_u16_days(next_ccsds_days as u16, next_ms_of_day); - if let Some(prec) = precision { - provider.set_submillis_precision(prec); - } + provider.set_submillis(self.submillis_precision(), precision); provider } } @@ -1069,9 +1069,7 @@ impl Add for TimeProvider { let (next_ccsds_days, next_ms_of_day, precision) = add_for_max_ccsds_days_val(&self, MAX_DAYS_24_BITS, duration); let mut provider = Self::new_with_u24_days(next_ccsds_days, next_ms_of_day).unwrap(); - if let Some(prec) = precision { - provider.set_submillis_precision(prec); - } + provider.set_submillis(self.submillis_precision(), precision); provider } } @@ -1083,9 +1081,7 @@ impl Add for &TimeProvider { add_for_max_ccsds_days_val(self, MAX_DAYS_24_BITS, duration); let mut provider = Self::Output::new_with_u24_days(next_ccsds_days, next_ms_of_day).unwrap(); - if let Some(prec) = precision { - provider.set_submillis_precision(prec); - } + provider.set_submillis(self.submillis_precision(), precision); provider } } @@ -1095,11 +1091,11 @@ impl Add for &TimeProvider { /// days overflow when this is a possibility and might be a problem. impl AddAssign for TimeProvider { fn add_assign(&mut self, duration: Duration) { - let (next_ccsds_days, next_ms_of_day, precision) = + let (next_ccsds_days, next_ms_of_day, submillis) = add_for_max_ccsds_days_val(self, u16::MAX as u32, duration); self.ccsds_days = next_ccsds_days as u16; self.ms_of_day = next_ms_of_day; - self.submillis_precision = precision; + self.submillis = submillis; } } @@ -1108,11 +1104,11 @@ impl AddAssign for TimeProvider { /// days overflow when this is a possibility and might be a problem. 
impl AddAssign for TimeProvider { fn add_assign(&mut self, duration: Duration) { - let (next_ccsds_days, next_ms_of_day, precision) = + let (next_ccsds_days, next_ms_of_day, submillis) = add_for_max_ccsds_days_val(self, MAX_DAYS_24_BITS, duration); self.ccsds_days = next_ccsds_days; self.ms_of_day = next_ms_of_day; - self.submillis_precision = precision; + self.submillis = submillis; } } @@ -1163,16 +1159,14 @@ impl CcsdsTimeProvider for TimeProvider Option> { let mut ns_since_last_sec = (self.ms_of_day % 1000) * 10_u32.pow(6); - if let Some(precision) = self.submillis_precision { - match precision { - SubmillisPrecision::Microseconds(us) => { - ns_since_last_sec += us as u32 * 1000; - } - SubmillisPrecision::Picoseconds(ps) => { - ns_since_last_sec += ps / 1000; - } - _ => (), + match self.submillis_precision() { + SubmillisPrecision::Microseconds => { + ns_since_last_sec += self.submillis() * 1000; } + SubmillisPrecision::Picoseconds => { + ns_since_last_sec += self.submillis() / 1000; + } + _ => (), } self.calc_date_time(ns_since_last_sec) } @@ -1196,16 +1190,14 @@ impl TimeWriter for TimeProvider { buf[0] = self.pfield; buf[1..3].copy_from_slice(self.ccsds_days.to_be_bytes().as_slice()); buf[3..7].copy_from_slice(self.ms_of_day.to_be_bytes().as_slice()); - if let Some(submillis_prec) = self.submillis_precision { - match submillis_prec { - SubmillisPrecision::Microseconds(ms) => { - buf[7..9].copy_from_slice(ms.to_be_bytes().as_slice()); - } - SubmillisPrecision::Picoseconds(ps) => { - buf[7..11].copy_from_slice(ps.to_be_bytes().as_slice()); - } - _ => (), + match self.submillis_precision() { + SubmillisPrecision::Microseconds => { + buf[7..9].copy_from_slice((self.submillis() as u16).to_be_bytes().as_slice()); } + SubmillisPrecision::Picoseconds => { + buf[7..11].copy_from_slice(self.submillis().to_be_bytes().as_slice()); + } + _ => (), } Ok(self.len_as_bytes()) } @@ -1218,16 +1210,14 @@ impl TimeWriter for TimeProvider { let be_days = self.ccsds_days.to_be_bytes(); buf[1..4].copy_from_slice(&be_days[1..4]); buf[4..8].copy_from_slice(self.ms_of_day.to_be_bytes().as_slice()); - if let Some(submillis_prec) = self.submillis_precision { - match submillis_prec { - SubmillisPrecision::Microseconds(ms) => { - buf[8..10].copy_from_slice(ms.to_be_bytes().as_slice()); - } - SubmillisPrecision::Picoseconds(ps) => { - buf[8..12].copy_from_slice(ps.to_be_bytes().as_slice()); - } - _ => (), + match self.submillis_precision() { + SubmillisPrecision::Microseconds => { + buf[8..10].copy_from_slice((self.submillis() as u16).to_be_bytes().as_slice()); } + SubmillisPrecision::Picoseconds => { + buf[8..12].copy_from_slice(self.submillis().to_be_bytes().as_slice()); + } + _ => (), } Ok(self.len_as_bytes()) } @@ -1307,6 +1297,7 @@ mod tests { use super::*; use crate::time::TimestampError::{ByteConversion, InvalidTimeCode}; use crate::ByteConversionError::{FromSliceTooSmall, ToSliceTooSmall}; + use alloc::string::ToString; use chrono::{Datelike, NaiveDate, Timelike}; #[cfg(feature = "serde")] use postcard::{from_bytes, to_allocvec}; @@ -1322,7 +1313,10 @@ mod tests { ); let subsecond_millis = unix_stamp.subsecond_millis; assert!(subsecond_millis.is_none()); - assert_eq!(time_stamper.submillis_precision(), None); + assert_eq!( + time_stamper.submillis_precision(), + SubmillisPrecision::Absent + ); assert!(time_stamper.subsecond_millis().is_none()); assert_eq!(time_stamper.ccdsd_time_code(), CcsdsTimeCodes::Cds); assert_eq!( @@ -1342,7 +1336,10 @@ mod tests { fn test_time_stamp_unix_epoch() { let time_stamper = 
TimeProvider::new_with_u16_days((-DAYS_CCSDS_TO_UNIX) as u16, 0); assert_eq!(time_stamper.unix_stamp().unix_seconds, 0); - assert_eq!(time_stamper.submillis_precision(), None); + assert_eq!( + time_stamper.submillis_precision(), + SubmillisPrecision::Absent + ); let date_time = time_stamper.date_time().unwrap(); assert_eq!(date_time.year(), 1970); assert_eq!(date_time.month(), 1); @@ -1415,6 +1412,11 @@ mod tests { fn test_write() { let mut buf = [0; 16]; let time_stamper_0 = TimeProvider::new_with_u16_days(0, 0); + let unix_stamp = time_stamper_0.unix_stamp(); + assert_eq!( + unix_stamp.unix_seconds, + (DAYS_CCSDS_TO_UNIX * SECONDS_PER_DAY as i32).into() + ); let mut res = time_stamper_0.write_to_bytes(&mut buf); assert!(res.is_ok()); assert_eq!(buf[0], (CcsdsTimeCodes::Cds as u8) << 4); @@ -1447,10 +1449,15 @@ mod tests { for i in 0..6 { let res = time_stamper.write_to_bytes(&mut buf[0..i]); assert!(res.is_err()); - match res.unwrap_err() { + let error = res.unwrap_err(); + match error { ByteConversion(ToSliceTooSmall { found, expected }) => { assert_eq!(found, i); assert_eq!(expected, 7); + assert_eq!( + error.to_string(), + format!("time stamp: target slice with size {i} is too small, expected size of at least 7") + ); } _ => panic!( "{}", @@ -1492,12 +1499,13 @@ mod tests { let res = TimeProvider::::from_bytes(&buf); assert!(res.is_err()); let err = res.unwrap_err(); - match err { - InvalidTimeCode { expected, found } => { - assert_eq!(expected, CcsdsTimeCodes::Cds); - assert_eq!(found, 0); - } - _ => {} + if let InvalidTimeCode { expected, found } = err { + assert_eq!(expected, CcsdsTimeCodes::Cds); + assert_eq!(found, 0); + assert_eq!( + err.to_string(), + "invalid raw time code value 0 for time code Cds" + ); } } @@ -1586,15 +1594,12 @@ mod tests { #[test] fn test_submillis_precision_micros() { let mut time_stamper = TimeProvider::new_with_u16_days(0, 0); - time_stamper.set_submillis_precision(SubmillisPrecision::Microseconds(500)); - assert!(time_stamper.submillis_precision().is_some()); - if let SubmillisPrecision::Microseconds(micros) = - time_stamper.submillis_precision().unwrap() - { - assert_eq!(micros, 500); - } else { - panic!("Submillis precision was not set properly"); - } + time_stamper.set_submillis(SubmillisPrecision::Microseconds, 500); + assert_eq!( + time_stamper.submillis_precision(), + SubmillisPrecision::Microseconds + ); + assert_eq!(time_stamper.submillis(), 500); let mut write_buf: [u8; 16] = [0; 16]; let written = time_stamper .write_to_bytes(&mut write_buf) @@ -1607,13 +1612,12 @@ mod tests { #[test] fn test_submillis_precision_picos() { let mut time_stamper = TimeProvider::new_with_u16_days(0, 0); - time_stamper.set_submillis_precision(SubmillisPrecision::Picoseconds(5e8 as u32)); - assert!(time_stamper.submillis_precision().is_some()); - if let SubmillisPrecision::Picoseconds(ps) = time_stamper.submillis_precision().unwrap() { - assert_eq!(ps, 5e8 as u32); - } else { - panic!("Submillis precision was not set properly"); - } + time_stamper.set_submillis(SubmillisPrecision::Picoseconds, 5e8 as u32); + assert_eq!( + time_stamper.submillis_precision(), + SubmillisPrecision::Picoseconds + ); + assert_eq!(time_stamper.submillis(), 5e8 as u32); let mut write_buf: [u8; 16] = [0; 16]; let written = time_stamper .write_to_bytes(&mut write_buf) @@ -1626,7 +1630,7 @@ mod tests { #[test] fn read_stamp_with_ps_submillis_precision() { let mut time_stamper = TimeProvider::new_with_u16_days(0, 0); - time_stamper.set_submillis_precision(SubmillisPrecision::Picoseconds(5e8 
as u32)); + time_stamper.set_submillis(SubmillisPrecision::Picoseconds, 5e8 as u32); let mut write_buf: [u8; 16] = [0; 16]; let written = time_stamper .write_to_bytes(&mut write_buf) @@ -1636,19 +1640,17 @@ mod tests { assert!(stamp_deserialized.is_ok()); let stamp_deserialized = stamp_deserialized.unwrap(); assert_eq!(stamp_deserialized.len_as_bytes(), 11); - assert!(stamp_deserialized.submillis_precision().is_some()); - let submillis_rec = stamp_deserialized.submillis_precision().unwrap(); - if let SubmillisPrecision::Picoseconds(ps) = submillis_rec { - assert_eq!(ps, 5e8 as u32); - } else { - panic!("Wrong precision field detected"); - } + assert_eq!( + stamp_deserialized.submillis_precision(), + SubmillisPrecision::Picoseconds + ); + assert_eq!(stamp_deserialized.submillis(), 5e8 as u32); } #[test] fn read_stamp_with_us_submillis_precision() { let mut time_stamper = TimeProvider::new_with_u16_days(0, 0); - time_stamper.set_submillis_precision(SubmillisPrecision::Microseconds(500)); + time_stamper.set_submillis(SubmillisPrecision::Microseconds, 500); let mut write_buf: [u8; 16] = [0; 16]; let written = time_stamper .write_to_bytes(&mut write_buf) @@ -1658,19 +1660,17 @@ mod tests { assert!(stamp_deserialized.is_ok()); let stamp_deserialized = stamp_deserialized.unwrap(); assert_eq!(stamp_deserialized.len_as_bytes(), 9); - assert!(stamp_deserialized.submillis_precision().is_some()); - let submillis_rec = stamp_deserialized.submillis_precision().unwrap(); - if let SubmillisPrecision::Microseconds(us) = submillis_rec { - assert_eq!(us, 500); - } else { - panic!("Wrong precision field detected"); - } + assert_eq!( + stamp_deserialized.submillis_precision(), + SubmillisPrecision::Microseconds + ); + assert_eq!(stamp_deserialized.submillis(), 500); } #[test] fn read_u24_stamp_with_us_submillis_precision() { let mut time_stamper = TimeProvider::new_with_u24_days(u16::MAX as u32 + 1, 0).unwrap(); - time_stamper.set_submillis_precision(SubmillisPrecision::Microseconds(500)); + time_stamper.set_submillis(SubmillisPrecision::Microseconds, 500); let mut write_buf: [u8; 16] = [0; 16]; let written = time_stamper .write_to_bytes(&mut write_buf) @@ -1682,19 +1682,17 @@ mod tests { let stamp_deserialized = stamp_deserialized.unwrap(); assert_eq!(stamp_deserialized.len_as_bytes(), 10); assert_eq!(stamp_deserialized.ccsds_days(), u16::MAX as u32 + 1); - assert!(stamp_deserialized.submillis_precision().is_some()); - let submillis_rec = stamp_deserialized.submillis_precision().unwrap(); - if let SubmillisPrecision::Microseconds(us) = submillis_rec { - assert_eq!(us, 500); - } else { - panic!("Wrong precision field detected"); - } + assert_eq!( + stamp_deserialized.submillis_precision(), + SubmillisPrecision::Microseconds + ); + assert_eq!(stamp_deserialized.submillis(), 500); } #[test] fn read_u24_stamp_with_ps_submillis_precision() { let mut time_stamper = TimeProvider::new_with_u24_days(u16::MAX as u32 + 1, 0).unwrap(); - time_stamper.set_submillis_precision(SubmillisPrecision::Picoseconds(5e8 as u32)); + time_stamper.set_submillis(SubmillisPrecision::Picoseconds, 5e8 as u32); let mut write_buf: [u8; 16] = [0; 16]; let written = time_stamper .write_to_bytes(&mut write_buf) @@ -1706,13 +1704,11 @@ mod tests { let stamp_deserialized = stamp_deserialized.unwrap(); assert_eq!(stamp_deserialized.len_as_bytes(), 12); assert_eq!(stamp_deserialized.ccsds_days(), u16::MAX as u32 + 1); - assert!(stamp_deserialized.submillis_precision().is_some()); - let submillis_rec = 
stamp_deserialized.submillis_precision().unwrap(); - if let SubmillisPrecision::Picoseconds(ps) = submillis_rec { - assert_eq!(ps, 5e8 as u32); - } else { - panic!("Wrong precision field detected"); - } + assert_eq!( + stamp_deserialized.submillis_precision(), + SubmillisPrecision::Picoseconds + ); + assert_eq!(stamp_deserialized.submillis(), 5e8 as u32); } fn generic_dt_case_0_no_prec(subsec_millis: u32) -> DateTime { @@ -1785,13 +1781,11 @@ mod tests { time_provider.ms_of_day, 30 * 1000 + 49 * 60 * 1000 + 16 * 60 * 60 * 1000 + subsec_millis ); - assert!(time_provider.submillis_precision.is_some()); - match time_provider.submillis_precision.unwrap() { - SubmillisPrecision::Microseconds(us) => { - assert_eq!(us, 500); - } - _ => panic!("unexpected precision field"), - } + assert_eq!( + time_provider.submillis_precision(), + SubmillisPrecision::Microseconds + ); + assert_eq!(time_provider.submillis(), 500); assert_eq!(time_provider.date_time().unwrap(), datetime_utc); } @@ -1841,13 +1835,11 @@ mod tests { time_provider.ms_of_day, 30 * 1000 + 49 * 60 * 1000 + 16 * 60 * 60 * 1000 + subsec_millis ); - assert!(time_provider.submillis_precision.is_some()); - match time_provider.submillis_precision.unwrap() { - SubmillisPrecision::Picoseconds(ps) => { - assert_eq!(ps, submilli_nanos * 1000); - } - _ => panic!("unexpected precision field"), - } + assert_eq!( + time_provider.submillis_precision(), + SubmillisPrecision::Picoseconds + ); + assert_eq!(time_provider.submillis(), submilli_nanos * 1000); assert_eq!(time_provider.date_time().unwrap(), datetime_utc); } @@ -1942,7 +1934,7 @@ mod tests { let invalid_unix_secs: i64 = (u16::MAX as i64 + 1) * SECONDS_PER_DAY as i64; let subsec_millis = 0; match TimeProvider::from_unix_secs_with_u16_days(&UnixTimestamp::const_new( - invalid_unix_secs as i64, + invalid_unix_secs, subsec_millis, )) { Ok(_) => { @@ -1954,6 +1946,7 @@ mod tests { days, unix_to_ccsds_days(invalid_unix_secs / SECONDS_PER_DAY as i64) ); + assert_eq!(e.to_string(), "cds error: invalid ccsds days 69919"); } else { panic!("unexpected error {}", e) } @@ -2039,7 +2032,10 @@ mod tests { assert_eq!(dyn_provider.ccdsd_time_code(), CcsdsTimeCodes::Cds); assert_eq!(dyn_provider.ccsds_days_as_u32(), u16::MAX as u32 + 1); assert_eq!(dyn_provider.ms_of_day(), 24); - assert_eq!(dyn_provider.submillis_precision(), None); + assert_eq!( + dyn_provider.submillis_precision(), + SubmillisPrecision::Absent + ); assert_eq!( dyn_provider.len_of_day_seg(), LengthOfDaySegment::Long24Bits @@ -2049,64 +2045,58 @@ mod tests { #[test] fn test_addition_with_us_precision_u16_days() { let mut provider = TimeProvider::new_with_u16_days(0, 0); - provider.set_submillis_precision(SubmillisPrecision::Microseconds(0)); + provider.set_submillis(SubmillisPrecision::Microseconds, 0); let duration = Duration::from_micros(500); provider += duration; - assert!(provider.submillis_precision().is_some()); - let prec = provider.submillis_precision().unwrap(); - if let SubmillisPrecision::Microseconds(us) = prec { - assert_eq!(us, 500); - } else { - panic!("invalid precision {:?}", prec) - } + assert_eq!( + provider.submillis_precision(), + SubmillisPrecision::Microseconds + ); + assert_eq!(provider.submillis(), 500); } #[test] fn test_addition_with_us_precision_u16_days_with_subsec_millis() { let mut provider = TimeProvider::new_with_u16_days(0, 0); - provider.set_submillis_precision(SubmillisPrecision::Microseconds(0)); + provider.set_submillis(SubmillisPrecision::Microseconds, 0); let duration = Duration::from_micros(1200); 
provider += duration; - assert!(provider.submillis_precision().is_some()); - let prec = provider.submillis_precision().unwrap(); - if let SubmillisPrecision::Microseconds(us) = prec { - assert_eq!(us, 200); - assert_eq!(provider.ms_of_day, 1); - } else { - panic!("invalid precision {:?}", prec) - } + assert_eq!( + provider.submillis_precision(), + SubmillisPrecision::Microseconds + ); + assert_eq!(provider.submillis(), 200); + assert_eq!(provider.ms_of_day(), 1); } #[test] fn test_addition_with_us_precision_u16_days_carry_over() { let mut provider = TimeProvider::new_with_u16_days(0, 0); - provider.set_submillis_precision(SubmillisPrecision::Microseconds(800)); + provider.set_submillis(SubmillisPrecision::Microseconds, 800); let duration = Duration::from_micros(400); provider += duration; - assert!(provider.submillis_precision().is_some()); - let prec = provider.submillis_precision().unwrap(); - if let SubmillisPrecision::Microseconds(us) = prec { - assert_eq!(us, 200); - assert_eq!(provider.ms_of_day, 1); - } else { - panic!("invalid precision {:?}", prec) - } + + assert_eq!( + provider.submillis_precision(), + SubmillisPrecision::Microseconds + ); + assert_eq!(provider.submillis(), 200); + assert_eq!(provider.ms_of_day(), 1); } #[test] fn test_addition_with_ps_precision_u16_days() { let mut provider = TimeProvider::new_with_u16_days(0, 0); - provider.set_submillis_precision(SubmillisPrecision::Picoseconds(0)); + provider.set_submillis(SubmillisPrecision::Picoseconds, 0); // 500 us as ns let duration = Duration::from_nanos(500 * 10u32.pow(3) as u64); provider += duration; - assert!(provider.submillis_precision().is_some()); - let prec = provider.submillis_precision().unwrap(); - if let SubmillisPrecision::Picoseconds(ps) = prec { - assert_eq!(ps, 500 * 10_u32.pow(6)); - } else { - panic!("invalid precision {:?}", prec) - } + + assert_eq!( + provider.submillis_precision(), + SubmillisPrecision::Picoseconds + ); + assert_eq!(provider.submillis(), 500 * 10u32.pow(6)); } #[test] @@ -2119,9 +2109,9 @@ mod tests { assert_eq!(new_stamp.ms_of_day, 1000); } - fn check_ps_and_carryover(prec: SubmillisPrecision, ms_of_day: u32, val: u32) { - if let SubmillisPrecision::Picoseconds(ps) = prec { - assert_eq!(ps, val); + fn check_ps_and_carryover(prec: SubmillisPrecision, submillis: u32, ms_of_day: u32, val: u32) { + if prec == SubmillisPrecision::Picoseconds { + assert_eq!(submillis, val); assert_eq!(ms_of_day, 1); } else { panic!("invalid precision {:?}", prec) @@ -2130,32 +2120,38 @@ mod tests { #[test] fn test_addition_with_ps_precision_u16_days_with_subsec_millis() { let mut provider = TimeProvider::new_with_u16_days(0, 0); - provider.set_submillis_precision(SubmillisPrecision::Picoseconds(0)); + provider.set_submillis(SubmillisPrecision::Picoseconds, 0); // 1200 us as ns let duration = Duration::from_nanos(1200 * 10u32.pow(3) as u64); provider += duration; - assert!(provider.submillis_precision().is_some()); - let prec = provider.submillis_precision().unwrap(); - check_ps_and_carryover(prec, provider.ms_of_day, 200 * 10_u32.pow(6)); + check_ps_and_carryover( + provider.submillis_precision(), + provider.submillis(), + provider.ms_of_day, + 200 * 10_u32.pow(6), + ); } #[test] fn test_addition_with_ps_precision_u16_days_carryover() { let mut provider = TimeProvider::new_with_u16_days(0, 0); // 800 us as ps - provider.set_submillis_precision(SubmillisPrecision::Picoseconds(800 * 10_u32.pow(6))); + provider.set_submillis(SubmillisPrecision::Picoseconds, 800 * 10_u32.pow(6)); // 400 us as ns let 
duration = Duration::from_nanos(400 * 10u32.pow(3) as u64); provider += duration; - assert!(provider.submillis_precision().is_some()); - let prec = provider.submillis_precision().unwrap(); - check_ps_and_carryover(prec, provider.ms_of_day, 200 * 10_u32.pow(6)); + check_ps_and_carryover( + provider.submillis_precision(), + provider.submillis(), + provider.ms_of_day, + 200 * 10_u32.pow(6), + ); } #[test] fn test_dyn_creation_u16_days_with_precision() { let mut stamp = TimeProvider::new_with_u16_days(24, 24); - stamp.set_submillis_precision(SubmillisPrecision::Microseconds(666)); + stamp.set_submillis(SubmillisPrecision::Microseconds, 666); let mut buf: [u8; 32] = [0; 32]; stamp.write_to_bytes(&mut buf).unwrap(); let dyn_provider = get_dyn_time_provider_from_bytes(&buf); @@ -2168,10 +2164,11 @@ mod tests { dyn_provider.len_of_day_seg(), LengthOfDaySegment::Short16Bits ); - assert!(dyn_provider.submillis_precision().is_some()); - if let SubmillisPrecision::Microseconds(us) = dyn_provider.submillis_precision().unwrap() { - assert_eq!(us, 666); - } + assert_eq!( + dyn_provider.submillis_precision(), + SubmillisPrecision::Microseconds + ); + assert_eq!(dyn_provider.submillis(), 666); } #[test] @@ -2218,7 +2215,7 @@ mod tests { let stamp0 = TimeProvider::new_with_u24_days(0, 0).unwrap(); let stamp1 = TimeProvider::new_with_u24_days(0, 50000).unwrap(); let mut stamp2 = TimeProvider::new_with_u24_days(0, 50000).unwrap(); - stamp2.set_submillis_precision(SubmillisPrecision::Microseconds(500)); + stamp2.set_submillis(SubmillisPrecision::Microseconds, 500); let stamp3 = TimeProvider::new_with_u24_days(1, 0).unwrap(); assert!(stamp1 > stamp0); assert!(stamp2 > stamp0); @@ -2239,6 +2236,20 @@ mod tests { assert_eq!(stamp_small.ms_of_day(), 500); } + #[test] + fn test_update_from_now() { + let mut stamp = TimeProvider::new_with_u16_days(0, 0); + let _ = stamp.update_from_now(); + let dt = stamp.unix_stamp().as_date_time().unwrap(); + assert!(dt.year() > 2020); + } + + #[test] + fn test_setting_submillis_precision() { + let mut provider = TimeProvider::new_with_u16_days(0, 15); + provider.set_submillis(SubmillisPrecision::Microseconds, 500); + } + #[test] #[cfg(feature = "serde")] fn test_serialization() { diff --git a/src/time/cuc.rs b/src/time/cuc.rs index 252faea..b5dd95b 100644 --- a/src/time/cuc.rs +++ b/src/time/cuc.rs @@ -7,6 +7,7 @@ use chrono::Datelike; use core::fmt::Debug; use core::ops::{Add, AddAssign}; use core::time::Duration; +use core::u64; const MIN_CUC_LEN: usize = 2; @@ -93,10 +94,15 @@ pub fn fractional_part_from_subsec_ns( #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum CucError { InvalidCounterWidth(u8), - InvalidFractionResolution(FractionalResolution), /// Invalid counter supplied. 
- InvalidCounter(u8, u64), - InvalidFractions(FractionalResolution, u64), + InvalidCounter { + width: u8, + counter: u64, + }, + InvalidFractions { + resolution: FractionalResolution, + value: u64, + }, } impl Display for CucError { @@ -105,14 +111,14 @@ impl Display for CucError { CucError::InvalidCounterWidth(w) => { write!(f, "invalid cuc counter byte width {w}") } - CucError::InvalidFractionResolution(w) => { - write!(f, "invalid cuc fractional part byte width {w:?}") + CucError::InvalidCounter { width, counter } => { + write!(f, "invalid cuc counter {counter} for width {width}") } - CucError::InvalidCounter(w, c) => { - write!(f, "invalid cuc counter {c} for width {w}") - } - CucError::InvalidFractions(w, c) => { - write!(f, "invalid cuc fractional part {c} for width {w:?}") + CucError::InvalidFractions { resolution, value } => { + write!( + f, + "invalid cuc fractional part {value} for resolution {resolution:?}" + ) } } } @@ -278,6 +284,8 @@ impl TimeProviderCcsdsEpoch { .map_err(|e| e.into()) } + /// Generates a CUC timestamp from a UNIX timestamp with a width of 4. This width is able + /// to accomodate all possible UNIX timestamp values. pub fn from_unix_stamp( unix_stamp: &UnixTimestamp, res: FractionalResolution, @@ -289,9 +297,6 @@ impl TimeProviderCcsdsEpoch { unix_stamp.as_date_time().unwrap(), )); } - if ccsds_epoch > u32::MAX as i64 { - return Err(CucError::InvalidCounter(4, ccsds_epoch as u64).into()); - } let mut fractions = None; if let Some(subsec_millis) = unix_stamp.subsecond_millis { fractions = fractional_part_from_subsec_ns(res, subsec_millis as u64 * 10_u64.pow(6)); @@ -308,12 +313,19 @@ impl TimeProviderCcsdsEpoch { self.counter } + pub fn width(&self) -> u8 { + self.counter.0 + } + + pub fn counter(&self) -> u32 { + self.counter.1 + } + pub fn width_fractions_pair(&self) -> Option { self.fractions } pub fn set_fractions(&mut self, fractions: FractionalPart) -> Result<(), CucError> { - Self::verify_fractions_width(fractions.0)?; Self::verify_fractions_value(fractions)?; self.fractions = Some(fractions); self.update_p_field_fractions(); @@ -343,10 +355,12 @@ impl TimeProviderCcsdsEpoch { ) -> Result { Self::verify_counter_width(counter.0)?; if counter.1 > (2u64.pow(counter.0 as u32 * 8) - 1) as u32 { - return Err(CucError::InvalidCounter(counter.0, counter.1 as u64)); + return Err(CucError::InvalidCounter { + width: counter.0, + counter: counter.1 as u64, + }); } if let Some(fractions) = fractions { - Self::verify_fractions_width(fractions.0)?; Self::verify_fractions_value(fractions)?; } Ok(Self { @@ -432,16 +446,12 @@ impl TimeProviderCcsdsEpoch { Ok(()) } - fn verify_fractions_width(width: FractionalResolution) -> Result<(), CucError> { - if width as u8 > 3 { - return Err(CucError::InvalidFractionResolution(width)); - } - Ok(()) - } - fn verify_fractions_value(val: FractionalPart) -> Result<(), CucError> { if val.1 > 2u32.pow((val.0 as u32) * 8) - 1 { - return Err(CucError::InvalidFractions(val.0, val.1 as u64)); + return Err(CucError::InvalidFractions { + resolution: val.0, + value: val.1 as u64, + }); } Ok(()) } @@ -713,6 +723,7 @@ impl Add for &TimeProviderCcsdsEpoch { #[cfg(test)] mod tests { use super::*; + use alloc::string::ToString; use chrono::{Datelike, Timelike}; #[allow(unused_imports)] use std::println; @@ -721,6 +732,8 @@ mod tests { fn test_basic_zero_epoch() { let zero_cuc = TimeProviderCcsdsEpoch::new(0); assert_eq!(zero_cuc.len_as_bytes(), 5); + assert_eq!(zero_cuc.width(), zero_cuc.width_counter_pair().0); + assert_eq!(zero_cuc.counter(), 
zero_cuc.width_counter_pair().1); assert_eq!(zero_cuc.ccdsd_time_code(), CcsdsTimeCodes::CucCcsdsEpoch); let counter = zero_cuc.width_counter_pair(); assert_eq!(counter.0, 4); @@ -861,6 +874,7 @@ mod tests { #[test] fn invalid_buf_len_for_read() {} + #[test] fn write_read_three_byte_cntr_stamp() { let mut buf = [0; 4]; @@ -1102,12 +1116,7 @@ mod tests { assert_eq!(fractions.1, 2_u32.pow(3 * 8) - 2); } - #[test] - fn add_duration_basic() { - let mut cuc_stamp = TimeProviderCcsdsEpoch::new(200); - cuc_stamp.set_fractional_resolution(FractionalResolution::FifteenUs); - let duration = Duration::from_millis(2500); - cuc_stamp += duration; + fn check_stamp_after_addition(cuc_stamp: &TimeProviderCcsdsEpoch) { assert_eq!(cuc_stamp.width_counter_pair().1, 202); let fractions = cuc_stamp.width_fractions_pair().unwrap().1; let expected_val = @@ -1120,6 +1129,41 @@ mod tests { assert!(cuc_stamp2.subsecond_millis().unwrap() <= 1); } + #[test] + fn add_duration_basic() { + let mut cuc_stamp = TimeProviderCcsdsEpoch::new(200); + cuc_stamp.set_fractional_resolution(FractionalResolution::FifteenUs); + let duration = Duration::from_millis(2500); + cuc_stamp += duration; + check_stamp_after_addition(&cuc_stamp); + } + + #[test] + fn add_duration_basic_on_ref() { + let mut cuc_stamp = TimeProviderCcsdsEpoch::new(200); + cuc_stamp.set_fractional_resolution(FractionalResolution::FifteenUs); + let duration = Duration::from_millis(2500); + let new_stamp = cuc_stamp + duration; + check_stamp_after_addition(&new_stamp); + } + + #[test] + fn add_duration_basic_no_fractions() { + let mut cuc_stamp = TimeProviderCcsdsEpoch::new(200); + let duration = Duration::from_millis(2000); + cuc_stamp += duration; + assert_eq!(cuc_stamp.counter(), 202); + assert_eq!(cuc_stamp.width_fractions_pair(), None); + } + + #[test] + fn add_duration_basic_on_ref_no_fractions() { + let cuc_stamp = TimeProviderCcsdsEpoch::new(200); + let duration = Duration::from_millis(2000); + let new_stamp = cuc_stamp + duration; + assert_eq!(new_stamp.counter(), 202); + assert_eq!(new_stamp.width_fractions_pair(), None); + } #[test] fn add_duration_overflow() { let mut cuc_stamp = @@ -1128,4 +1172,58 @@ mod tests { cuc_stamp += duration; assert_eq!(cuc_stamp.counter.1, 10); } + + #[test] + fn test_invalid_width_param() { + let error = TimeProviderCcsdsEpoch::new_generic(WidthCounterPair(8, 0), None); + assert!(error.is_err()); + let error = error.unwrap_err(); + if let CucError::InvalidCounterWidth(width) = error { + assert_eq!(width, 8); + assert_eq!(error.to_string(), "invalid cuc counter byte width 8"); + } else { + panic!("unexpected error: {}", error); + } + } + + #[test] + fn test_from_dt() { + let dt = Utc.with_ymd_and_hms(2021, 1, 1, 0, 0, 0).unwrap(); + let cuc = + TimeProviderCcsdsEpoch::from_date_time(&dt, FractionalResolution::Seconds).unwrap(); + assert_eq!(cuc.counter(), dt.timestamp() as u32); + } + + #[test] + fn test_new_u16_width() { + let cuc = TimeProviderCcsdsEpoch::new_u16_counter(0); + assert_eq!(cuc.width(), 2); + assert_eq!(cuc.counter(), 0); + } + + #[test] + fn from_unix_stamp() { + let unix_stamp = UnixTimestamp::new(0, 0).unwrap(); + let cuc = + TimeProviderCcsdsEpoch::from_unix_stamp(&unix_stamp, FractionalResolution::Seconds) + .expect("failed to create cuc from unix stamp"); + assert_eq!( + cuc.counter(), + (-DAYS_CCSDS_TO_UNIX * SECONDS_PER_DAY as i32) as u32 + ); + } + + #[test] + fn test_invalid_counter() { + let cuc_error = TimeProviderCcsdsEpoch::new_generic(WidthCounterPair(1, 256), None); + 
assert!(cuc_error.is_err()); + let cuc_error = cuc_error.unwrap_err(); + if let CucError::InvalidCounter { width, counter } = cuc_error { + assert_eq!(width, 1); + assert_eq!(counter, 256); + assert_eq!(cuc_error.to_string(), "invalid cuc counter 256 for width 1"); + } else { + panic!("unexpected error: {}", cuc_error); + } + } } diff --git a/src/time/mod.rs b/src/time/mod.rs index 5dedf99..3376677 100644 --- a/src/time/mod.rs +++ b/src/time/mod.rs @@ -82,13 +82,13 @@ impl Display for TimestampError { ) } TimestampError::Cds(e) => { - write!(f, "cds error {e}") + write!(f, "cds error: {e}") } TimestampError::Cuc(e) => { - write!(f, "cuc error {e}") + write!(f, "cuc error: {e}") } TimestampError::ByteConversion(e) => { - write!(f, "byte conversion error {e}") + write!(f, "time stamp: {e}") } TimestampError::DateBeforeCcsdsEpoch(e) => { write!(f, "datetime with date before ccsds epoch: {e}") @@ -412,7 +412,11 @@ impl Add for &UnixTimestamp { #[cfg(all(test, feature = "std"))] mod tests { - use super::*; + use alloc::string::ToString; + use chrono::{Datelike, Timelike}; + use std::format; + + use super::{cuc::CucError, *}; #[test] fn test_days_conversion() { @@ -426,6 +430,14 @@ mod tests { assert!(sec_floats > 0.0); } + #[test] + fn test_ms_of_day() { + let ms = ms_of_day(0.0); + assert_eq!(ms, 0); + let ms = ms_of_day(5.0); + assert_eq!(ms, 5000); + } + #[test] fn test_ccsds_epoch() { let now = SystemTime::now() @@ -534,6 +546,25 @@ mod tests { assert!(stamp1.subsecond_millis().is_none()); } + #[test] + fn test_as_dt() { + let stamp = UnixTimestamp::new_only_seconds(0); + let dt = stamp.as_date_time().unwrap(); + assert_eq!(dt.year(), 1970); + assert_eq!(dt.month(), 1); + assert_eq!(dt.day(), 1); + assert_eq!(dt.hour(), 0); + assert_eq!(dt.minute(), 0); + assert_eq!(dt.second(), 0); + } + + #[test] + fn test_from_now() { + let stamp_now = UnixTimestamp::from_now().unwrap(); + let dt_now = stamp_now.as_date_time().unwrap(); + assert!(dt_now.year() >= 2020); + } + #[test] fn test_addition_spillover() { let mut stamp0 = UnixTimestamp::new(1, 900).unwrap(); @@ -544,4 +575,11 @@ mod tests { assert_eq!(stamp0.unix_seconds, 3); assert_eq!(stamp0.subsecond_millis().unwrap(), 100); } + + #[test] + fn test_cuc_error_printout() { + let cuc_error = CucError::InvalidCounterWidth(12); + let stamp_error = TimestampError::from(cuc_error); + assert_eq!(stamp_error.to_string(), format!("cuc error: {cuc_error}")); + } } diff --git a/src/util.rs b/src/util.rs index b944763..88298f4 100644 --- a/src/util.rs +++ b/src/util.rs @@ -207,17 +207,21 @@ impl UnsignedEnum for UnsignedByteField { #[derive(Debug, Copy, Clone, Eq, PartialEq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct GenericUnsignedByteField { +pub struct GenericUnsignedByteField { value: TYPE, } -impl GenericUnsignedByteField { +impl GenericUnsignedByteField { pub const fn new(val: TYPE) -> Self { Self { value: val } } + + pub const fn value(&self) -> TYPE { + self.value + } } -impl UnsignedEnum for GenericUnsignedByteField { +impl UnsignedEnum for GenericUnsignedByteField { fn size(&self) -> usize { self.value.written_len() } @@ -344,8 +348,8 @@ pub mod tests { .expect("writing to raw buffer failed"); assert_eq!(len, 1); assert_eq!(buf[0], 5); - for i in 1..8 { - assert_eq!(buf[i], 0); + for val in buf.iter().skip(1) { + assert_eq!(*val, 0); } } @@ -360,8 +364,8 @@ pub mod tests { assert_eq!(len, 2); let raw_val = u16::from_be_bytes(buf[0..2].try_into().unwrap()); assert_eq!(raw_val, 3823); - for i in 2..8 { - 
assert_eq!(buf[i], 0); + for val in buf.iter().skip(2) { + assert_eq!(*val, 0); } } @@ -376,9 +380,9 @@ pub mod tests { assert_eq!(len, 4); let raw_val = u32::from_be_bytes(buf[0..4].try_into().unwrap()); assert_eq!(raw_val, 80932); - for i in 4..8 { + (4..8).for_each(|i| { assert_eq!(buf[i], 0); - } + }); } #[test] @@ -544,8 +548,8 @@ pub mod tests { .expect("writing to raw buffer failed"); let raw_val = u16::from_be_bytes(buf[0..2].try_into().unwrap()); assert_eq!(raw_val, 3823); - for i in 2..8 { - assert_eq!(buf[i], 0); + for val in buf.iter().skip(2) { + assert_eq!(*val, 0); } }
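
Taken together, the time-module hunks above replace the value-carrying `SubmillisPrecision::Microseconds(u16)` / `Picoseconds(u32)` variants with a plain precision selector plus a separate `submillis: u32` field (`set_submillis`, `submillis`, `clear_submillis`), and they add `width()` / `counter()` getters to the CUC provider. The sketch below is not part of this patch; it only illustrates how the reworked API might be used, assuming the module paths `spacepackets::time::cds` and `spacepackets::time::cuc` implied by the file locations and the doc example in the diff, and assuming the `CdsCommon` trait (whose impl blocks appear above) is public and must be imported for its getters.

```rust
// Minimal usage sketch of the patched submillisecond API. Paths and the
// CdsCommon trait import are assumptions based on this diff, not verified docs.
use core::time::Duration;
use spacepackets::time::cds::{CdsCommon, SubmillisPrecision, TimeProvider};
use spacepackets::time::cuc::TimeProviderCcsdsEpoch;
use spacepackets::time::TimeWriter;

fn main() {
    // CDS: precision selector and submillisecond value are now passed separately;
    // set_submillis returns false if the value does not fit the chosen precision.
    let mut stamp = TimeProvider::new_with_u16_days(24, 24);
    assert!(stamp.set_submillis(SubmillisPrecision::Microseconds, 500));
    assert_eq!(stamp.submillis_precision(), SubmillisPrecision::Microseconds);
    assert_eq!(stamp.submillis(), 500);
    assert_eq!(stamp.precision_as_ns(), Some(500_000));

    // Carry-over into the millisecond-of-day field still works through AddAssign:
    // 500 us + 700 us rolls over into one extra millisecond.
    stamp += Duration::from_micros(700);
    assert_eq!(stamp.submillis(), 200);
    assert_eq!(stamp.ms_of_day(), 25);

    // Writing appends the 2-byte microsecond field after the 7-byte short stamp.
    let mut buf = [0u8; 16];
    assert_eq!(stamp.write_to_bytes(&mut buf).unwrap(), 9);

    // CUC: the new width()/counter() getters replace destructuring the
    // WidthCounterPair by hand.
    let cuc = TimeProviderCcsdsEpoch::new_u16_counter(200);
    assert_eq!(cuc.width(), 2);
    assert_eq!(cuc.counter(), 200);
}
```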