Compare commits
27 Commits (v0.11.0 ... 81423fc6e8)

Commit SHA1:
81423fc6e8
a399b11a8e
9d4c7446a3
b87f7d73b1
80744eea16
a5918bfd4a
0e347b0e37
58dabb6f2f
7fd65aa592
0024afc83e
c48bd848d3
b8be9ae641
c2506dbba9
b842b9d11a
374c034e92
791c7f6e02
8001938507
73ab7ff148
c59d01174f
eb49bff0c9
af392d40d0
b78bfe2114
69a3b1d8f3
e7b3ba9575
c515535ccd
95158a8cd2
8b1ccb0cd0
.github/workflows/ci.yml (vendored, 115 changed lines)
@@ -1,42 +1,39 @@
on: [push]
name: ci
on: [push, pull_request]
jobs:
check:
name: Check
name: Check build
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
- uses: actions-rs/cargo@v1
with:
command: check
args: --release
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo check --release
msrv:
name: Check with MSRV
test:
name: Run Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
toolchain: 1.65.0
override: true
profile: minimal
- uses: actions-rs/cargo@v1
with:
command: check
args: --release
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- name: Install nextest
uses: taiki-e/install-action@nextest
- run: cargo nextest run --all-features
- run: cargo test --doc
msrv:
name: Check MSRV
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.68.2
- run: cargo check --release
cross-check:
name: Check Cross
name: Check Cross-Compilation
runs-on: ubuntu-latest
strategy:
matrix:
@@ -44,70 +41,32 @@ jobs:
- armv7-unknown-linux-gnueabihf
- thumbv7em-none-eabihf
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
with:
profile: minimal
toolchain: stable
target: ${{ matrix.target }}
override: true
- uses: actions-rs/cargo@v1
with:
use-cross: true
command: check
args: --release --target=${{ matrix.target }} --no-default-features
targets: "armv7-unknown-linux-gnueabihf, thumbv7em-none-eabihf"
- run: cargo check --release --target=${{matrix.target}} --no-default-features
fmt:
name: Rustfmt
name: Check formatting
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- run: rustup component add rustfmt
- uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo fmt --all -- --check
check-doc:
docs:
name: Check Documentation Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
toolchain: nightly
override: true
profile: minimal
- uses: actions-rs/cargo@v1
with:
command: doc
args: --all-features
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@nightly
- run: RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features
clippy:
name: Clippy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
- run: rustup component add clippy
- uses: actions-rs/cargo@v1
with:
command: clippy
args: -- -D warnings
ci:
if: ${{ success() }}
# all new jobs must be added to this list
needs: [check, fmt, clippy]
runs-on: ubuntu-latest
steps:
- name: CI succeeded
run: exit 0
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo clippy -- -D warnings
CHANGELOG.md (36 changed lines)
@@ -8,6 +8,42 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

# [unreleased]

# [v0.12.0]

- Minor documentation build updates.

## Added

- Added new `cfdp::tlv::TlvOwned` type which erases the lifetime and is clonable.
- Dedicated `cfdp::tlv::TlvLvDataTooLarge` error struct for APIs where this is the only possible
  API error.

## Added and Changed

- Added new `ReadableTlv` to avoid some boilerplate code and have a common abstraction implemented
  for both `Tlv` and `TlvOwned` to read the raw TLV data field and its length.
- Replaced `cfdp::tlv::TlvLvError` by `cfdp::tlv::TlvLvDataTooLarge` where applicable.

# [v0.11.2] 2024-05-19

- Bumped MSRV to 1.68.2

## Fixed

- Removed `defmt::Format` impl for `MetadataPduCreator` which seems to be problematic.

# [v0.11.1] 2024-04-22

## Fixed

- The default data length for `SpHeader` constructors where the data field length is not
  specified is now 0.
- The `SpHeader::new_from_fields` constructor is public now.

## Added

- `SpHeader::to_vec` method.

# [v0.11.0] 2024-04-16

## Changed
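The `TlvOwned` and `ReadableTlv` entries above can be exercised as in the following minimal sketch. It is an illustration, not part of the diff; the helper name `print_tlv_stats` is made up, and the crate's default features (which enable `alloc`) are assumed:

```rust
use spacepackets::cfdp::tlv::{ReadableTlv, Tlv, TlvOwned, TlvType};

// Hypothetical helper: works for both TLV variants since both implement ReadableTlv.
fn print_tlv_stats(tlv: &impl ReadableTlv) {
    println!("value length {}, full length {}", tlv.len_value(), tlv.len_full());
}

fn main() {
    let data = [1, 2, 3];
    let borrowed = Tlv::new(TlvType::FlowLabel, &data).expect("TLV creation failed");
    // TlvOwned erases the lifetime, so it can be cloned and stored without the data slice.
    let owned: TlvOwned = borrowed.to_owned();
    print_tlv_stats(&borrowed);
    print_tlv_stats(&owned);
    // The borrowing and the owned variant compare equal if type field and value match.
    assert_eq!(borrowed, owned);
}
```

Since both TLV variants implement `ReadableTlv`, helper code no longer needs separate read paths for the borrowing and the owned type.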
Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "spacepackets"
version = "0.11.0"
version = "0.12.0"
edition = "2021"
rust-version = "1.65"
rust-version = "1.68.2"
authors = ["Robin Mueller <muellerr@irs.uni-stuttgart.de>"]
description = "Generic implementations for various CCSDS and ECSS packet standards"
homepage = "https://egit.irs.uni-stuttgart.de/rust/spacepackets"
@@ -60,11 +60,11 @@ chrono = "0.4"
default = ["std"]
std = ["chrono/std", "chrono/clock", "alloc", "thiserror"]
serde = ["dep:serde", "chrono/serde"]
alloc = ["postcard/alloc", "chrono/alloc"]
alloc = ["postcard/alloc", "chrono/alloc", "defmt/alloc", "serde/alloc"]
chrono = ["dep:chrono"]
timelib = ["dep:time"]
defmt = ["dep:defmt"]

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docs_rs", "--generate-link-to-definition"]
rustdoc-args = ["--generate-link-to-definition"]
README.md (10 changed lines)
@@ -61,3 +61,13 @@ cargo install grcov --locked

After that, you can simply run `coverage.py` to test the project with coverage. You can optionally
supply the `--open` flag to open the coverage report in your web browser.

# Miri

You can run the [`miri`](https://github.com/rust-lang/miri) tool on this library to check for
undefined behaviour (UB). This library does not use any `unsafe` code blocks, but `miri` could
still catch UB from used libraries.

```sh
cargo +nightly miri nextest run --all-features
```
automation/Jenkinsfile (vendored, 4 changed lines)
@@ -21,7 +21,9 @@ pipeline {
}
stage('Docs') {
steps {
sh 'cargo +nightly doc --all-features'
sh """
RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features
"""
}
}
stage('Rustfmt') {
@@ -4,7 +4,9 @@ Checklist for new releases

# Pre-Release

1. Make sure any new modules are documented sufficiently and check the docs with
   `cargo +nightly doc --all-features --config 'build.rustdocflags=["--cfg", "docs_rs"]' --open`.
   `RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options" cargo +nightly doc --all-features --open`
   or `cargo +nightly doc --all-features --config 'build.rustdocflags=["--cfg", "docsrs", "--generate-link-to-definition"]' --open`
   (was problematic on more recent nightly versions).
2. Bump the version specifier in `Cargo.toml`.
3. Update `CHANGELOG.md`: Convert the `unreleased` section into a version section with a date and add a new
   `unreleased` section.
@@ -1,5 +1,4 @@
//! Generic CFDP length-value (LV) abstraction as specified in CFDP 5.1.8.
use crate::cfdp::TlvLvError;
use crate::ByteConversionError;
use core::str::Utf8Error;
#[cfg(feature = "serde")]
@@ -7,6 +6,8 @@ use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::string::String;
use super::TlvLvDataTooLarge;
pub const MIN_LV_LEN: usize = 1;
/// Generic CFDP length-value (LV) abstraction as specified in CFDP 5.1.8.
@@ -63,9 +64,9 @@ pub(crate) fn generic_len_check_deserialization(
impl<'data> Lv<'data> {
#[inline]
pub fn new(data: &[u8]) -> Result<Lv, TlvLvError> {
pub fn new(data: &[u8]) -> Result<Lv, TlvLvDataTooLarge> {
if data.len() > u8::MAX as usize {
return Err(TlvLvError::DataTooLarge(data.len()));
return Err(TlvLvDataTooLarge(data.len()));
}
Ok(Lv {
data,
@@ -85,7 +86,7 @@ impl<'data> Lv<'data> {
/// Helper function to build a string LV. This is especially useful for the file or directory
/// path LVs
#[inline]
pub fn new_from_str(str_slice: &str) -> Result<Lv, TlvLvError> {
pub fn new_from_str(str_slice: &str) -> Result<Lv, TlvLvDataTooLarge> {
Self::new(str_slice.as_bytes())
}
@@ -93,7 +94,7 @@ impl<'data> Lv<'data> {
/// path LVs
#[cfg(feature = "std")]
#[inline]
pub fn new_from_string(string: &'data String) -> Result<Lv<'data>, TlvLvError> {
pub fn new_from_string(string: &'data String) -> Result<Lv<'data>, TlvLvDataTooLarge> {
Self::new(string.as_bytes())
}
@@ -177,10 +178,10 @@ impl<'data> Lv<'data> {
#[cfg(test)]
pub mod tests {
use super::*;
use alloc::string::ToString;
use crate::cfdp::TlvLvError;
use super::*;
use crate::ByteConversionError;
use std::string::String;
@@ -271,15 +272,11 @@ pub mod tests {
let lv = Lv::new(&data_big);
assert!(lv.is_err());
let error = lv.unwrap_err();
if let TlvLvError::DataTooLarge(size) = error {
assert_eq!(size, u8::MAX as usize + 1);
assert_eq!(
error.to_string(),
"data with size 256 larger than allowed 255 bytes"
);
} else {
panic!("invalid exception {:?}", error)
}
assert_eq!(error.0, u8::MAX as usize + 1);
assert_eq!(
error.to_string(),
"data with size 256 larger than allowed 255 bytes"
);
}
#[test]
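The `Lv` constructor changes above narrow the error type to the dedicated struct. A small illustrative sketch of how calling code can handle it; the helper `build_path_lv` is hypothetical and the `std` feature is assumed for printing:

```rust
use spacepackets::cfdp::lv::Lv;
use spacepackets::cfdp::TlvLvDataTooLarge;

// Hypothetical helper: Lv::new_from_str now returns the dedicated error type directly,
// so no match on the broader TlvLvError enum is required anymore.
fn build_path_lv(path: &str) -> Result<Lv<'_>, TlvLvDataTooLarge> {
    Lv::new_from_str(path)
}

fn main() {
    match build_path_lv("/tmp/some-file.bin") {
        Ok(lv) => println!("LV with {} value bytes", lv.value().len()),
        // The tuple field carries the offending data length.
        Err(TlvLvDataTooLarge(len)) => println!("data too large: {} bytes", len),
    }
}
```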
@@ -176,11 +176,30 @@ impl Default for ChecksumType {
pub const NULL_CHECKSUM_U32: [u8; 4] = [0; 4];
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct TlvLvDataTooLarge(pub usize);
impl Display for TlvLvDataTooLarge {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
write!(
f,
"data with size {} larger than allowed {} bytes",
self.0,
u8::MAX
)
}
}
#[cfg(feature = "std")]
impl Error for TlvLvDataTooLarge {}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum TlvLvError {
DataTooLarge(usize),
DataTooLarge(TlvLvDataTooLarge),
ByteConversion(ByteConversionError),
/// First value: Found value. Second value: Expected value if there is one.
InvalidTlvTypeField {
@@ -197,6 +216,12 @@ pub enum TlvLvError {
InvalidFilestoreActionCode(u8),
}
impl From<TlvLvDataTooLarge> for TlvLvError {
fn from(value: TlvLvDataTooLarge) -> Self {
Self::DataTooLarge(value)
}
}
impl From<ByteConversionError> for TlvLvError {
fn from(value: ByteConversionError) -> Self {
Self::ByteConversion(value)
@@ -206,13 +231,8 @@ impl From<ByteConversionError> for TlvLvError {
impl Display for TlvLvError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
TlvLvError::DataTooLarge(data_len) => {
write!(
f,
"data with size {} larger than allowed {} bytes",
data_len,
u8::MAX
)
TlvLvError::DataTooLarge(e) => {
write!(f, "{}", e)
}
TlvLvError::ByteConversion(e) => {
write!(f, "tlv or lv byte conversion: {}", e)
@@ -240,6 +260,7 @@ impl Display for TlvLvError {
impl Error for TlvLvError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
TlvLvError::DataTooLarge(e) => Some(e),
TlvLvError::ByteConversion(e) => Some(e),
_ => None,
}
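The new `From<TlvLvDataTooLarge> for TlvLvError` impl shown above keeps the `?` operator working for APIs that still return the broader enum. An illustrative sketch; the function name `make_flow_label_tlv` is made up:

```rust
use spacepackets::cfdp::tlv::{ReadableTlv, Tlv, TlvType};
use spacepackets::cfdp::TlvLvError;

// Tlv::new returns the narrow TlvLvDataTooLarge error; `?` converts it into the
// broader TlvLvError via the new From impl.
fn make_flow_label_tlv(data: &[u8]) -> Result<Tlv<'_>, TlvLvError> {
    let tlv = Tlv::new(TlvType::FlowLabel, data)?;
    Ok(tlv)
}

fn main() {
    let tlv = make_flow_label_tlv(&[0xAA, 0xBB]).unwrap();
    // Two value bytes plus the type and length bytes.
    assert_eq!(tlv.len_full(), 4);
}
```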
@@ -10,6 +10,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use super::tlv::ReadableTlv;
use super::{CfdpPdu, WritablePduPacket};
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]
@@ -11,6 +11,7 @@ use alloc::vec::Vec;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use super::tlv::ReadableTlv;
use super::{CfdpPdu, WritablePduPacket};
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq)]
@@ -56,7 +57,6 @@ pub fn build_metadata_opts_from_vec(
/// This abstraction exposes a specialized API for creating metadata PDUs as specified in
/// CFDP chapter 5.2.5.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct MetadataPduCreator<'src_name, 'dest_name, 'opts> {
pdu_header: PduHeader,
metadata_params: MetadataGenericParams,
@@ -355,7 +355,7 @@ pub mod tests {
};
use crate::cfdp::pdu::{CfdpPdu, PduError, WritablePduPacket};
use crate::cfdp::pdu::{FileDirectiveType, PduHeader};
use crate::cfdp::tlv::{Tlv, TlvType};
use crate::cfdp::tlv::{ReadableTlv, Tlv, TlvType};
use crate::cfdp::{
ChecksumType, CrcFlag, Direction, LargeFileFlag, PduType, SegmentMetadataFlag,
SegmentationControl, TransmissionMode,
@@ -9,10 +9,14 @@ use crate::ByteConversionError;
use alloc::vec;
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
#[cfg(feature = "alloc")]
pub use alloc_mod::*;
use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use super::TlvLvDataTooLarge;
pub mod msg_to_user;
pub const MIN_TLV_LEN: usize = 2;
@@ -39,6 +43,26 @@ pub trait GenericTlv {
}
}
pub trait ReadableTlv {
fn value(&self) -> &[u8];
/// Checks whether the value field is empty.
fn is_empty(&self) -> bool {
self.value().is_empty()
}
/// Helper method to retrieve the length of the value. Simply calls the [slice::len] method of
/// [Self::value]
fn len_value(&self) -> usize {
self.value().len()
}
/// Returns the full raw length, including the length byte.
fn len_full(&self) -> usize {
self.len_value() + 2
}
}
pub trait WritableTlv {
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError>;
fn len_written(&self) -> usize;
@@ -129,14 +153,14 @@ pub struct Tlv<'data> {
}
impl<'data> Tlv<'data> {
pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Tlv, TlvLvError> {
pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Tlv, TlvLvDataTooLarge> {
Ok(Tlv {
tlv_type_field: TlvTypeField::Standard(tlv_type),
lv: Lv::new(data)?,
})
}
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Tlv, TlvLvError> {
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Tlv, TlvLvDataTooLarge> {
Ok(Tlv {
tlv_type_field: TlvTypeField::Custom(tlv_type),
lv: Lv::new(data)?,
@@ -151,26 +175,6 @@ impl<'data> Tlv<'data> {
}
}
pub fn value(&self) -> &[u8] {
self.lv.value()
}
/// Checks whether the value field is empty.
pub fn is_empty(&self) -> bool {
self.value().is_empty()
}
/// Helper method to retrieve the length of the value. Simply calls the [slice::len] method of
/// [Self::value]
pub fn len_value(&self) -> usize {
self.value().len()
}
/// Returns the full raw length, including the length byte.
pub fn len_full(&self) -> usize {
self.len_value() + 2
}
/// Creates a TLV give a raw bytestream. Please note that is is not necessary to pass the
/// bytestream with the exact size of the expected TLV. This function will take care
/// of parsing the length byte, and the length of the parsed TLV can be retrieved using
@@ -192,6 +196,27 @@ impl<'data> Tlv<'data> {
pub fn raw_data(&self) -> Option<&[u8]> {
self.lv.raw_data()
}
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
TlvOwned {
tlv_type_field: self.tlv_type_field,
data: self.value().to_vec(),
}
}
}
#[cfg(feature = "alloc")]
impl PartialEq<TlvOwned> for Tlv<'_> {
fn eq(&self, other: &TlvOwned) -> bool {
self.tlv_type_field == other.tlv_type_field && self.value() == other.value()
}
}
impl ReadableTlv for Tlv<'_> {
fn value(&self) -> &[u8] {
self.lv.value()
}
}
impl WritableTlv for Tlv<'_> {
@@ -212,18 +237,98 @@ impl GenericTlv for Tlv<'_> {
}
}
pub(crate) fn verify_tlv_type(raw_type: u8, expected_tlv_type: TlvType) -> Result<(), TlvLvError> {
let tlv_type = TlvType::try_from(raw_type).map_err(|_| TlvLvError::InvalidTlvTypeField {
found: raw_type,
expected: Some(expected_tlv_type.into()),
})?;
if tlv_type != expected_tlv_type {
return Err(TlvLvError::InvalidTlvTypeField {
found: tlv_type as u8,
expected: Some(expected_tlv_type as u8),
});
#[cfg(feature = "alloc")]
pub mod alloc_mod {
use crate::cfdp::TlvLvDataTooLarge;
use super::*;
/// Owned variant of [Tlv] which is consequently [Clone]able and does not have a lifetime
/// associated to a data slice.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct TlvOwned {
pub(crate) tlv_type_field: TlvTypeField,
pub(crate) data: Vec<u8>,
}
impl TlvOwned {
pub fn new(tlv_type: TlvType, data: &[u8]) -> Result<Self, TlvLvDataTooLarge> {
if data.len() > u8::MAX as usize {
return Err(TlvLvDataTooLarge(data.len()));
}
Ok(Self {
tlv_type_field: TlvTypeField::Standard(tlv_type),
data: data.to_vec(),
})
}
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Result<Self, TlvLvDataTooLarge> {
if data.len() > u8::MAX as usize {
return Err(TlvLvDataTooLarge(data.len()));
}
Ok(Self {
tlv_type_field: TlvTypeField::Custom(tlv_type),
data: data.to_vec(),
})
}
/// Creates a TLV with an empty value field.
pub fn new_empty(tlv_type: TlvType) -> Self {
Self {
tlv_type_field: TlvTypeField::Standard(tlv_type),
data: Vec::new(),
}
}
pub fn as_tlv(&self) -> Tlv<'_> {
Tlv {
tlv_type_field: self.tlv_type_field,
// The API should ensure that the data length is never to large, so the unwrap for the
// LV creation should never be an issue.
lv: Lv::new(&self.data).expect("lv creation failed unexpectedly"),
}
}
}
impl ReadableTlv for TlvOwned {
fn value(&self) -> &[u8] {
&self.data
}
}
impl WritableTlv for TlvOwned {
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
generic_len_check_data_serialization(buf, self.data.len(), MIN_TLV_LEN)?;
buf[0] = self.tlv_type_field.into();
buf[1] = self.data.len() as u8;
buf[2..2 + self.data.len()].copy_from_slice(&self.data);
Ok(self.len_written())
}
fn len_written(&self) -> usize {
self.data.len() + 2
}
}
impl GenericTlv for TlvOwned {
fn tlv_type_field(&self) -> TlvTypeField {
self.tlv_type_field
}
}
impl From<Tlv<'_>> for TlvOwned {
fn from(value: Tlv<'_>) -> Self {
value.to_owned()
}
}
impl PartialEq<Tlv<'_>> for TlvOwned {
fn eq(&self, other: &Tlv) -> bool {
self.tlv_type_field == other.tlv_type_field && self.data == other.value()
}
}
Ok(())
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
@@ -238,7 +343,7 @@ impl EntityIdTlv {
Self { entity_id }
}
fn len_check(buf: &[u8]) -> Result<(), ByteConversionError> {
fn check_min_len(buf: &[u8]) -> Result<(), ByteConversionError> {
if buf.len() < 2 {
return Err(ByteConversionError::ToSliceTooSmall {
found: buf.len(),
@@ -261,7 +366,7 @@ impl EntityIdTlv {
}
pub fn from_bytes(buf: &[u8]) -> Result<Self, TlvLvError> {
Self::len_check(buf)?;
Self::check_min_len(buf)?;
verify_tlv_type(buf[0], TlvType::EntityId)?;
let len = buf[1];
if len != 1 && len != 2 && len != 4 && len != 8 {
@@ -272,22 +377,31 @@ impl EntityIdTlv {
Ok(Self { entity_id })
}
/// Convert to a generic [Tlv], which also erases the programmatic type information.
/// Convert to a generic [Tlv], which also erases the type information.
pub fn to_tlv(self, buf: &mut [u8]) -> Result<Tlv, ByteConversionError> {
Self::len_check(buf)?;
Self::check_min_len(buf)?;
self.entity_id
.write_to_be_bytes(&mut buf[2..2 + self.entity_id.size()])?;
Tlv::new(TlvType::EntityId, &buf[2..2 + self.entity_id.size()]).map_err(|e| match e {
TlvLvError::ByteConversion(e) => e,
// All other errors are impossible.
_ => panic!("unexpected TLV error"),
})
if buf.len() < self.len_value() {
return Err(ByteConversionError::ToSliceTooSmall {
found: buf.len(),
expected: self.len_value(),
});
}
// We performed all checks necessary to ensure this call never panics.
Ok(Tlv::new(TlvType::EntityId, &buf[2..2 + self.entity_id.size()]).unwrap())
}
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
// Unwrap is okay here, entity ID should never be larger than maximum allowed size.
TlvOwned::new(TlvType::EntityId, &self.entity_id.to_vec()).unwrap()
}
}
impl WritableTlv for EntityIdTlv {
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
Self::len_check(buf)?;
Self::check_min_len(buf)?;
buf[0] = TlvType::EntityId as u8;
buf[1] = self.entity_id.size() as u8;
Ok(2 + self.entity_id.write_to_be_bytes(&mut buf[2..])?)
@@ -526,6 +640,12 @@ impl<'first_name, 'second_name> FilestoreRequestTlv<'first_name, 'second_name> {
},
})
}
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
// The API should ensure the data field is never too large, so unwrapping here is okay.
TlvOwned::new(TlvType::FilestoreRequest, &self.to_vec()[2..]).unwrap()
}
}
impl WritableTlv for FilestoreRequestTlv<'_, '_> {
@@ -711,6 +831,12 @@ impl<'first_name, 'second_name, 'fs_msg> FilestoreResponseTlv<'first_name, 'seco
filestore_message,
})
}
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
// The API should ensure the data field is never too large, so unwrap is okay here.
TlvOwned::new(TlvType::FilestoreResponse, &self.to_vec()[2..]).unwrap()
}
}
impl WritableTlv for FilestoreResponseTlv<'_, '_, '_> {
@@ -752,6 +878,20 @@ impl GenericTlv for FilestoreResponseTlv<'_, '_, '_> {
}
}
pub(crate) fn verify_tlv_type(raw_type: u8, expected_tlv_type: TlvType) -> Result<(), TlvLvError> {
let tlv_type = TlvType::try_from(raw_type).map_err(|_| TlvLvError::InvalidTlvTypeField {
found: raw_type,
expected: Some(expected_tlv_type.into()),
})?;
if tlv_type != expected_tlv_type {
return Err(TlvLvError::InvalidTlvTypeField {
found: tlv_type as u8,
expected: Some(expected_tlv_type as u8),
});
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
@@ -939,15 +1079,11 @@ mod tests {
let tlv_res = Tlv::new(TlvType::MsgToUser, &buf_too_large);
assert!(tlv_res.is_err());
let error = tlv_res.unwrap_err();
if let TlvLvError::DataTooLarge(size) = error {
assert_eq!(size, u8::MAX as usize + 1);
assert_eq!(
error.to_string(),
"data with size 256 larger than allowed 255 bytes"
);
} else {
panic!("unexpected error {:?}", error);
}
assert_eq!(error.0, u8::MAX as usize + 1);
assert_eq!(
error.to_string(),
"data with size 256 larger than allowed 255 bytes"
);
}
#[test]
@@ -1300,4 +1436,71 @@ mod tests {
assert_eq!(tlv_as_vec[0], 20);
assert_eq!(tlv_as_vec[1], 0);
}
#[test]
fn test_tlv_to_owned() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = Tlv::new(TlvType::EntityId, &buf[0..1]);
assert!(tlv_res.is_ok());
let tlv_res = tlv_res.unwrap();
let tlv_owned = tlv_res.to_owned();
assert_eq!(tlv_res, tlv_owned);
let tlv_owned_from_conversion: TlvOwned = tlv_res.into();
assert_eq!(tlv_owned_from_conversion, tlv_owned);
assert_eq!(tlv_owned_from_conversion, tlv_res);
}
#[test]
fn test_owned_tlv() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = TlvOwned::new(TlvType::EntityId, &buf[0..1]).expect("creating TLV failed");
assert_eq!(
tlv_res.tlv_type_field(),
TlvTypeField::Standard(TlvType::EntityId)
);
assert_eq!(tlv_res.len_full(), 3);
assert_eq!(tlv_res.value().len(), 1);
assert_eq!(tlv_res.len_value(), 1);
assert!(!tlv_res.is_empty());
assert_eq!(tlv_res.value()[0], 5);
}
#[test]
fn test_owned_tlv_empty() {
let tlv_res = TlvOwned::new_empty(TlvType::FlowLabel);
assert_eq!(
tlv_res.tlv_type_field(),
TlvTypeField::Standard(TlvType::FlowLabel)
);
assert_eq!(tlv_res.len_full(), 2);
assert_eq!(tlv_res.value().len(), 0);
assert_eq!(tlv_res.len_value(), 0);
assert!(tlv_res.is_empty());
}
#[test]
fn test_owned_tlv_custom_type() {
let tlv_res = TlvOwned::new_with_custom_type(32, &[]).unwrap();
assert_eq!(tlv_res.tlv_type_field(), TlvTypeField::Custom(32));
assert_eq!(tlv_res.len_full(), 2);
assert_eq!(tlv_res.value().len(), 0);
assert_eq!(tlv_res.len_value(), 0);
assert!(tlv_res.is_empty());
}
#[test]
fn test_owned_tlv_conversion_to_bytes() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = Tlv::new(TlvType::EntityId, &buf[0..1]);
assert!(tlv_res.is_ok());
let tlv_res = tlv_res.unwrap();
let tlv_owned_from_conversion: TlvOwned = tlv_res.into();
assert_eq!(tlv_res.to_vec(), tlv_owned_from_conversion.to_vec());
}
}
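For reference, a short sketch of the `TlvOwned` API introduced in this file, mirroring what the new tests exercise. It is illustrative only and assumes the `alloc` feature is enabled:

```rust
use spacepackets::cfdp::tlv::{ReadableTlv, TlvOwned, TlvType, WritableTlv};

fn main() {
    let owned = TlvOwned::new(TlvType::EntityId, &[5]).expect("creating TLV failed");
    // Write the owned TLV into a raw buffer: type byte, length byte, then the value.
    let mut buf = [0u8; 8];
    let written = owned.write_to_bytes(&mut buf).expect("writing TLV failed");
    assert_eq!(written, owned.len_full());
    assert_eq!(buf[1] as usize, owned.len_value());
    // A lifetime-bound view can still be produced on demand.
    let view = owned.as_tlv();
    assert_eq!(view.value(), owned.value());
}
```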
@@ -1,6 +1,8 @@
//! Abstractions for the Message to User CFDP TLV subtype.
use super::{GenericTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv};
use crate::ByteConversionError;
#[cfg(feature = "alloc")]
use super::TlvOwned;
use super::{GenericTlv, ReadableTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv};
use crate::{cfdp::TlvLvDataTooLarge, ByteConversionError};
use delegate::delegate;
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
@@ -10,7 +12,7 @@ pub struct MsgToUserTlv<'data> {
impl<'data> MsgToUserTlv<'data> {
/// Create a new message to user TLV where the type field is set correctly.
pub fn new(value: &'data [u8]) -> Result<MsgToUserTlv<'data>, TlvLvError> {
pub fn new(value: &'data [u8]) -> Result<MsgToUserTlv<'data>, TlvLvDataTooLarge> {
Ok(Self {
tlv: Tlv::new(TlvType::MsgToUser, value)?,
})
@@ -75,6 +77,11 @@ impl<'data> MsgToUserTlv<'data> {
}
Ok(msg_to_user)
}
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
self.tlv.to_owned()
}
}
impl WritableTlv for MsgToUserTlv<'_> {
src/lib.rs (39 changed lines)
@@ -55,7 +55,7 @@
//! println!("{:x?}", &ccsds_buf[0..6]);
//! ```
#![no_std]
#![cfg_attr(docs_rs, feature(doc_auto_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(any(feature = "std", test))]
@@ -505,8 +505,8 @@ pub struct SpHeader {
pub type SpacePacketHeader = SpHeader;
impl Default for SpHeader {
/// The default function sets the sequence flag field to [SequenceFlags::Unsegmented]. The data
/// length field is set to 1, which denotes an empty space packets.
/// The default function sets the sequence flag field to [SequenceFlags::Unsegmented] and the
/// data length to 0.
#[inline]
fn default() -> Self {
SpHeader {
@@ -516,7 +516,7 @@ impl Default for SpHeader {
seq_flags: SequenceFlags::Unsegmented,
seq_count: 0,
},
data_len: 1,
data_len: 0,
}
}
}
@@ -532,8 +532,8 @@ impl SpHeader {
}
}
/// This constructor sets the sequence flag field to [SequenceFlags::Unsegmented]. The data
/// length field is set to 1, which denotes an empty space packets.
/// This constructor sets the sequence flag field to [SequenceFlags::Unsegmented] and the data
/// length to 0.
///
/// This constructor will panic if the APID exceeds [MAX_APID].
#[inline]
@@ -545,7 +545,7 @@ impl SpHeader {
seq_flags: SequenceFlags::Unsegmented,
seq_count: 0,
},
data_len: 1,
data_len: 0,
}
}
@@ -559,7 +559,7 @@ impl SpHeader {
seq_flags: SequenceFlags::Unsegmented,
seq_count: 0,
},
data_len: 1,
data_len: 0,
})
}
@@ -568,7 +568,7 @@ impl SpHeader {
///
/// The checked constructor variants can be used to avoid panics.
#[inline]
const fn new_from_fields(
pub const fn new_from_fields(
ptype: PacketType,
sec_header: bool,
apid: u16,
@@ -755,6 +755,15 @@ impl SpHeader {
.ok_or(ByteConversionError::ZeroCopyToError)?;
Ok(&mut buf[CCSDS_HEADER_LEN..])
}
/// Create a vector containing the CCSDS header.
#[cfg(feature = "alloc")]
pub fn to_vec(&self) -> alloc::vec::Vec<u8> {
let mut vec = alloc::vec![0; CCSDS_HEADER_LEN];
// This can not fail.
self.write_to_be_bytes(&mut vec[..]).unwrap();
vec
}
}
impl CcsdsPacket for SpHeader {
@@ -1260,12 +1269,14 @@ pub(crate) mod tests {
fn sp_header_from_apid() {
let sp_header = SpHeader::new_from_apid(0x03);
assert_eq!(sp_header.apid(), 0x03);
assert_eq!(sp_header.data_len(), 0);
}
#[test]
fn sp_header_from_apid_checked() {
let sp_header = SpHeader::new_from_apid_checked(0x03).unwrap();
assert_eq!(sp_header.apid(), 0x03);
assert_eq!(sp_header.data_len(), 0);
}
#[cfg(feature = "defmt")]
@@ -1279,4 +1290,14 @@ pub(crate) mod tests {
expected: 2,
});
}
#[test]
fn test_sp_header_as_vec() {
let sp_header = SpHeader::new_for_unseg_tc(0x42, 25, 1);
let sp_header_as_vec = sp_header.to_vec();
let sp_header_read_back = SpHeader::from_be_bytes(&sp_header_as_vec)
.expect("Error reading back SP header")
.0;
assert_eq!(sp_header, sp_header_read_back);
}
}
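The `SpHeader` changes above (default data length of 0, public `new_from_fields`, new `to_vec` helper) can be exercised as in the following sketch. It is written against the API visible in this diff and assumes the default features, which enable `alloc`:

```rust
use spacepackets::SpHeader;

fn main() {
    // Defaulted headers now carry a data length of 0.
    let header = SpHeader::new_from_apid(0x42);
    assert_eq!(header.data_len(), 0);
    // The new to_vec helper serializes the 6 byte CCSDS primary header.
    let raw = header.to_vec();
    assert_eq!(raw.len(), 6);
    // Round trip the header through the byte representation.
    let (read_back, _) = SpHeader::from_be_bytes(&raw).expect("parsing header failed");
    assert_eq!(read_back, header);
}
```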
@@ -71,7 +71,19 @@ mod tests {
use std::format;
#[test]
fn test_ascii_timestamp_a_unterminated() {
fn test_ascii_timestamp_a_unterminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_a(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 10);
assert_eq!(stamp.len(), FMT_STR_CODE_A_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_a_unterminated_now() {
let date = Utc::now();
let stamp_formatter = generate_time_code_a(&date);
let stamp = format!("{}", stamp_formatter);
@@ -82,7 +94,24 @@ mod tests {
}
#[test]
fn test_ascii_timestamp_a_terminated() {
fn test_ascii_timestamp_a_terminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_a_terminated(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 10);
let z_terminator = stamp.find('Z');
assert!(z_terminator.is_some());
assert_eq!(
z_terminator.unwrap(),
FMT_STR_CODE_A_TERMINATED_WITH_SIZE.1 - 1
);
assert_eq!(stamp.len(), FMT_STR_CODE_A_TERMINATED_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_a_terminated_now() {
let date = Utc::now();
let stamp_formatter = generate_time_code_a_terminated(&date);
let stamp = format!("{}", stamp_formatter);
@@ -99,7 +128,19 @@ mod tests {
}
#[test]
fn test_ascii_timestamp_b_unterminated() {
fn test_ascii_timestamp_b_unterminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_b(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 8);
assert_eq!(stamp.len(), FMT_STR_CODE_B_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_b_unterminated_now() {
let date = Utc::now();
let stamp_formatter = generate_time_code_b(&date);
let stamp = format!("{}", stamp_formatter);
@@ -110,7 +151,25 @@ mod tests {
}
#[test]
fn test_ascii_timestamp_b_terminated() {
fn test_ascii_timestamp_b_terminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_b_terminated(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 8);
let z_terminator = stamp.find('Z');
assert!(z_terminator.is_some());
assert_eq!(
z_terminator.unwrap(),
FMT_STR_CODE_B_TERMINATED_WITH_SIZE.1 - 1
);
assert_eq!(stamp.len(), FMT_STR_CODE_B_TERMINATED_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_b_terminated_now() {
let date = Utc::now();
let stamp_formatter = generate_time_code_b_terminated(&date);
let stamp = format!("{}", stamp_formatter);
@@ -1622,6 +1622,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_time_now() {
let timestamp_now = CdsTime::now_with_u16_days().unwrap();
let compare_stamp = chrono::Utc::now();
@@ -1629,6 +1630,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_us_prec() {
let timestamp_now = CdsTime::now_with_u16_days_us_precision().unwrap();
let compare_stamp = chrono::Utc::now();
@@ -1636,6 +1638,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec() {
let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now();
@@ -1643,6 +1646,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec_u16_days() {
let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now();
@@ -1650,6 +1654,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec_u24_days() {
let timestamp_now = CdsTime::now_with_u24_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now();
@@ -2306,6 +2311,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_update_from_now() {
let mut stamp = CdsTime::new_with_u16_days(0, 0);
let _ = stamp.update_from_now();
@@ -2321,6 +2327,7 @@ mod tests {
#[test]
#[cfg(feature = "serde")]
#[cfg_attr(miri, ignore)]
fn test_serialization() {
let stamp_now = CdsTime::now_with_u16_days().expect("Error retrieving time");
let val = to_allocvec(&stamp_now).expect("Serializing timestamp failed");
@@ -9,7 +9,6 @@ use serde::{Deserialize, Serialize};
use core::fmt::{Debug, Display, Formatter};
use core::ops::{Add, AddAssign};
use core::time::Duration;
use core::u64;
use crate::ByteConversionError;
@@ -947,6 +946,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_datetime_now() {
let now = chrono::Utc::now();
let cuc_now = CucTime::now(FractionalResolution::SixtyNs, LEAP_SECONDS);
@@ -1278,6 +1278,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn set_fract_resolution() {
let mut stamp = CucTime::new(2000);
stamp.set_fractional_resolution(FractionalResolution::SixtyNs);
@@ -6,7 +6,6 @@ use core::cmp::Ordering;
use core::fmt::{Display, Formatter};
use core::ops::{Add, AddAssign, Sub};
use core::time::Duration;
use core::u8;
#[allow(unused_imports)]
#[cfg(not(feature = "std"))]
@@ -551,6 +550,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_get_current_time() {
let sec_floats = seconds_since_epoch();
assert!(sec_floats > 0.0);
@@ -565,6 +565,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ccsds_epoch() {
let now = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
@@ -685,6 +686,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_from_now() {
let stamp_now = UnixTime::now().unwrap();
let dt_now = stamp_now.chrono_date_time().unwrap();