18 Commits

Author SHA1 Message Date
b810fbb12a add owned TLV type 2024-06-05 14:36:05 +02:00
0e347b0e37 Merge pull request 'Bump MSRV' (#97) from bump-msrv into main (Reviewed-on: #97) 2024-05-19 13:07:12 +02:00
58dabb6f2f specify exact required version 2024-05-19 09:13:12 +02:00
7fd65aa592 bumped MSRV 2024-05-19 09:12:39 +02:00
0024afc83e Merge pull request 'prep patch release' (#96) from prep-v0.11.2 into main (Reviewed-on: #96) 2024-05-19 09:02:46 +02:00
c48bd848d3 prep patch release 2024-05-19 08:49:03 +02:00
b8be9ae641 Merge pull request 'Fixes for Miri' (#95) from fixes-for-miri into main (Reviewed-on: #95) 2024-05-15 13:03:24 +02:00
c2506dbba9 Merge branch 'main' into fixes-for-miri 2024-05-14 19:25:07 +02:00
b842b9d11a Merge pull request 'remove defmt::Format impl for MetadataPduCreator' (#94) from fix-defmt-derives into main (Reviewed-on: #94) 2024-05-14 19:24:57 +02:00
374c034e92 add miri chapter in README 2024-05-14 15:37:20 +02:00
791c7f6e02 it is now possible to run cargo miri 2024-05-14 15:34:40 +02:00
8001938507 remove defmt::Format impl for MetadataPduCreator 2024-05-14 15:01:26 +02:00
73ab7ff148 Merge pull request 'add doctests to github CI' (#93) from github-ci-doctest into main (Reviewed-on: #93) 2024-05-02 14:56:13 +02:00
c59d01174f add doctests to github CI 2024-05-02 14:48:31 +02:00
eb49bff0c9 Merge pull request 'update github CI' (#92) from update-github-ci into main (Reviewed-on: #92) 2024-05-02 14:29:53 +02:00
af392d40d0 this might work 2024-05-02 14:22:03 +02:00
b78bfe2114 some fixes 2024-05-02 14:16:20 +02:00
69a3b1d8f3 update github CI 2024-05-02 14:12:26 +02:00
12 changed files with 345 additions and 119 deletions

View File

@ -1,42 +1,39 @@
on: [push]
name: ci
on: [push, pull_request]
jobs:
check:
name: Check
name: Check build
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
- uses: actions-rs/cargo@v1
with:
command: check
args: --release
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo check --release
msrv:
name: Check with MSRV
test:
name: Run Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
toolchain: 1.65.0
override: true
profile: minimal
- uses: actions-rs/cargo@v1
with:
command: check
args: --release
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- name: Install nextest
uses: taiki-e/install-action@nextest
- run: cargo nextest run --all-features
- run: cargo test --doc
msrv:
name: Check MSRV
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.68.2
- run: cargo check --release
cross-check:
name: Check Cross
name: Check Cross-Compilation
runs-on: ubuntu-latest
strategy:
matrix:
@ -44,70 +41,32 @@ jobs:
- armv7-unknown-linux-gnueabihf
- thumbv7em-none-eabihf
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
with:
profile: minimal
toolchain: stable
target: ${{ matrix.target }}
override: true
- uses: actions-rs/cargo@v1
with:
use-cross: true
command: check
args: --release --target=${{ matrix.target }} --no-default-features
targets: "armv7-unknown-linux-gnueabihf, thumbv7em-none-eabihf"
- run: cargo check --release --target=${{matrix.target}} --no-default-features
fmt:
name: Rustfmt
name: Check formatting
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- run: rustup component add rustfmt
- uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo fmt --all -- --check
check-doc:
docs:
name: Check Documentation Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
toolchain: nightly
override: true
profile: minimal
- uses: actions-rs/cargo@v1
with:
command: doc
args: --all-features
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@nightly
- run: cargo +nightly doc --all-features --config 'build.rustdocflags=["--cfg", "docs_rs"]'
clippy:
name: Clippy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
- run: rustup component add clippy
- uses: actions-rs/cargo@v1
with:
command: clippy
args: -- -D warnings
ci:
if: ${{ success() }}
# all new jobs must be added to this list
needs: [check, fmt, clippy]
runs-on: ubuntu-latest
steps:
- name: CI succeeded
run: exit 0
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo clippy -- -D warnings

View File

@ -8,6 +8,25 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
# [unreleased]
# [v0.12.0] 2024-06-05
## Added
- Added new `cfdp::tlv::TlvOwned` type which erases the lifetime and is clonable.
## Added and Changed
- Added new `ReadableTlv` to avoid some boilerplate code and have a common abstraction implemented
for both `Tlv` and `TlvOwned` to read the raw TLV data field and its length.
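A rough usage sketch for these two additions (an illustration, not part of the changelog; it assumes the `alloc` feature is enabled and uses only the APIs visible in this diff):
```rust
// Illustrative only: crate and feature setup are assumed, the APIs are taken from this diff.
use spacepackets::cfdp::tlv::{ReadableTlv, Tlv, TlvOwned, TlvType};

fn main() {
    // Borrowed TLV, tied to the lifetime of `data`.
    let data = [0x05];
    let tlv = Tlv::new(TlvType::EntityId, &data).unwrap();

    // Erase the lifetime by converting to the owned, clonable variant.
    let owned: TlvOwned = tlv.to_owned();

    // `ReadableTlv` is implemented for both variants.
    assert_eq!(tlv.value(), owned.value());
    assert_eq!(owned.len_full(), 3);
}
```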
# [v0.11.2] 2024-05-19
- Bumped MSRV to 1.68.2
## Fixed
- Removed `defmt::Format` impl for `MetadataPduCreator` which seems to be problematic.
# [v0.11.1] 2024-04-22
## Fixed

View File

@ -1,8 +1,8 @@
[package]
name = "spacepackets"
version = "0.11.1"
version = "0.11.2"
edition = "2021"
rust-version = "1.65"
rust-version = "1.68.2"
authors = ["Robin Mueller <muellerr@irs.uni-stuttgart.de>"]
description = "Generic implementations for various CCSDS and ECSS packet standards"
homepage = "https://egit.irs.uni-stuttgart.de/rust/spacepackets"
@ -60,7 +60,7 @@ chrono = "0.4"
default = ["std"]
std = ["chrono/std", "chrono/clock", "alloc", "thiserror"]
serde = ["dep:serde", "chrono/serde"]
alloc = ["postcard/alloc", "chrono/alloc"]
alloc = ["postcard/alloc", "chrono/alloc", "defmt/alloc", "serde/alloc"]
chrono = ["dep:chrono"]
timelib = ["dep:time"]
defmt = ["dep:defmt"]
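Since the new owned TLV type lives behind the `alloc` feature, a downstream `Cargo.toml` entry could look roughly like this (the version requirement and the `default-features` choice are illustrative assumptions):
```toml
[dependencies]
# "alloc" is enough for TlvOwned; the default "std" feature also enables it.
spacepackets = { version = "0.12", default-features = false, features = ["alloc"] }
```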

View File

@ -61,3 +61,13 @@ cargo install grcov --locked
After that, you can simply run `coverage.py` to test the project with coverage. You can optionally
supply the `--open` flag to open the coverage report in your web browser.
# Miri
You can run the [`miri`](https://github.com/rust-lang/miri) tool on this library to check for
undefined behaviour (UB). This library does not use any `unsafe` code blocks, but `miri` could
still catch UB from used libraries.
```sh
cargo +nightly miri nextest run --all-features
```

View File

@ -10,6 +10,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use super::tlv::ReadableTlv;
use super::{CfdpPdu, WritablePduPacket};
#[derive(Debug, Copy, Clone, PartialEq, Eq, TryFromPrimitive, IntoPrimitive)]

View File

@ -11,6 +11,7 @@ use alloc::vec::Vec;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use super::tlv::ReadableTlv;
use super::{CfdpPdu, WritablePduPacket};
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq)]
@ -56,7 +57,6 @@ pub fn build_metadata_opts_from_vec(
/// This abstraction exposes a specialized API for creating metadata PDUs as specified in
/// CFDP chapter 5.2.5.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct MetadataPduCreator<'src_name, 'dest_name, 'opts> {
pdu_header: PduHeader,
metadata_params: MetadataGenericParams,
@ -355,7 +355,7 @@ pub mod tests {
};
use crate::cfdp::pdu::{CfdpPdu, PduError, WritablePduPacket};
use crate::cfdp::pdu::{FileDirectiveType, PduHeader};
use crate::cfdp::tlv::{Tlv, TlvType};
use crate::cfdp::tlv::{ReadableTlv, Tlv, TlvType};
use crate::cfdp::{
ChecksumType, CrcFlag, Direction, LargeFileFlag, PduType, SegmentMetadataFlag,
SegmentationControl, TransmissionMode,

View File

@ -9,6 +9,8 @@ use crate::ByteConversionError;
use alloc::vec;
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
#[cfg(feature = "alloc")]
pub use alloc_mod::*;
use num_enum::{IntoPrimitive, TryFromPrimitive};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
@ -39,6 +41,26 @@ pub trait GenericTlv {
}
}
pub trait ReadableTlv {
fn value(&self) -> &[u8];
/// Checks whether the value field is empty.
fn is_empty(&self) -> bool {
self.value().is_empty()
}
/// Helper method to retrieve the length of the value. Simply calls the [slice::len] method of
/// [Self::value]
fn len_value(&self) -> usize {
self.value().len()
}
/// Returns the full raw length, including the TLV type and length bytes.
fn len_full(&self) -> usize {
self.len_value() + 2
}
}
pub trait WritableTlv {
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError>;
fn len_written(&self) -> usize;
@ -151,26 +173,6 @@ impl<'data> Tlv<'data> {
}
}
pub fn value(&self) -> &[u8] {
self.lv.value()
}
/// Checks whether the value field is empty.
pub fn is_empty(&self) -> bool {
self.value().is_empty()
}
/// Helper method to retrieve the length of the value. Simply calls the [slice::len] method of
/// [Self::value]
pub fn len_value(&self) -> usize {
self.value().len()
}
/// Returns the full raw length, including the length byte.
pub fn len_full(&self) -> usize {
self.len_value() + 2
}
/// Creates a TLV given a raw bytestream. Please note that it is not necessary to pass the
/// bytestream with the exact size of the expected TLV. This function will take care
/// of parsing the length byte, and the length of the parsed TLV can be retrieved using
@ -192,6 +194,26 @@ impl<'data> Tlv<'data> {
pub fn raw_data(&self) -> Option<&[u8]> {
self.lv.raw_data()
}
#[cfg(feature = "alloc")]
pub fn to_owned(&self) -> TlvOwned {
TlvOwned {
tlv_type_field: self.tlv_type_field,
data: self.value().to_vec(),
}
}
}
#[cfg(feature = "alloc")]
impl PartialEq<TlvOwned> for Tlv<'_> {
fn eq(&self, other: &TlvOwned) -> bool {
self.tlv_type_field == other.tlv_type_field && self.value() == other.value()
}
}
impl ReadableTlv for Tlv<'_> {
fn value(&self) -> &[u8] {
self.lv.value()
}
}
impl WritableTlv for Tlv<'_> {
@ -212,18 +234,81 @@ impl GenericTlv for Tlv<'_> {
}
}
pub(crate) fn verify_tlv_type(raw_type: u8, expected_tlv_type: TlvType) -> Result<(), TlvLvError> {
let tlv_type = TlvType::try_from(raw_type).map_err(|_| TlvLvError::InvalidTlvTypeField {
found: raw_type,
expected: Some(expected_tlv_type.into()),
})?;
if tlv_type != expected_tlv_type {
return Err(TlvLvError::InvalidTlvTypeField {
found: tlv_type as u8,
expected: Some(expected_tlv_type as u8),
});
#[cfg(feature = "alloc")]
pub mod alloc_mod {
use super::*;
/// Owned variant of [Tlv] which is consequently [Clone]able and does not have a lifetime
/// associated with a data slice.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct TlvOwned {
pub(crate) tlv_type_field: TlvTypeField,
pub(crate) data: Vec<u8>,
}
impl TlvOwned {
pub fn new(tlv_type: TlvType, data: &[u8]) -> Self {
Self {
tlv_type_field: TlvTypeField::Standard(tlv_type),
data: data.to_vec(),
}
}
pub fn new_with_custom_type(tlv_type: u8, data: &[u8]) -> Self {
Self {
tlv_type_field: TlvTypeField::Custom(tlv_type),
data: data.to_vec(),
}
}
/// Creates a TLV with an empty value field.
pub fn new_empty(tlv_type: TlvType) -> Self {
Self {
tlv_type_field: TlvTypeField::Standard(tlv_type),
data: Vec::new(),
}
}
}
impl ReadableTlv for TlvOwned {
fn value(&self) -> &[u8] {
&self.data
}
}
impl WritableTlv for TlvOwned {
fn write_to_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
generic_len_check_data_serialization(buf, self.data.len(), MIN_TLV_LEN)?;
buf[0] = self.tlv_type_field.into();
buf[1] = self.data.len() as u8;
buf[2..2 + self.data.len()].copy_from_slice(&self.data);
Ok(self.len_written())
}
fn len_written(&self) -> usize {
self.data.len() + 2
}
}
impl GenericTlv for TlvOwned {
fn tlv_type_field(&self) -> TlvTypeField {
self.tlv_type_field
}
}
impl From<Tlv<'_>> for TlvOwned {
fn from(value: Tlv<'_>) -> Self {
value.to_owned()
}
}
impl PartialEq<Tlv<'_>> for TlvOwned {
fn eq(&self, other: &Tlv) -> bool {
self.tlv_type_field == other.tlv_type_field && self.data == other.value()
}
}
Ok(())
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
@ -752,6 +837,20 @@ impl GenericTlv for FilestoreResponseTlv<'_, '_, '_> {
}
}
pub(crate) fn verify_tlv_type(raw_type: u8, expected_tlv_type: TlvType) -> Result<(), TlvLvError> {
let tlv_type = TlvType::try_from(raw_type).map_err(|_| TlvLvError::InvalidTlvTypeField {
found: raw_type,
expected: Some(expected_tlv_type.into()),
})?;
if tlv_type != expected_tlv_type {
return Err(TlvLvError::InvalidTlvTypeField {
found: tlv_type as u8,
expected: Some(expected_tlv_type as u8),
});
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
@ -1300,4 +1399,71 @@ mod tests {
assert_eq!(tlv_as_vec[0], 20);
assert_eq!(tlv_as_vec[1], 0);
}
#[test]
fn test_tlv_to_owned() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = Tlv::new(TlvType::EntityId, &buf[0..1]);
assert!(tlv_res.is_ok());
let tlv_res = tlv_res.unwrap();
let tlv_owned = tlv_res.to_owned();
assert_eq!(tlv_res, tlv_owned);
let tlv_owned_from_conversion: TlvOwned = tlv_res.into();
assert_eq!(tlv_owned_from_conversion, tlv_owned);
assert_eq!(tlv_owned_from_conversion, tlv_res);
}
#[test]
fn test_owned_tlv() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = TlvOwned::new(TlvType::EntityId, &buf[0..1]);
assert_eq!(
tlv_res.tlv_type_field(),
TlvTypeField::Standard(TlvType::EntityId)
);
assert_eq!(tlv_res.len_full(), 3);
assert_eq!(tlv_res.value().len(), 1);
assert_eq!(tlv_res.len_value(), 1);
assert!(!tlv_res.is_empty());
assert_eq!(tlv_res.value()[0], 5);
}
#[test]
fn test_owned_tlv_empty() {
let tlv_res = TlvOwned::new_empty(TlvType::FlowLabel);
assert_eq!(
tlv_res.tlv_type_field(),
TlvTypeField::Standard(TlvType::FlowLabel)
);
assert_eq!(tlv_res.len_full(), 2);
assert_eq!(tlv_res.value().len(), 0);
assert_eq!(tlv_res.len_value(), 0);
assert!(tlv_res.is_empty());
}
#[test]
fn test_owned_tlv_custom_type() {
let tlv_res = TlvOwned::new_with_custom_type(32, &[]);
assert_eq!(tlv_res.tlv_type_field(), TlvTypeField::Custom(32));
assert_eq!(tlv_res.len_full(), 2);
assert_eq!(tlv_res.value().len(), 0);
assert_eq!(tlv_res.len_value(), 0);
assert!(tlv_res.is_empty());
}
#[test]
fn test_owned_tlv_conversion_to_bytes() {
let entity_id = UbfU8::new(5);
let mut buf: [u8; 4] = [0; 4];
assert!(entity_id.write_to_be_bytes(&mut buf).is_ok());
let tlv_res = Tlv::new(TlvType::EntityId, &buf[0..1]);
assert!(tlv_res.is_ok());
let tlv_res = tlv_res.unwrap();
let tlv_owned_from_conversion: TlvOwned = tlv_res.into();
assert_eq!(tlv_res.to_vec(), tlv_owned_from_conversion.to_vec());
}
}

View File

@ -1,5 +1,5 @@
//! Abstractions for the Message to User CFDP TLV subtype.
use super::{GenericTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv};
use super::{GenericTlv, ReadableTlv, Tlv, TlvLvError, TlvType, TlvTypeField, WritableTlv};
use crate::ByteConversionError;
use delegate::delegate;

View File

@ -71,7 +71,19 @@ mod tests {
use std::format;
#[test]
fn test_ascii_timestamp_a_unterminated() {
fn test_ascii_timestamp_a_unterminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_a(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 10);
assert_eq!(stamp.len(), FMT_STR_CODE_A_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_a_unterminated_now() {
let date = Utc::now();
let stamp_formatter = generate_time_code_a(&date);
let stamp = format!("{}", stamp_formatter);
@ -82,7 +94,24 @@ mod tests {
}
#[test]
fn test_ascii_timestamp_a_terminated() {
fn test_ascii_timestamp_a_terminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_a_terminated(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 10);
let z_terminator = stamp.find('Z');
assert!(z_terminator.is_some());
assert_eq!(
z_terminator.unwrap(),
FMT_STR_CODE_A_TERMINATED_WITH_SIZE.1 - 1
);
assert_eq!(stamp.len(), FMT_STR_CODE_A_TERMINATED_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_a_terminated_now() {
let date = Utc::now();
let stamp_formatter = generate_time_code_a_terminated(&date);
let stamp = format!("{}", stamp_formatter);
@ -99,7 +128,19 @@ mod tests {
}
#[test]
fn test_ascii_timestamp_b_unterminated() {
fn test_ascii_timestamp_b_unterminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_b(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 8);
assert_eq!(stamp.len(), FMT_STR_CODE_B_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_b_unterminated_now() {
let date = Utc::now();
let stamp_formatter = generate_time_code_b(&date);
let stamp = format!("{}", stamp_formatter);
@ -110,7 +151,25 @@ mod tests {
}
#[test]
fn test_ascii_timestamp_b_terminated() {
fn test_ascii_timestamp_b_terminated_epoch() {
let date = chrono::DateTime::UNIX_EPOCH;
let stamp_formatter = generate_time_code_b_terminated(&date);
let stamp = format!("{}", stamp_formatter);
let t_sep = stamp.find('T');
assert!(t_sep.is_some());
assert_eq!(t_sep.unwrap(), 8);
let z_terminator = stamp.find('Z');
assert!(z_terminator.is_some());
assert_eq!(
z_terminator.unwrap(),
FMT_STR_CODE_B_TERMINATED_WITH_SIZE.1 - 1
);
assert_eq!(stamp.len(), FMT_STR_CODE_B_TERMINATED_WITH_SIZE.1);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ascii_timestamp_b_terminated_now() {
let date = Utc::now();
let stamp_formatter = generate_time_code_b_terminated(&date);
let stamp = format!("{}", stamp_formatter);

View File

@ -1622,6 +1622,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_time_now() {
let timestamp_now = CdsTime::now_with_u16_days().unwrap();
let compare_stamp = chrono::Utc::now();
@ -1629,6 +1630,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_us_prec() {
let timestamp_now = CdsTime::now_with_u16_days_us_precision().unwrap();
let compare_stamp = chrono::Utc::now();
@ -1636,6 +1638,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec() {
let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now();
@ -1643,6 +1646,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec_u16_days() {
let timestamp_now = CdsTime::from_now_with_u16_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now();
@ -1650,6 +1654,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_time_now_ps_prec_u24_days() {
let timestamp_now = CdsTime::now_with_u24_days_ps_precision().unwrap();
let compare_stamp = chrono::Utc::now();
@ -2306,6 +2311,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_update_from_now() {
let mut stamp = CdsTime::new_with_u16_days(0, 0);
let _ = stamp.update_from_now();
@ -2321,6 +2327,7 @@ mod tests {
#[test]
#[cfg(feature = "serde")]
#[cfg_attr(miri, ignore)]
fn test_serialization() {
let stamp_now = CdsTime::now_with_u16_days().expect("Error retrieving time");
let val = to_allocvec(&stamp_now).expect("Serializing timestamp failed");

View File

@ -947,6 +947,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_datetime_now() {
let now = chrono::Utc::now();
let cuc_now = CucTime::now(FractionalResolution::SixtyNs, LEAP_SECONDS);
@ -1278,6 +1279,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn set_fract_resolution() {
let mut stamp = CucTime::new(2000);
stamp.set_fractional_resolution(FractionalResolution::SixtyNs);

View File

@ -551,6 +551,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_get_current_time() {
let sec_floats = seconds_since_epoch();
assert!(sec_floats > 0.0);
@ -565,6 +566,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_ccsds_epoch() {
let now = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
@ -685,6 +687,7 @@ mod tests {
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_from_now() {
let stamp_now = UnixTime::now().unwrap();
let dt_now = stamp_now.chrono_date_time().unwrap();