some tweaks for tests
Some checks failed
Rust/sat-rs/pipeline/pr-main There was a failure building this commit
This commit is contained in: parent 5ee225fa4e, commit 0a21fcf23a
@@ -3,7 +3,7 @@
 //! # Example for the [StaticMemoryPool]
 //!
 //! ```
-//! use satrs_core::pool::{StaticMemoryPool, StaticPoolConfig, PoolProvider};
+//! use satrs_core::pool::{PoolProviderMemInPlace, StaticMemoryPool, StaticPoolConfig};
 //!
 //! // 4 buckets of 4 bytes, 2 of 8 bytes and 1 of 16 bytes
 //! let pool_cfg = StaticPoolConfig::new(vec![(4, 4), (2, 8), (1, 16)]);
@@ -67,11 +67,11 @@
 #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
 pub use alloc_mod::*;
 use core::fmt::{Display, Formatter};
+use delegate::delegate;
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
 #[cfg(feature = "std")]
 use std::error::Error;
-use delegate::delegate;

 type NumBlocks = u16;
 pub type StoreAddr = u64;
@@ -308,7 +308,10 @@ impl<'a, MemProvider: PoolProviderMemInPlace> PoolRwGuard<'a, MemProvider> {

 #[cfg(feature = "alloc")]
 mod alloc_mod {
-    use super::{PoolProviderMemInPlace, StaticPoolAddr, PoolProviderMemInPlaceWithGuards, PoolRwGuard, PoolGuard};
+    use super::{
+        PoolGuard, PoolProviderMemInPlace, PoolProviderMemInPlaceWithGuards, PoolRwGuard,
+        StaticPoolAddr,
+    };
     use crate::pool::{NumBlocks, StoreAddr, StoreError, StoreIdError};
     use alloc::vec;
     use alloc::vec::Vec;
@@ -553,8 +556,9 @@ mod alloc_mod {
 #[cfg(test)]
 mod tests {
     use crate::pool::{
-        PoolGuard, PoolProviderMemInPlace, PoolRwGuard, StaticMemoryPool, StaticPoolAddr,
-        StaticPoolConfig, StoreError, StoreIdError, POOL_MAX_SIZE, PoolProviderMemInPlaceWithGuards,
+        PoolGuard, PoolProviderMemInPlace, PoolProviderMemInPlaceWithGuards, PoolRwGuard,
+        StaticMemoryPool, StaticPoolAddr, StaticPoolConfig, StoreError, StoreIdError,
+        POOL_MAX_SIZE,
     };
     use std::vec;

@@ -369,7 +369,7 @@ mod alloc_mod {
 #[cfg(feature = "std")]
 #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
 pub mod std_mod {
-    use crate::pool::{PoolProviderMemInPlace, SharedStaticMemoryPool, StoreAddr};
+    use crate::pool::{PoolProviderMemInPlaceWithGuards, SharedStaticMemoryPool, StoreAddr};
     use crate::pus::verification::{
         StdVerifReporterWithSender, TcStateAccepted, VerificationToken,
     };
@@ -989,7 +989,7 @@ mod tests {
             .expect("insertion failed");

         let mut i = 0;
-        let mut test_closure_1 = |boolvar: bool, tc_info: &TcInfo, tc: &[u8]| {
+        let mut test_closure_1 = |boolvar: bool, tc_info: &TcInfo, _tc: &[u8]| {
             common_check(boolvar, &tc_info.addr, vec![tc_info_0.addr()], &mut i);
             true
         };
@@ -1011,7 +1011,7 @@ mod tests {
         assert!(pool.has_element_at(&tc_info_0.addr()).unwrap());

         // test 3, late timestamp, release 1 overdue tc
-        let mut test_closure_2 = |boolvar: bool, tc_info: &TcInfo, tc: &[u8]| {
+        let mut test_closure_2 = |boolvar: bool, tc_info: &TcInfo, _tc: &[u8]| {
             common_check(boolvar, &tc_info.addr, vec![tc_info_1.addr()], &mut i);
             true
         };
@@ -1052,7 +1052,7 @@ mod tests {
             .expect("insertion failed");

         let mut i = 0;
-        let mut test_closure = |boolvar: bool, store_addr: &TcInfo, tc: &[u8]| {
+        let mut test_closure = |boolvar: bool, store_addr: &TcInfo, _tc: &[u8]| {
             common_check(
                 boolvar,
                 &store_addr.addr,
@@ -1111,7 +1111,7 @@ mod tests {
             .expect("insertion failed");

         let mut i = 0;
-        let mut test_closure_1 = |boolvar: bool, tc_info: &TcInfo, tc: &[u8]| {
+        let mut test_closure_1 = |boolvar: bool, tc_info: &TcInfo, _tc: &[u8]| {
             common_check_disabled(boolvar, &tc_info.addr, vec![tc_info_0.addr()], &mut i);
             true
         };
@@ -1133,7 +1133,7 @@ mod tests {
         assert!(!pool.has_element_at(&tc_info_0.addr()).unwrap());

         // test 3, late timestamp, release 1 overdue tc
-        let mut test_closure_2 = |boolvar: bool, tc_info: &TcInfo, tc: &[u8]| {
+        let mut test_closure_2 = |boolvar: bool, tc_info: &TcInfo, _tc: &[u8]| {
             common_check_disabled(boolvar, &tc_info.addr, vec![tc_info_1.addr()], &mut i);
             true
         };
@@ -1221,7 +1221,7 @@ mod tests {
         assert!(pool.has_element_at(&info.addr).unwrap());

         let data = pool.read(&info.addr).unwrap();
-        let check_tc = PusTcReader::new(&data).expect("incorrect Pus tc raw data");
+        let check_tc = PusTcReader::new(data).expect("incorrect Pus tc raw data");
         assert_eq!(check_tc.0, base_ping_tc_simple_ctor(0, None));

         assert_eq!(scheduler.num_scheduled_telecommands(), 1);
@@ -1231,7 +1231,7 @@ mod tests {
         let mut addr_vec = Vec::new();

         let mut i = 0;
-        let mut test_closure = |boolvar: bool, tc_info: &TcInfo, tc: &[u8]| {
+        let mut test_closure = |boolvar: bool, tc_info: &TcInfo, _tc: &[u8]| {
             common_check(boolvar, &tc_info.addr, vec![info.addr], &mut i);
             // check that tc remains unchanged
             addr_vec.push(tc_info.addr);
@@ -1243,7 +1243,7 @@ mod tests {
             .unwrap();

         let data = pool.read(&addr_vec[0]).unwrap();
-        let check_tc = PusTcReader::new(&data).expect("incorrect PUS tc raw data");
+        let check_tc = PusTcReader::new(data).expect("incorrect PUS tc raw data");
         assert_eq!(check_tc.0, base_ping_tc_simple_ctor(0, None));
     }

@@ -1494,7 +1494,7 @@ mod tests {
         let del_res =
             scheduler.delete_by_request_id_and_from_pool(&tc_info_2.request_id(), &mut pool);
         assert!(del_res.is_ok());
-        assert_eq!(del_res.unwrap(), true);
+        assert!(del_res.unwrap());
         assert!(!pool.has_element_at(&tc_info_2.addr()).unwrap());
         assert_eq!(scheduler.num_scheduled_telecommands(), 1);

@@ -1502,7 +1502,7 @@ mod tests {
         let addr_1 =
             scheduler.delete_by_request_id_and_from_pool(&tc_info_1.request_id(), &mut pool);
         assert!(addr_1.is_ok());
-        assert_eq!(addr_1.unwrap(), true);
+        assert!(addr_1.unwrap());
         assert!(!pool.has_element_at(&tc_info_1.addr()).unwrap());
         assert_eq!(scheduler.num_scheduled_telecommands(), 0);
     }
@@ -15,7 +15,7 @@
 //! ```
 //! use std::sync::{Arc, mpsc, RwLock};
 //! use std::time::Duration;
-//! use satrs_core::pool::{StaticMemoryPool, StaticPoolConfig, PoolProvider, SharedPool};
+//! use satrs_core::pool::{PoolProviderMemInPlaceWithGuards, StaticMemoryPool, StaticPoolConfig};
 //! use satrs_core::pus::verification::{VerificationReporterCfg, VerificationReporterWithSender};
 //! use satrs_core::seq_count::SeqCountProviderSimple;
 //! use satrs_core::pus::MpscTmInStoreSender;
@@ -30,7 +30,7 @@
 //!
 //! let pool_cfg = StaticPoolConfig::new(vec![(10, 32), (10, 64), (10, 128), (10, 1024)]);
 //! let tm_pool = StaticMemoryPool::new(pool_cfg.clone());
-//! let shared_tm_store = SharedTmStore::new(Box::new(tm_pool));
+//! let shared_tm_store = SharedTmStore::new(tm_pool);
 //! let tm_store = shared_tm_store.clone_backing_pool();
 //! let (verif_tx, verif_rx) = mpsc::channel();
 //! let sender = MpscTmInStoreSender::new(0, "Test Sender", shared_tm_store, verif_tx);
@@ -1325,7 +1325,7 @@ mod std_mod {

 #[cfg(test)]
 mod tests {
-    use crate::pool::{MemPoolProviderWithGuards, StaticMemoryPool, StaticPoolConfig};
+    use crate::pool::{PoolProviderMemInPlaceWithGuards, StaticMemoryPool, StaticPoolConfig};
     use crate::pus::tests::CommonTmInfo;
     use crate::pus::verification::{
         EcssTmSenderCore, EcssTmtcError, FailParams, FailParamsWithStep, RequestId, TcStateNone,
@@ -1784,8 +1784,7 @@ mod tests {
             .rep()
             .start_success(accepted_token, &mut sender, Some(&[0, 1, 0, 1, 0, 1, 0]))
             .expect("Sending start success failed");
-        let mut empty = b
-            .rep()
+        b.rep()
             .step_success(
                 &started_token,
                 &mut sender,
@@ -1793,16 +1792,13 @@ mod tests {
                 EcssEnumU8::new(0),
             )
             .expect("Sending step 0 success failed");
-        assert_eq!(empty, ());
-        empty =
-            b.vr.step_success(
-                &started_token,
-                &mut sender,
-                Some(&EMPTY_STAMP),
-                EcssEnumU8::new(1),
-            )
-            .expect("Sending step 1 success failed");
-        assert_eq!(empty, ());
+        b.vr.step_success(
+            &started_token,
+            &mut sender,
+            Some(&EMPTY_STAMP),
+            EcssEnumU8::new(1),
+        )
+        .expect("Sending step 1 success failed");
         assert_eq!(sender.service_queue.borrow().len(), 4);
         step_success_check(&mut sender, tok.req_id);
     }
@@ -1818,16 +1814,12 @@ mod tests {
             .helper
             .start_success(accepted_token, Some(&[0, 1, 0, 1, 0, 1, 0]))
             .expect("Sending start success failed");
-        let mut empty = b
-            .helper
+        b.helper
             .step_success(&started_token, Some(&EMPTY_STAMP), EcssEnumU8::new(0))
             .expect("Sending step 0 success failed");
-        assert_eq!(empty, ());
-        empty = b
-            .helper
+        b.helper
             .step_success(&started_token, Some(&EMPTY_STAMP), EcssEnumU8::new(1))
             .expect("Sending step 1 success failed");
-        assert_eq!(empty, ());
         let sender: &mut TestSender = b.helper.sender.downcast_mut().unwrap();
         assert_eq!(sender.service_queue.borrow().len(), 4);
         step_success_check(sender, tok.req_id);
@@ -2122,10 +2114,8 @@ mod tests {
         let started_token =
             b.vr.start_success(accepted_token, &mut sender, Some(&[0, 1, 0, 1, 0, 1, 0]))
                 .expect("Sending start success failed");
-        let empty =
-            b.vr.completion_success(started_token, &mut sender, Some(&EMPTY_STAMP))
-                .expect("Sending completion success failed");
-        assert_eq!(empty, ());
+        b.vr.completion_success(started_token, &mut sender, Some(&EMPTY_STAMP))
+            .expect("Sending completion success failed");
         completion_success_check(&mut sender, tok.req_id);
     }

@@ -2140,11 +2130,9 @@ mod tests {
             .helper
             .start_success(accepted_token, Some(&[0, 1, 0, 1, 0, 1, 0]))
             .expect("Sending start success failed");
-        let empty = b
-            .helper
+        b.helper
             .completion_success(started_token, Some(&EMPTY_STAMP))
             .expect("Sending completion success failed");
-        assert_eq!(empty, ());
         let sender: &mut TestSender = b.helper.sender.downcast_mut().unwrap();
         completion_success_check(sender, tok.req_id);
     }
@@ -1,4 +1,6 @@
-use satrs_core::pool::{MemPoolProvider, PoolGuard, StaticMemoryPool, StaticPoolConfig, StoreAddr};
+use satrs_core::pool::{
+    PoolGuard, PoolProviderMemInPlace, StaticMemoryPool, StaticPoolConfig, StoreAddr,
+};
 use std::ops::DerefMut;
 use std::sync::mpsc;
 use std::sync::mpsc::{Receiver, Sender};
@@ -3,7 +3,7 @@ use std::{net::SocketAddr, sync::mpsc::Receiver};
 use log::{info, warn};
 use satrs_core::{
     hal::std::udp_server::{ReceiveResult, UdpTcServer},
-    pool::{PoolProviderMemInPlace, SharedStaticMemoryPool, StoreAddr},
+    pool::{PoolProviderMemInPlaceWithGuards, SharedStaticMemoryPool, StoreAddr},
     tmtc::CcsdsError,
 };

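For orientation: the hunks above rename the pool traits to PoolProviderMemInPlace and PoolProviderMemInPlaceWithGuards and update imports, doc examples, and tests accordingly. Below is a minimal usage sketch of the renamed in-place provider trait with the StaticMemoryPool configuration from the doc-comment hunk at the top; the `add`, `has_element_at`, and `read` calls are taken from the test code in this diff, but their exact signatures at this revision are an assumption, not verified here.

// Sketch only, not part of this commit. Assumes the satrs_core pool API as
// used elsewhere in this diff: `add` returns a StoreAddr, `read` returns a
// byte slice, and both take the address by reference.
use satrs_core::pool::{PoolProviderMemInPlace, StaticMemoryPool, StaticPoolConfig};

fn main() {
    // 4 buckets of 4 bytes, 2 of 8 bytes and 1 of 16 bytes, matching the doc example.
    let pool_cfg = StaticPoolConfig::new(vec![(4, 4), (2, 8), (1, 16)]);
    let mut pool = StaticMemoryPool::new(pool_cfg);

    // Store a small payload, check that it is present, and read it back in place.
    let addr = pool.add(&[1, 2, 3, 4]).expect("adding data failed");
    assert!(pool.has_element_at(&addr).unwrap());
    let data = pool.read(&addr).expect("reading data back failed");
    assert_eq!(data, [1, 2, 3, 4]);
}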