15 Commits

Author SHA1 Message Date
22ba6be780 small improvement
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-03-08 16:38:12 +01:00
2679815c28 added some more tests
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-03-08 16:36:37 +01:00
55df55a39c First version of asynchronix based mini simulator
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-03-07 17:19:16 +01:00
ab3d907d4e Merge pull request 'Refactor TMTC distribution modules' (#138) from ccsds-distrib-refactoring into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #138
2024-03-04 16:53:23 +01:00
3de5954898 Refactor TMTC distribution modules
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-03-04 16:26:34 +01:00
5600aa576c Merge pull request 'use generics for the PUS stack' (#134) from pus-stack-use-generics into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #134
2024-02-26 15:46:47 +01:00
88793cfa87 add some helper types
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-26 15:34:20 +01:00
223b637eb8 use generics for the PUS stack
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-26 15:18:15 +01:00
cf9b115e1e Merge pull request 'Refactored Verification Reporter Module' (#132) from refactor-verification-mod into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #132
2024-02-26 11:58:57 +01:00
eea9b11b39 refactored verification reporter
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
- Use generics instead of trait objects where applicable.
2024-02-26 11:41:42 +01:00
f21ab0017e Merge pull request 'fixed for scheduler' (#133) from scheduler-fixes into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #133
2024-02-26 11:15:50 +01:00
a7ca00317f cargo fmt
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-26 11:00:48 +01:00
75fda42f4f fixed for scheduler
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
2024-02-26 10:53:33 +01:00
faf0f6f6c6 Merge pull request 'refactored event manager' (#131) from refactor-event-man into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #131
2024-02-23 14:31:48 +01:00
a690c7720d Refactored event manager
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-23 14:19:30 +01:00
49 changed files with 2862 additions and 1130 deletions

View File

@ -4,6 +4,7 @@ members = [
"satrs", "satrs",
"satrs-mib", "satrs-mib",
"satrs-example", "satrs-example",
"satrs-minisim",
"satrs-shared", "satrs-shared",
] ]

View File

@ -20,8 +20,8 @@ thiserror = "1"
derive-new = "0.5" derive-new = "0.5"
[dependencies.satrs] [dependencies.satrs]
version = "0.2.0-rc.0" # version = "0.2.0-rc.0"
# path = "../satrs" path = "../satrs"
[dependencies.satrs-mib] [dependencies.satrs-mib]
version = "0.1.1" version = "0.1.1"

View File

@ -1,7 +1,7 @@
use std::sync::mpsc::{self, TryRecvError}; use std::sync::mpsc::{self, TryRecvError};
use log::{info, warn}; use log::{info, warn};
use satrs::pus::verification::{VerificationReporterWithSender, VerificationReportingProvider}; use satrs::pus::verification::VerificationReportingProvider;
use satrs::pus::{EcssTmSender, PusTmWrapper}; use satrs::pus::{EcssTmSender, PusTmWrapper};
use satrs::request::TargetAndApidId; use satrs::request::TargetAndApidId;
use satrs::spacepackets::ecss::hk::Subservice as HkSubservice; use satrs::spacepackets::ecss::hk::Subservice as HkSubservice;
@ -21,19 +21,19 @@ use crate::{
update_time, update_time,
}; };
pub struct AcsTask { pub struct AcsTask<VerificationReporter: VerificationReportingProvider> {
timestamp: [u8; 7], timestamp: [u8; 7],
time_provider: TimeProvider<DaysLen16Bits>, time_provider: TimeProvider<DaysLen16Bits>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporter,
tm_sender: Box<dyn EcssTmSender>, tm_sender: Box<dyn EcssTmSender>,
request_rx: mpsc::Receiver<RequestWithToken>, request_rx: mpsc::Receiver<RequestWithToken>,
} }
impl AcsTask { impl<VerificationReporter: VerificationReportingProvider> AcsTask<VerificationReporter> {
pub fn new( pub fn new(
tm_sender: impl EcssTmSender, tm_sender: impl EcssTmSender,
request_rx: mpsc::Receiver<RequestWithToken>, request_rx: mpsc::Receiver<RequestWithToken>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporter,
) -> Self { ) -> Self {
Self { Self {
timestamp: [0; 7], timestamp: [0; 7],

View File

@ -1,21 +1,17 @@
use std::sync::mpsc::{self, SendError}; use std::sync::mpsc::{self};
use satrs::{ use satrs::{
event_man::{ event_man::{
EventManager, EventManagerWithMpscQueue, MpscEventReceiver, MpscEventU32SendProvider, EventManagerWithBoundedMpsc, EventSendProvider, EventU32SenderMpscBounded,
SendEventProvider, MpscEventReceiver,
}, },
events::EventU32, events::EventU32,
params::Params, params::Params,
pus::{ pus::{
event_man::{ event_man::{
DefaultPusMgmtBackendProvider, EventReporter, EventRequest, EventRequestWithToken, DefaultPusEventU32Dispatcher, EventReporter, EventRequest, EventRequestWithToken,
PusEventDispatcher,
},
verification::{
TcStateStarted, VerificationReporterWithSender, VerificationReportingProvider,
VerificationToken,
}, },
verification::{TcStateStarted, VerificationReportingProvider, VerificationToken},
EcssTmSender, EcssTmSender,
}, },
spacepackets::time::cds::{self, TimeProvider}, spacepackets::time::cds::{self, TimeProvider},
@ -24,38 +20,37 @@ use satrs_example::config::PUS_APID;
use crate::update_time; use crate::update_time;
pub type MpscEventManager = EventManager<SendError<(EventU32, Option<Params>)>>; pub struct PusEventHandler<VerificationReporter: VerificationReportingProvider> {
pub struct PusEventHandler {
event_request_rx: mpsc::Receiver<EventRequestWithToken>, event_request_rx: mpsc::Receiver<EventRequestWithToken>,
pus_event_dispatcher: PusEventDispatcher<(), EventU32>, pus_event_dispatcher: DefaultPusEventU32Dispatcher<()>,
pus_event_man_rx: mpsc::Receiver<(EventU32, Option<Params>)>, pus_event_man_rx: mpsc::Receiver<(EventU32, Option<Params>)>,
tm_sender: Box<dyn EcssTmSender>, tm_sender: Box<dyn EcssTmSender>,
time_provider: TimeProvider, time_provider: TimeProvider,
timestamp: [u8; 7], timestamp: [u8; 7],
verif_handler: VerificationReporterWithSender, verif_handler: VerificationReporter,
} }
/* /*
*/ */
impl PusEventHandler { impl<VerificationReporter: VerificationReportingProvider> PusEventHandler<VerificationReporter> {
pub fn new( pub fn new(
verif_handler: VerificationReporterWithSender, verif_handler: VerificationReporter,
event_manager: &mut MpscEventManager, event_manager: &mut EventManagerWithBoundedMpsc,
event_request_rx: mpsc::Receiver<EventRequestWithToken>, event_request_rx: mpsc::Receiver<EventRequestWithToken>,
tm_sender: impl EcssTmSender, tm_sender: impl EcssTmSender,
) -> Self { ) -> Self {
let (pus_event_man_tx, pus_event_man_rx) = mpsc::channel(); let event_queue_cap = 30;
let (pus_event_man_tx, pus_event_man_rx) = mpsc::sync_channel(event_queue_cap);
// All events sent to the manager are routed to the PUS event manager, which generates PUS event // All events sent to the manager are routed to the PUS event manager, which generates PUS event
// telemetry for each event. // telemetry for each event.
let event_reporter = EventReporter::new(PUS_APID, 128).unwrap(); let event_reporter = EventReporter::new(PUS_APID, 128).unwrap();
let pus_tm_backend = DefaultPusMgmtBackendProvider::<EventU32>::default();
let pus_event_dispatcher = let pus_event_dispatcher =
PusEventDispatcher::new(event_reporter, Box::new(pus_tm_backend)); DefaultPusEventU32Dispatcher::new_with_default_backend(event_reporter);
let pus_event_man_send_provider = MpscEventU32SendProvider::new(1, pus_event_man_tx); let pus_event_man_send_provider =
EventU32SenderMpscBounded::new(1, pus_event_man_tx, event_queue_cap);
event_manager.subscribe_all(pus_event_man_send_provider.id()); event_manager.subscribe_all(pus_event_man_send_provider.channel_id());
event_manager.add_sender(pus_event_man_send_provider); event_manager.add_sender(pus_event_man_send_provider);
Self { Self {
@ -117,7 +112,7 @@ impl PusEventHandler {
} }
pub struct EventManagerWrapper { pub struct EventManagerWrapper {
event_manager: MpscEventManager, event_manager: EventManagerWithBoundedMpsc,
event_sender: mpsc::Sender<(EventU32, Option<Params>)>, event_sender: mpsc::Sender<(EventU32, Option<Params>)>,
} }
@ -128,7 +123,7 @@ impl EventManagerWrapper {
let (event_sender, event_man_rx) = mpsc::channel(); let (event_sender, event_man_rx) = mpsc::channel();
let event_recv = MpscEventReceiver::<EventU32>::new(event_man_rx); let event_recv = MpscEventReceiver::<EventU32>::new(event_man_rx);
Self { Self {
event_manager: EventManagerWithMpscQueue::new(Box::new(event_recv)), event_manager: EventManagerWithBoundedMpsc::new(event_recv),
event_sender, event_sender,
} }
} }
@ -137,7 +132,7 @@ impl EventManagerWrapper {
self.event_sender.clone() self.event_sender.clone()
} }
pub fn event_manager(&mut self) -> &mut MpscEventManager { pub fn event_manager(&mut self) -> &mut EventManagerWithBoundedMpsc {
&mut self.event_manager &mut self.event_manager
} }
@ -149,15 +144,15 @@ impl EventManagerWrapper {
} }
} }
pub struct EventHandler { pub struct EventHandler<VerificationReporter: VerificationReportingProvider> {
pub event_man_wrapper: EventManagerWrapper, pub event_man_wrapper: EventManagerWrapper,
pub pus_event_handler: PusEventHandler, pub pus_event_handler: PusEventHandler<VerificationReporter>,
} }
impl EventHandler { impl<VerificationReporter: VerificationReportingProvider> EventHandler<VerificationReporter> {
pub fn new( pub fn new(
tm_sender: impl EcssTmSender, tm_sender: impl EcssTmSender,
verif_handler: VerificationReporterWithSender, verif_handler: VerificationReporter,
event_request_rx: mpsc::Receiver<EventRequestWithToken>, event_request_rx: mpsc::Receiver<EventRequestWithToken>,
) -> Self { ) -> Self {
let mut event_man_wrapper = EventManagerWrapper::new(); let mut event_man_wrapper = EventManagerWrapper::new();
@ -178,7 +173,7 @@ impl EventHandler {
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn event_manager(&mut self) -> &mut MpscEventManager { pub fn event_manager(&mut self) -> &mut EventManagerWithBoundedMpsc {
self.event_man_wrapper.event_manager() self.event_man_wrapper.event_manager()
} }

View File

@ -44,7 +44,7 @@ use crate::tmtc::{
use crate::udp::{StaticUdpTmHandler, UdpTmtcServer}; use crate::udp::{StaticUdpTmHandler, UdpTmtcServer};
use satrs::pus::event_man::EventRequestWithToken; use satrs::pus::event_man::EventRequestWithToken;
use satrs::pus::verification::{VerificationReporterCfg, VerificationReporterWithSender}; use satrs::pus::verification::{VerificationReporterCfg, VerificationReporterWithSender};
use satrs::pus::{EcssTmSender, MpscTmAsVecSender, MpscTmInSharedPoolSender}; use satrs::pus::{EcssTmSender, TmAsVecSenderWithId, TmInSharedPoolSenderWithId};
use satrs::spacepackets::{time::cds::TimeProvider, time::TimeWriter}; use satrs::spacepackets::{time::cds::TimeProvider, time::TimeWriter};
use satrs::tmtc::CcsdsDistributor; use satrs::tmtc::CcsdsDistributor;
use satrs::ChannelId; use satrs::ChannelId;
@ -54,11 +54,13 @@ use std::sync::{Arc, RwLock};
use std::thread; use std::thread;
use std::time::Duration; use std::time::Duration;
fn create_verification_reporter(verif_sender: impl EcssTmSender) -> VerificationReporterWithSender { fn create_verification_reporter<Sender: EcssTmSender + Clone>(
verif_sender: Sender,
) -> VerificationReporterWithSender<Sender> {
let verif_cfg = VerificationReporterCfg::new(PUS_APID, 1, 2, 8).unwrap(); let verif_cfg = VerificationReporterCfg::new(PUS_APID, 1, 2, 8).unwrap();
// Every software component which needs to generate verification telemetry, gets a cloned // Every software component which needs to generate verification telemetry, gets a cloned
// verification reporter. // verification reporter.
VerificationReporterWithSender::new(&verif_cfg, Box::new(verif_sender)) VerificationReporterWithSender::new(&verif_cfg, verif_sender)
} }
#[allow(dead_code)] #[allow(dead_code)]
@ -68,13 +70,13 @@ fn static_tmtc_pool_main() {
let shared_tc_pool = SharedTcPool { let shared_tc_pool = SharedTcPool {
pool: Arc::new(RwLock::new(tc_pool)), pool: Arc::new(RwLock::new(tc_pool)),
}; };
let (tc_source_tx, tc_source_rx) = channel(); let (tc_source_tx, tc_source_rx) = mpsc::sync_channel(50);
let (tm_funnel_tx, tm_funnel_rx) = channel(); let (tm_funnel_tx, tm_funnel_rx) = mpsc::sync_channel(50);
let (tm_server_tx, tm_server_rx) = channel(); let (tm_server_tx, tm_server_rx) = mpsc::sync_channel(50);
// Every software component which needs to generate verification telemetry, receives a cloned // Every software component which needs to generate verification telemetry, receives a cloned
// verification reporter. // verification reporter.
let verif_reporter = create_verification_reporter(MpscTmInSharedPoolSender::new( let verif_reporter = create_verification_reporter(TmInSharedPoolSenderWithId::new(
TmSenderId::PusVerification as ChannelId, TmSenderId::PusVerification as ChannelId,
"verif_sender", "verif_sender",
shared_tm_pool.clone(), shared_tm_pool.clone(),
@ -102,7 +104,7 @@ fn static_tmtc_pool_main() {
// The event task is the core handler to perform the event routing and TM handling as specified // The event task is the core handler to perform the event routing and TM handling as specified
// in the sat-rs documentation. // in the sat-rs documentation.
let mut event_handler = EventHandler::new( let mut event_handler = EventHandler::new(
MpscTmInSharedPoolSender::new( TmInSharedPoolSenderWithId::new(
TmSenderId::AllEvents as ChannelId, TmSenderId::AllEvents as ChannelId,
"ALL_EVENTS_TX", "ALL_EVENTS_TX",
shared_tm_pool.clone(), shared_tm_pool.clone(),
@ -180,7 +182,7 @@ fn static_tmtc_pool_main() {
); );
let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT); let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
let udp_ccsds_distributor = CcsdsDistributor::new(Box::new(ccsds_receiver.clone())); let udp_ccsds_distributor = CcsdsDistributor::new(ccsds_receiver.clone());
let udp_tc_server = UdpTcServer::new(sock_addr, 2048, Box::new(udp_ccsds_distributor)) let udp_tc_server = UdpTcServer::new(sock_addr, 2048, Box::new(udp_ccsds_distributor))
.expect("creating UDP TMTC server failed"); .expect("creating UDP TMTC server failed");
let mut udp_tmtc_server = UdpTmtcServer { let mut udp_tmtc_server = UdpTmtcServer {
@ -191,7 +193,7 @@ fn static_tmtc_pool_main() {
}, },
}; };
let tcp_ccsds_distributor = CcsdsDistributor::new(Box::new(ccsds_receiver)); let tcp_ccsds_distributor = CcsdsDistributor::new(ccsds_receiver);
let tcp_server_cfg = ServerConfig::new(sock_addr, Duration::from_millis(400), 4096, 8192); let tcp_server_cfg = ServerConfig::new(sock_addr, Duration::from_millis(400), 4096, 8192);
let sync_tm_tcp_source = SyncTcpTmSource::new(200); let sync_tm_tcp_source = SyncTcpTmSource::new(200);
let mut tcp_server = TcpTask::new( let mut tcp_server = TcpTask::new(
@ -202,7 +204,7 @@ fn static_tmtc_pool_main() {
.expect("tcp server creation failed"); .expect("tcp server creation failed");
let mut acs_task = AcsTask::new( let mut acs_task = AcsTask::new(
MpscTmInSharedPoolSender::new( TmInSharedPoolSenderWithId::new(
TmSenderId::AcsSubsystem as ChannelId, TmSenderId::AcsSubsystem as ChannelId,
"ACS_TASK_SENDER", "ACS_TASK_SENDER",
shared_tm_pool.clone(), shared_tm_pool.clone(),
@ -303,7 +305,7 @@ fn dyn_tmtc_pool_main() {
let (tm_server_tx, tm_server_rx) = channel(); let (tm_server_tx, tm_server_rx) = channel();
// Every software component which needs to generate verification telemetry, gets a cloned // Every software component which needs to generate verification telemetry, gets a cloned
// verification reporter. // verification reporter.
let verif_reporter = create_verification_reporter(MpscTmAsVecSender::new( let verif_reporter = create_verification_reporter(TmAsVecSenderWithId::new(
TmSenderId::PusVerification as ChannelId, TmSenderId::PusVerification as ChannelId,
"verif_sender", "verif_sender",
tm_funnel_tx.clone(), tm_funnel_tx.clone(),
@ -324,7 +326,7 @@ fn dyn_tmtc_pool_main() {
// The event task is the core handler to perform the event routing and TM handling as specified // The event task is the core handler to perform the event routing and TM handling as specified
// in the sat-rs documentation. // in the sat-rs documentation.
let mut event_handler = EventHandler::new( let mut event_handler = EventHandler::new(
MpscTmAsVecSender::new( TmAsVecSenderWithId::new(
TmSenderId::AllEvents as ChannelId, TmSenderId::AllEvents as ChannelId,
"ALL_EVENTS_TX", "ALL_EVENTS_TX",
tm_funnel_tx.clone(), tm_funnel_tx.clone(),
@ -394,7 +396,7 @@ fn dyn_tmtc_pool_main() {
); );
let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT); let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
let udp_ccsds_distributor = CcsdsDistributor::new(Box::new(ccsds_receiver.clone())); let udp_ccsds_distributor = CcsdsDistributor::new(ccsds_receiver.clone());
let udp_tc_server = UdpTcServer::new(sock_addr, 2048, Box::new(udp_ccsds_distributor)) let udp_tc_server = UdpTcServer::new(sock_addr, 2048, Box::new(udp_ccsds_distributor))
.expect("creating UDP TMTC server failed"); .expect("creating UDP TMTC server failed");
let mut udp_tmtc_server = UdpTmtcServer { let mut udp_tmtc_server = UdpTmtcServer {
@ -404,7 +406,7 @@ fn dyn_tmtc_pool_main() {
}, },
}; };
let tcp_ccsds_distributor = CcsdsDistributor::new(Box::new(ccsds_receiver)); let tcp_ccsds_distributor = CcsdsDistributor::new(ccsds_receiver);
let tcp_server_cfg = ServerConfig::new(sock_addr, Duration::from_millis(400), 4096, 8192); let tcp_server_cfg = ServerConfig::new(sock_addr, Duration::from_millis(400), 4096, 8192);
let sync_tm_tcp_source = SyncTcpTmSource::new(200); let sync_tm_tcp_source = SyncTcpTmSource::new(200);
let mut tcp_server = TcpTask::new( let mut tcp_server = TcpTask::new(
@ -415,7 +417,7 @@ fn dyn_tmtc_pool_main() {
.expect("tcp server creation failed"); .expect("tcp server creation failed");
let mut acs_task = AcsTask::new( let mut acs_task = AcsTask::new(
MpscTmAsVecSender::new( TmAsVecSenderWithId::new(
TmSenderId::AcsSubsystem as ChannelId, TmSenderId::AcsSubsystem as ChannelId,
"ACS_TASK_SENDER", "ACS_TASK_SENDER",
tm_funnel_tx.clone(), tm_funnel_tx.clone(),

View File

@ -2,14 +2,17 @@ use log::{error, warn};
use satrs::action::ActionRequest; use satrs::action::ActionRequest;
use satrs::pool::{SharedStaticMemoryPool, StoreAddr}; use satrs::pool::{SharedStaticMemoryPool, StoreAddr};
use satrs::pus::action::{PusActionToRequestConverter, PusService8ActionHandler}; use satrs::pus::action::{PusActionToRequestConverter, PusService8ActionHandler};
use satrs::pus::verification::std_mod::{
VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
};
use satrs::pus::verification::{ use satrs::pus::verification::{
FailParams, TcStateAccepted, VerificationReporterWithSender, VerificationReportingProvider, FailParams, TcStateAccepted, VerificationReportingProvider, VerificationToken,
VerificationToken,
}; };
use satrs::pus::{ use satrs::pus::{
EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter,
MpscTcReceiver, MpscTmAsVecSender, MpscTmInSharedPoolSender, PusPacketHandlerResult, EcssTcReceiverCore, EcssTmSenderCore, MpscTcReceiver, PusPacketHandlerResult,
PusPacketHandlingError, PusServiceHelper, PusPacketHandlingError, PusServiceHelper, TmAsVecSenderWithId, TmAsVecSenderWithMpsc,
TmInSharedPoolSenderWithBoundedMpsc, TmInSharedPoolSenderWithId,
}; };
use satrs::request::TargetAndApidId; use satrs::request::TargetAndApidId;
use satrs::spacepackets::ecss::tc::PusTcReader; use satrs::spacepackets::ecss::tc::PusTcReader;
@ -74,13 +77,18 @@ impl PusActionToRequestConverter for ExampleActionRequestConverter {
pub fn create_action_service_static( pub fn create_action_service_static(
shared_tm_store: SharedTmPool, shared_tm_store: SharedTmPool,
tm_funnel_tx: mpsc::Sender<StoreAddr>, tm_funnel_tx: mpsc::SyncSender<StoreAddr>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporterWithSharedPoolMpscBoundedSender,
tc_pool: SharedStaticMemoryPool, tc_pool: SharedStaticMemoryPool,
pus_action_rx: mpsc::Receiver<EcssTcAndToken>, pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
action_router: GenericRequestRouter, action_router: GenericRequestRouter,
) -> Pus8Wrapper<EcssTcInSharedStoreConverter> { ) -> Pus8Wrapper<
let action_srv_tm_sender = MpscTmInSharedPoolSender::new( MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
> {
let action_srv_tm_sender = TmInSharedPoolSenderWithId::new(
TmSenderId::PusAction as ChannelId, TmSenderId::PusAction as ChannelId,
"PUS_8_TM_SENDER", "PUS_8_TM_SENDER",
shared_tm_store.clone(), shared_tm_store.clone(),
@ -93,8 +101,8 @@ pub fn create_action_service_static(
); );
let pus_8_handler = PusService8ActionHandler::new( let pus_8_handler = PusService8ActionHandler::new(
PusServiceHelper::new( PusServiceHelper::new(
Box::new(action_srv_receiver), action_srv_receiver,
Box::new(action_srv_tm_sender), action_srv_tm_sender,
PUS_APID, PUS_APID,
verif_reporter.clone(), verif_reporter.clone(),
EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048), EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048),
@ -108,11 +116,16 @@ pub fn create_action_service_static(
pub fn create_action_service_dynamic( pub fn create_action_service_dynamic(
tm_funnel_tx: mpsc::Sender<Vec<u8>>, tm_funnel_tx: mpsc::Sender<Vec<u8>>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporterWithVecMpscSender,
pus_action_rx: mpsc::Receiver<EcssTcAndToken>, pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
action_router: GenericRequestRouter, action_router: GenericRequestRouter,
) -> Pus8Wrapper<EcssTcInVecConverter> { ) -> Pus8Wrapper<
let action_srv_tm_sender = MpscTmAsVecSender::new( MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscSender,
> {
let action_srv_tm_sender = TmAsVecSenderWithId::new(
TmSenderId::PusAction as ChannelId, TmSenderId::PusAction as ChannelId,
"PUS_8_TM_SENDER", "PUS_8_TM_SENDER",
tm_funnel_tx.clone(), tm_funnel_tx.clone(),
@ -124,8 +137,8 @@ pub fn create_action_service_dynamic(
); );
let pus_8_handler = PusService8ActionHandler::new( let pus_8_handler = PusService8ActionHandler::new(
PusServiceHelper::new( PusServiceHelper::new(
Box::new(action_srv_receiver), action_srv_receiver,
Box::new(action_srv_tm_sender), action_srv_tm_sender,
PUS_APID, PUS_APID,
verif_reporter.clone(), verif_reporter.clone(),
EcssTcInVecConverter::default(), EcssTcInVecConverter::default(),
@ -137,17 +150,30 @@ pub fn create_action_service_dynamic(
Pus8Wrapper { pus_8_handler } Pus8Wrapper { pus_8_handler }
} }
pub struct Pus8Wrapper<TcInMemConverter: EcssTcInMemConverter> { pub struct Pus8Wrapper<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> {
pub(crate) pus_8_handler: PusService8ActionHandler< pub(crate) pus_8_handler: PusService8ActionHandler<
TcReceiver,
TmSender,
TcInMemConverter, TcInMemConverter,
VerificationReporterWithSender, VerificationReporter,
ExampleActionRequestConverter, ExampleActionRequestConverter,
GenericRequestRouter, GenericRequestRouter,
GenericRoutingErrorHandler<8>, GenericRoutingErrorHandler<8>,
>, >,
} }
impl<TcInMemConverter: EcssTcInMemConverter> Pus8Wrapper<TcInMemConverter> { impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> Pus8Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
{
pub fn handle_next_packet(&mut self) -> bool { pub fn handle_next_packet(&mut self) -> bool {
match self.pus_8_handler.handle_one_tc() { match self.pus_8_handler.handle_one_tc() {
Ok(result) => match result { Ok(result) => match result {

View File

@ -4,11 +4,15 @@ use log::{error, warn};
use satrs::pool::{SharedStaticMemoryPool, StoreAddr}; use satrs::pool::{SharedStaticMemoryPool, StoreAddr};
use satrs::pus::event_man::EventRequestWithToken; use satrs::pus::event_man::EventRequestWithToken;
use satrs::pus::event_srv::PusService5EventHandler; use satrs::pus::event_srv::PusService5EventHandler;
use satrs::pus::verification::VerificationReporterWithSender; use satrs::pus::verification::std_mod::{
VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
};
use satrs::pus::verification::VerificationReportingProvider;
use satrs::pus::{ use satrs::pus::{
EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter,
MpscTcReceiver, MpscTmAsVecSender, MpscTmInSharedPoolSender, PusPacketHandlerResult, EcssTcReceiverCore, EcssTmSenderCore, MpscTcReceiver, PusPacketHandlerResult, PusServiceHelper,
PusServiceHelper, TmAsVecSenderWithId, TmAsVecSenderWithMpsc, TmInSharedPoolSenderWithBoundedMpsc,
TmInSharedPoolSenderWithId,
}; };
use satrs::tmtc::tm_helper::SharedTmPool; use satrs::tmtc::tm_helper::SharedTmPool;
use satrs::ChannelId; use satrs::ChannelId;
@ -16,13 +20,18 @@ use satrs_example::config::{TcReceiverId, TmSenderId, PUS_APID};
pub fn create_event_service_static( pub fn create_event_service_static(
shared_tm_store: SharedTmPool, shared_tm_store: SharedTmPool,
tm_funnel_tx: mpsc::Sender<StoreAddr>, tm_funnel_tx: mpsc::SyncSender<StoreAddr>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporterWithSharedPoolMpscBoundedSender,
tc_pool: SharedStaticMemoryPool, tc_pool: SharedStaticMemoryPool,
pus_event_rx: mpsc::Receiver<EcssTcAndToken>, pus_event_rx: mpsc::Receiver<EcssTcAndToken>,
event_request_tx: mpsc::Sender<EventRequestWithToken>, event_request_tx: mpsc::Sender<EventRequestWithToken>,
) -> Pus5Wrapper<EcssTcInSharedStoreConverter> { ) -> Pus5Wrapper<
let event_srv_tm_sender = MpscTmInSharedPoolSender::new( MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
> {
let event_srv_tm_sender = TmInSharedPoolSenderWithId::new(
TmSenderId::PusEvent as ChannelId, TmSenderId::PusEvent as ChannelId,
"PUS_5_TM_SENDER", "PUS_5_TM_SENDER",
shared_tm_store.clone(), shared_tm_store.clone(),
@ -35,8 +44,8 @@ pub fn create_event_service_static(
); );
let pus_5_handler = PusService5EventHandler::new( let pus_5_handler = PusService5EventHandler::new(
PusServiceHelper::new( PusServiceHelper::new(
Box::new(event_srv_receiver), event_srv_receiver,
Box::new(event_srv_tm_sender), event_srv_tm_sender,
PUS_APID, PUS_APID,
verif_reporter.clone(), verif_reporter.clone(),
EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048), EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048),
@ -48,11 +57,16 @@ pub fn create_event_service_static(
pub fn create_event_service_dynamic( pub fn create_event_service_dynamic(
tm_funnel_tx: mpsc::Sender<Vec<u8>>, tm_funnel_tx: mpsc::Sender<Vec<u8>>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporterWithVecMpscSender,
pus_event_rx: mpsc::Receiver<EcssTcAndToken>, pus_event_rx: mpsc::Receiver<EcssTcAndToken>,
event_request_tx: mpsc::Sender<EventRequestWithToken>, event_request_tx: mpsc::Sender<EventRequestWithToken>,
) -> Pus5Wrapper<EcssTcInVecConverter> { ) -> Pus5Wrapper<
let event_srv_tm_sender = MpscTmAsVecSender::new( MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscSender,
> {
let event_srv_tm_sender = TmAsVecSenderWithId::new(
TmSenderId::PusEvent as ChannelId, TmSenderId::PusEvent as ChannelId,
"PUS_5_TM_SENDER", "PUS_5_TM_SENDER",
tm_funnel_tx, tm_funnel_tx,
@ -64,8 +78,8 @@ pub fn create_event_service_dynamic(
); );
let pus_5_handler = PusService5EventHandler::new( let pus_5_handler = PusService5EventHandler::new(
PusServiceHelper::new( PusServiceHelper::new(
Box::new(event_srv_receiver), event_srv_receiver,
Box::new(event_srv_tm_sender), event_srv_tm_sender,
PUS_APID, PUS_APID,
verif_reporter.clone(), verif_reporter.clone(),
EcssTcInVecConverter::default(), EcssTcInVecConverter::default(),
@ -75,11 +89,23 @@ pub fn create_event_service_dynamic(
Pus5Wrapper { pus_5_handler } Pus5Wrapper { pus_5_handler }
} }
pub struct Pus5Wrapper<TcInMemConverter: EcssTcInMemConverter> { pub struct Pus5Wrapper<
pub pus_5_handler: PusService5EventHandler<TcInMemConverter, VerificationReporterWithSender>, TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> {
pub pus_5_handler:
PusService5EventHandler<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
} }
impl<TcInMemConverter: EcssTcInMemConverter> Pus5Wrapper<TcInMemConverter> { impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> Pus5Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
{
pub fn handle_next_packet(&mut self) -> bool { pub fn handle_next_packet(&mut self) -> bool {
match self.pus_5_handler.handle_one_tc() { match self.pus_5_handler.handle_one_tc() {
Ok(result) => match result { Ok(result) => match result {

View File

@ -2,14 +2,17 @@ use log::{error, warn};
use satrs::hk::{CollectionIntervalFactor, HkRequest}; use satrs::hk::{CollectionIntervalFactor, HkRequest};
use satrs::pool::{SharedStaticMemoryPool, StoreAddr}; use satrs::pool::{SharedStaticMemoryPool, StoreAddr};
use satrs::pus::hk::{PusHkToRequestConverter, PusService3HkHandler}; use satrs::pus::hk::{PusHkToRequestConverter, PusService3HkHandler};
use satrs::pus::verification::std_mod::{
VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
};
use satrs::pus::verification::{ use satrs::pus::verification::{
FailParams, TcStateAccepted, VerificationReporterWithSender, VerificationReportingProvider, FailParams, TcStateAccepted, VerificationReportingProvider, VerificationToken,
VerificationToken,
}; };
use satrs::pus::{ use satrs::pus::{
EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter,
MpscTcReceiver, MpscTmAsVecSender, MpscTmInSharedPoolSender, PusPacketHandlerResult, EcssTcReceiverCore, EcssTmSenderCore, MpscTcReceiver, PusPacketHandlerResult,
PusPacketHandlingError, PusServiceHelper, PusPacketHandlingError, PusServiceHelper, TmAsVecSenderWithId, TmAsVecSenderWithMpsc,
TmInSharedPoolSenderWithBoundedMpsc, TmInSharedPoolSenderWithId,
}; };
use satrs::request::TargetAndApidId; use satrs::request::TargetAndApidId;
use satrs::spacepackets::ecss::tc::PusTcReader; use satrs::spacepackets::ecss::tc::PusTcReader;
@ -143,13 +146,18 @@ impl PusHkToRequestConverter for ExampleHkRequestConverter {
pub fn create_hk_service_static( pub fn create_hk_service_static(
shared_tm_store: SharedTmPool, shared_tm_store: SharedTmPool,
tm_funnel_tx: mpsc::Sender<StoreAddr>, tm_funnel_tx: mpsc::SyncSender<StoreAddr>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporterWithSharedPoolMpscBoundedSender,
tc_pool: SharedStaticMemoryPool, tc_pool: SharedStaticMemoryPool,
pus_hk_rx: mpsc::Receiver<EcssTcAndToken>, pus_hk_rx: mpsc::Receiver<EcssTcAndToken>,
request_router: GenericRequestRouter, request_router: GenericRequestRouter,
) -> Pus3Wrapper<EcssTcInSharedStoreConverter> { ) -> Pus3Wrapper<
let hk_srv_tm_sender = MpscTmInSharedPoolSender::new( MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
> {
let hk_srv_tm_sender = TmInSharedPoolSenderWithId::new(
TmSenderId::PusHk as ChannelId, TmSenderId::PusHk as ChannelId,
"PUS_3_TM_SENDER", "PUS_3_TM_SENDER",
shared_tm_store.clone(), shared_tm_store.clone(),
@ -159,8 +167,8 @@ pub fn create_hk_service_static(
MpscTcReceiver::new(TcReceiverId::PusHk as ChannelId, "PUS_8_TC_RECV", pus_hk_rx); MpscTcReceiver::new(TcReceiverId::PusHk as ChannelId, "PUS_8_TC_RECV", pus_hk_rx);
let pus_3_handler = PusService3HkHandler::new( let pus_3_handler = PusService3HkHandler::new(
PusServiceHelper::new( PusServiceHelper::new(
Box::new(hk_srv_receiver), hk_srv_receiver,
Box::new(hk_srv_tm_sender), hk_srv_tm_sender,
PUS_APID, PUS_APID,
verif_reporter.clone(), verif_reporter.clone(),
EcssTcInSharedStoreConverter::new(tc_pool, 2048), EcssTcInSharedStoreConverter::new(tc_pool, 2048),
@ -174,11 +182,16 @@ pub fn create_hk_service_static(
pub fn create_hk_service_dynamic( pub fn create_hk_service_dynamic(
tm_funnel_tx: mpsc::Sender<Vec<u8>>, tm_funnel_tx: mpsc::Sender<Vec<u8>>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporterWithVecMpscSender,
pus_hk_rx: mpsc::Receiver<EcssTcAndToken>, pus_hk_rx: mpsc::Receiver<EcssTcAndToken>,
request_router: GenericRequestRouter, request_router: GenericRequestRouter,
) -> Pus3Wrapper<EcssTcInVecConverter> { ) -> Pus3Wrapper<
let hk_srv_tm_sender = MpscTmAsVecSender::new( MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscSender,
> {
let hk_srv_tm_sender = TmAsVecSenderWithId::new(
TmSenderId::PusHk as ChannelId, TmSenderId::PusHk as ChannelId,
"PUS_3_TM_SENDER", "PUS_3_TM_SENDER",
tm_funnel_tx.clone(), tm_funnel_tx.clone(),
@ -187,8 +200,8 @@ pub fn create_hk_service_dynamic(
MpscTcReceiver::new(TcReceiverId::PusHk as ChannelId, "PUS_8_TC_RECV", pus_hk_rx); MpscTcReceiver::new(TcReceiverId::PusHk as ChannelId, "PUS_8_TC_RECV", pus_hk_rx);
let pus_3_handler = PusService3HkHandler::new( let pus_3_handler = PusService3HkHandler::new(
PusServiceHelper::new( PusServiceHelper::new(
Box::new(hk_srv_receiver), hk_srv_receiver,
Box::new(hk_srv_tm_sender), hk_srv_tm_sender,
PUS_APID, PUS_APID,
verif_reporter.clone(), verif_reporter.clone(),
EcssTcInVecConverter::default(), EcssTcInVecConverter::default(),
@ -200,17 +213,30 @@ pub fn create_hk_service_dynamic(
Pus3Wrapper { pus_3_handler } Pus3Wrapper { pus_3_handler }
} }
pub struct Pus3Wrapper<TcInMemConverter: EcssTcInMemConverter> { pub struct Pus3Wrapper<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> {
pub(crate) pus_3_handler: PusService3HkHandler< pub(crate) pus_3_handler: PusService3HkHandler<
TcReceiver,
TmSender,
TcInMemConverter, TcInMemConverter,
VerificationReporterWithSender, VerificationReporter,
ExampleHkRequestConverter, ExampleHkRequestConverter,
GenericRequestRouter, GenericRequestRouter,
GenericRoutingErrorHandler<3>, GenericRoutingErrorHandler<3>,
>, >,
} }
impl<TcInMemConverter: EcssTcInMemConverter> Pus3Wrapper<TcInMemConverter> { impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> Pus3Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
{
pub fn handle_next_packet(&mut self) -> bool { pub fn handle_next_packet(&mut self) -> bool {
match self.pus_3_handler.handle_one_tc() { match self.pus_3_handler.handle_one_tc() {
Ok(result) => match result { Ok(result) => match result {

View File

@ -1,8 +1,6 @@
use crate::tmtc::MpscStoreAndSendError; use crate::tmtc::MpscStoreAndSendError;
use log::warn; use log::warn;
use satrs::pus::verification::{ use satrs::pus::verification::{FailParams, VerificationReportingProvider};
FailParams, StdVerifReporterWithSender, VerificationReportingProvider,
};
use satrs::pus::{ use satrs::pus::{
EcssTcAndToken, GenericRoutingError, PusPacketHandlerResult, PusRoutingErrorHandler, TcInMemory, EcssTcAndToken, GenericRoutingError, PusPacketHandlerResult, PusRoutingErrorHandler, TcInMemory,
}; };
@ -28,8 +26,8 @@ pub struct PusTcMpscRouter {
pub action_service_receiver: Sender<EcssTcAndToken>, pub action_service_receiver: Sender<EcssTcAndToken>,
} }
pub struct PusReceiver { pub struct PusReceiver<VerificationReporter: VerificationReportingProvider> {
pub verif_reporter: StdVerifReporterWithSender, pub verif_reporter: VerificationReporter,
pub pus_router: PusTcMpscRouter, pub pus_router: PusTcMpscRouter,
stamp_helper: TimeStampHelper, stamp_helper: TimeStampHelper,
} }
@ -61,8 +59,8 @@ impl TimeStampHelper {
} }
} }
impl PusReceiver { impl<VerificationReporter: VerificationReportingProvider> PusReceiver<VerificationReporter> {
pub fn new(verif_reporter: StdVerifReporterWithSender, pus_router: PusTcMpscRouter) -> Self { pub fn new(verif_reporter: VerificationReporter, pus_router: PusTcMpscRouter) -> Self {
Self { Self {
verif_reporter, verif_reporter,
pus_router, pus_router,
@ -71,7 +69,7 @@ impl PusReceiver {
} }
} }
impl PusReceiver { impl<VerificationReporter: VerificationReportingProvider> PusReceiver<VerificationReporter> {
pub fn handle_tc_packet( pub fn handle_tc_packet(
&mut self, &mut self,
tc_in_memory: TcInMemory, tc_in_memory: TcInMemory,

View File

@ -5,11 +5,15 @@ use log::{error, info, warn};
use satrs::pool::{PoolProvider, StaticMemoryPool, StoreAddr}; use satrs::pool::{PoolProvider, StaticMemoryPool, StoreAddr};
use satrs::pus::scheduler::{PusScheduler, TcInfo}; use satrs::pus::scheduler::{PusScheduler, TcInfo};
use satrs::pus::scheduler_srv::PusService11SchedHandler; use satrs::pus::scheduler_srv::PusService11SchedHandler;
use satrs::pus::verification::VerificationReporterWithSender; use satrs::pus::verification::std_mod::{
VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
};
use satrs::pus::verification::VerificationReportingProvider;
use satrs::pus::{ use satrs::pus::{
EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter,
MpscTcReceiver, MpscTmAsVecSender, MpscTmInSharedPoolSender, PusPacketHandlerResult, EcssTcReceiverCore, EcssTmSenderCore, MpscTcReceiver, PusPacketHandlerResult, PusServiceHelper,
PusServiceHelper, TmAsVecSenderWithId, TmAsVecSenderWithMpsc, TmInSharedPoolSenderWithBoundedMpsc,
TmInSharedPoolSenderWithId,
}; };
use satrs::tmtc::tm_helper::SharedTmPool; use satrs::tmtc::tm_helper::SharedTmPool;
use satrs::ChannelId; use satrs::ChannelId;
@ -51,15 +55,31 @@ impl TcReleaser for mpsc::Sender<Vec<u8>> {
} }
} }
pub struct Pus11Wrapper<TcInMemConverter: EcssTcInMemConverter> { pub struct Pus11Wrapper<
pub pus_11_handler: TcReceiver: EcssTcReceiverCore,
PusService11SchedHandler<TcInMemConverter, VerificationReporterWithSender, PusScheduler>, TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> {
pub pus_11_handler: PusService11SchedHandler<
TcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
PusScheduler,
>,
pub sched_tc_pool: StaticMemoryPool, pub sched_tc_pool: StaticMemoryPool,
pub releaser_buf: [u8; 4096], pub releaser_buf: [u8; 4096],
pub tc_releaser: Box<dyn TcReleaser + Send>, pub tc_releaser: Box<dyn TcReleaser + Send>,
} }
impl<TcInMemConverter: EcssTcInMemConverter> Pus11Wrapper<TcInMemConverter> { impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> Pus11Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
{
pub fn release_tcs(&mut self) { pub fn release_tcs(&mut self) {
let releaser = |enabled: bool, info: &TcInfo, tc: &[u8]| -> bool { let releaser = |enabled: bool, info: &TcInfo, tc: &[u8]| -> bool {
self.tc_releaser.release(enabled, info, tc) self.tc_releaser.release(enabled, info, tc)
@ -110,13 +130,18 @@ impl<TcInMemConverter: EcssTcInMemConverter> Pus11Wrapper<TcInMemConverter> {
pub fn create_scheduler_service_static( pub fn create_scheduler_service_static(
shared_tm_store: SharedTmPool, shared_tm_store: SharedTmPool,
tm_funnel_tx: mpsc::Sender<StoreAddr>, tm_funnel_tx: mpsc::SyncSender<StoreAddr>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporterWithSharedPoolMpscBoundedSender,
tc_releaser: PusTcSourceProviderSharedPool, tc_releaser: PusTcSourceProviderSharedPool,
pus_sched_rx: mpsc::Receiver<EcssTcAndToken>, pus_sched_rx: mpsc::Receiver<EcssTcAndToken>,
sched_tc_pool: StaticMemoryPool, sched_tc_pool: StaticMemoryPool,
) -> Pus11Wrapper<EcssTcInSharedStoreConverter> { ) -> Pus11Wrapper<
let sched_srv_tm_sender = MpscTmInSharedPoolSender::new( MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
> {
let sched_srv_tm_sender = TmInSharedPoolSenderWithId::new(
TmSenderId::PusSched as ChannelId, TmSenderId::PusSched as ChannelId,
"PUS_11_TM_SENDER", "PUS_11_TM_SENDER",
shared_tm_store.clone(), shared_tm_store.clone(),
@ -131,8 +156,8 @@ pub fn create_scheduler_service_static(
.expect("Creating PUS Scheduler failed"); .expect("Creating PUS Scheduler failed");
let pus_11_handler = PusService11SchedHandler::new( let pus_11_handler = PusService11SchedHandler::new(
PusServiceHelper::new( PusServiceHelper::new(
Box::new(sched_srv_receiver), sched_srv_receiver,
Box::new(sched_srv_tm_sender), sched_srv_tm_sender,
PUS_APID, PUS_APID,
verif_reporter.clone(), verif_reporter.clone(),
EcssTcInSharedStoreConverter::new(tc_releaser.clone_backing_pool(), 2048), EcssTcInSharedStoreConverter::new(tc_releaser.clone_backing_pool(), 2048),
@ -149,12 +174,17 @@ pub fn create_scheduler_service_static(
pub fn create_scheduler_service_dynamic( pub fn create_scheduler_service_dynamic(
tm_funnel_tx: mpsc::Sender<Vec<u8>>, tm_funnel_tx: mpsc::Sender<Vec<u8>>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporterWithVecMpscSender,
tc_source_sender: mpsc::Sender<Vec<u8>>, tc_source_sender: mpsc::Sender<Vec<u8>>,
pus_sched_rx: mpsc::Receiver<EcssTcAndToken>, pus_sched_rx: mpsc::Receiver<EcssTcAndToken>,
sched_tc_pool: StaticMemoryPool, sched_tc_pool: StaticMemoryPool,
) -> Pus11Wrapper<EcssTcInVecConverter> { ) -> Pus11Wrapper<
let sched_srv_tm_sender = MpscTmAsVecSender::new( MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscSender,
> {
let sched_srv_tm_sender = TmAsVecSenderWithId::new(
TmSenderId::PusSched as ChannelId, TmSenderId::PusSched as ChannelId,
"PUS_11_TM_SENDER", "PUS_11_TM_SENDER",
tm_funnel_tx, tm_funnel_tx,
@ -168,8 +198,8 @@ pub fn create_scheduler_service_dynamic(
.expect("Creating PUS Scheduler failed"); .expect("Creating PUS Scheduler failed");
let pus_11_handler = PusService11SchedHandler::new( let pus_11_handler = PusService11SchedHandler::new(
PusServiceHelper::new( PusServiceHelper::new(
Box::new(sched_srv_receiver), sched_srv_receiver,
Box::new(sched_srv_tm_sender), sched_srv_tm_sender,
PUS_APID, PUS_APID,
verif_reporter.clone(), verif_reporter.clone(),
EcssTcInVecConverter::default(), EcssTcInVecConverter::default(),

View File

@ -1,25 +1,44 @@
use satrs::pus::EcssTcInMemConverter; use satrs::pus::{
verification::VerificationReportingProvider, EcssTcInMemConverter, EcssTcReceiverCore,
EcssTmSenderCore,
};
use super::{ use super::{
action::Pus8Wrapper, event::Pus5Wrapper, hk::Pus3Wrapper, scheduler::Pus11Wrapper, action::Pus8Wrapper, event::Pus5Wrapper, hk::Pus3Wrapper, scheduler::Pus11Wrapper,
test::Service17CustomWrapper, test::Service17CustomWrapper,
}; };
pub struct PusStack<TcInMemConverter: EcssTcInMemConverter> { pub struct PusStack<
event_srv: Pus5Wrapper<TcInMemConverter>, TcReceiver: EcssTcReceiverCore,
hk_srv: Pus3Wrapper<TcInMemConverter>, TmSender: EcssTmSenderCore,
action_srv: Pus8Wrapper<TcInMemConverter>, TcInMemConverter: EcssTcInMemConverter,
schedule_srv: Pus11Wrapper<TcInMemConverter>, VerificationReporter: VerificationReportingProvider,
test_srv: Service17CustomWrapper<TcInMemConverter>, > {
event_srv: Pus5Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
hk_srv: Pus3Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
action_srv: Pus8Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
schedule_srv: Pus11Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
test_srv: Service17CustomWrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
} }
impl<TcInMemConverter: EcssTcInMemConverter> PusStack<TcInMemConverter> { impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> PusStack<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
{
pub fn new( pub fn new(
hk_srv: Pus3Wrapper<TcInMemConverter>, hk_srv: Pus3Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
event_srv: Pus5Wrapper<TcInMemConverter>, event_srv: Pus5Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
action_srv: Pus8Wrapper<TcInMemConverter>, action_srv: Pus8Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
schedule_srv: Pus11Wrapper<TcInMemConverter>, schedule_srv: Pus11Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
test_srv: Service17CustomWrapper<TcInMemConverter>, test_srv: Service17CustomWrapper<
TcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
>,
) -> Self { ) -> Self {
Self { Self {
event_srv, event_srv,

View File

@ -2,12 +2,15 @@ use log::{info, warn};
use satrs::params::Params; use satrs::params::Params;
use satrs::pool::{SharedStaticMemoryPool, StoreAddr}; use satrs::pool::{SharedStaticMemoryPool, StoreAddr};
use satrs::pus::test::PusService17TestHandler; use satrs::pus::test::PusService17TestHandler;
use satrs::pus::verification::{FailParams, VerificationReportingProvider};
use satrs::pus::verification::{ use satrs::pus::verification::{
FailParams, VerificationReporterWithSender, VerificationReportingProvider, VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
}; };
use satrs::pus::{ use satrs::pus::{
EcssTcAndToken, EcssTcInMemConverter, EcssTcInVecConverter, MpscTcReceiver, MpscTmAsVecSender, EcssTcAndToken, EcssTcInMemConverter, EcssTcInVecConverter, EcssTcReceiverCore,
MpscTmInSharedPoolSender, PusPacketHandlerResult, PusServiceHelper, EcssTmSenderCore, MpscTcReceiver, PusPacketHandlerResult, PusServiceHelper,
TmAsVecSenderWithId, TmAsVecSenderWithMpsc, TmInSharedPoolSenderWithBoundedMpsc,
TmInSharedPoolSenderWithId,
}; };
use satrs::spacepackets::ecss::tc::PusTcReader; use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::PusPacket; use satrs::spacepackets::ecss::PusPacket;
@ -21,13 +24,18 @@ use std::sync::mpsc::{self, Sender};
pub fn create_test_service_static( pub fn create_test_service_static(
shared_tm_store: SharedTmPool, shared_tm_store: SharedTmPool,
tm_funnel_tx: mpsc::Sender<StoreAddr>, tm_funnel_tx: mpsc::SyncSender<StoreAddr>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporterWithSharedPoolMpscBoundedSender,
tc_pool: SharedStaticMemoryPool, tc_pool: SharedStaticMemoryPool,
event_sender: mpsc::Sender<(EventU32, Option<Params>)>, event_sender: mpsc::Sender<(EventU32, Option<Params>)>,
pus_test_rx: mpsc::Receiver<EcssTcAndToken>, pus_test_rx: mpsc::Receiver<EcssTcAndToken>,
) -> Service17CustomWrapper<EcssTcInSharedStoreConverter> { ) -> Service17CustomWrapper<
let test_srv_tm_sender = MpscTmInSharedPoolSender::new( MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
> {
let test_srv_tm_sender = TmInSharedPoolSenderWithId::new(
TmSenderId::PusTest as ChannelId, TmSenderId::PusTest as ChannelId,
"PUS_17_TM_SENDER", "PUS_17_TM_SENDER",
shared_tm_store.clone(), shared_tm_store.clone(),
@ -39,8 +47,8 @@ pub fn create_test_service_static(
pus_test_rx, pus_test_rx,
); );
let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new( let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new(
Box::new(test_srv_receiver), test_srv_receiver,
Box::new(test_srv_tm_sender), test_srv_tm_sender,
PUS_APID, PUS_APID,
verif_reporter.clone(), verif_reporter.clone(),
EcssTcInSharedStoreConverter::new(tc_pool, 2048), EcssTcInSharedStoreConverter::new(tc_pool, 2048),
@ -53,11 +61,16 @@ pub fn create_test_service_static(
pub fn create_test_service_dynamic( pub fn create_test_service_dynamic(
tm_funnel_tx: mpsc::Sender<Vec<u8>>, tm_funnel_tx: mpsc::Sender<Vec<u8>>,
verif_reporter: VerificationReporterWithSender, verif_reporter: VerificationReporterWithVecMpscSender,
event_sender: mpsc::Sender<(EventU32, Option<Params>)>, event_sender: mpsc::Sender<(EventU32, Option<Params>)>,
pus_test_rx: mpsc::Receiver<EcssTcAndToken>, pus_test_rx: mpsc::Receiver<EcssTcAndToken>,
) -> Service17CustomWrapper<EcssTcInVecConverter> { ) -> Service17CustomWrapper<
let test_srv_tm_sender = MpscTmAsVecSender::new( MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscSender,
> {
let test_srv_tm_sender = TmAsVecSenderWithId::new(
TmSenderId::PusTest as ChannelId, TmSenderId::PusTest as ChannelId,
"PUS_17_TM_SENDER", "PUS_17_TM_SENDER",
tm_funnel_tx.clone(), tm_funnel_tx.clone(),
@ -68,8 +81,8 @@ pub fn create_test_service_dynamic(
pus_test_rx, pus_test_rx,
); );
let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new( let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new(
Box::new(test_srv_receiver), test_srv_receiver,
Box::new(test_srv_tm_sender), test_srv_tm_sender,
PUS_APID, PUS_APID,
verif_reporter.clone(), verif_reporter.clone(),
EcssTcInVecConverter::default(), EcssTcInVecConverter::default(),
@ -80,12 +93,24 @@ pub fn create_test_service_dynamic(
} }
} }
pub struct Service17CustomWrapper<TcInMemConverter: EcssTcInMemConverter> { pub struct Service17CustomWrapper<
pub pus17_handler: PusService17TestHandler<TcInMemConverter, VerificationReporterWithSender>, TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> {
pub pus17_handler:
PusService17TestHandler<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
pub test_srv_event_sender: Sender<(EventU32, Option<Params>)>, pub test_srv_event_sender: Sender<(EventU32, Option<Params>)>,
} }
impl<TcInMemConverter: EcssTcInMemConverter> Service17CustomWrapper<TcInMemConverter> { impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> Service17CustomWrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
{
pub fn handle_next_packet(&mut self) -> bool { pub fn handle_next_packet(&mut self) -> bool {
let res = self.pus17_handler.handle_one_tc(); let res = self.pus17_handler.handle_one_tc();
if res.is_err() { if res.is_err() {

View File

@ -6,11 +6,14 @@ use std::{
use log::{info, warn}; use log::{info, warn};
use satrs::{ use satrs::{
hal::std::tcp_server::{ServerConfig, TcpSpacepacketsServer}, hal::std::tcp_server::{ServerConfig, TcpSpacepacketsServer},
pus::ReceivesEcssPusTc,
spacepackets::PacketId, spacepackets::PacketId,
tmtc::{CcsdsDistributor, CcsdsError, TmPacketSourceCore}, tmtc::{CcsdsDistributor, CcsdsError, ReceivesCcsdsTc, TmPacketSourceCore},
}; };
use satrs_example::config::PUS_APID; use satrs_example::config::PUS_APID;
use crate::ccsds::CcsdsReceiver;
pub const PACKET_ID_LOOKUP: &[PacketId] = &[PacketId::const_tc(true, PUS_APID)]; pub const PACKET_ID_LOOKUP: &[PacketId] = &[PacketId::const_tc(true, PUS_APID)];
#[derive(Default, Clone)] #[derive(Default, Clone)]
@ -69,20 +72,37 @@ impl TmPacketSourceCore for SyncTcpTmSource {
} }
} }
pub struct TcpTask<MpscErrorType: 'static> { pub type TcpServerType<TcSource, MpscErrorType> = TcpSpacepacketsServer<
server: TcpSpacepacketsServer<
(), (),
CcsdsError<MpscErrorType>, CcsdsError<MpscErrorType>,
SyncTcpTmSource, SyncTcpTmSource,
CcsdsDistributor<MpscErrorType>, CcsdsDistributor<CcsdsReceiver<TcSource, MpscErrorType>, MpscErrorType>,
>, >;
pub struct TcpTask<
TcSource: ReceivesCcsdsTc<Error = MpscErrorType>
+ ReceivesEcssPusTc<Error = MpscErrorType>
+ Clone
+ Send
+ 'static,
MpscErrorType: 'static,
> {
server: TcpServerType<TcSource, MpscErrorType>,
} }
impl<MpscErrorType: 'static + core::fmt::Debug> TcpTask<MpscErrorType> { impl<
TcSource: ReceivesCcsdsTc<Error = MpscErrorType>
+ ReceivesEcssPusTc<Error = MpscErrorType>
+ Clone
+ Send
+ 'static,
MpscErrorType: 'static + core::fmt::Debug,
> TcpTask<TcSource, MpscErrorType>
{
pub fn new( pub fn new(
cfg: ServerConfig, cfg: ServerConfig,
tm_source: SyncTcpTmSource, tm_source: SyncTcpTmSource,
tc_receiver: CcsdsDistributor<MpscErrorType>, tc_receiver: CcsdsDistributor<CcsdsReceiver<TcSource, MpscErrorType>, MpscErrorType>,
) -> Result<Self, std::io::Error> { ) -> Result<Self, std::io::Error> {
Ok(Self { Ok(Self {
server: TcpSpacepacketsServer::new( server: TcpSpacepacketsServer::new(

View File

@ -1,6 +1,6 @@
use std::{ use std::{
collections::HashMap, collections::HashMap,
sync::mpsc::{Receiver, Sender}, sync::mpsc::{self},
}; };
use log::info; use log::info;
@ -77,16 +77,16 @@ impl TmFunnelCommon {
pub struct TmFunnelStatic { pub struct TmFunnelStatic {
common: TmFunnelCommon, common: TmFunnelCommon,
shared_tm_store: SharedTmPool, shared_tm_store: SharedTmPool,
tm_funnel_rx: Receiver<StoreAddr>, tm_funnel_rx: mpsc::Receiver<StoreAddr>,
tm_server_tx: Sender<StoreAddr>, tm_server_tx: mpsc::SyncSender<StoreAddr>,
} }
impl TmFunnelStatic { impl TmFunnelStatic {
pub fn new( pub fn new(
shared_tm_store: SharedTmPool, shared_tm_store: SharedTmPool,
sync_tm_tcp_source: SyncTcpTmSource, sync_tm_tcp_source: SyncTcpTmSource,
tm_funnel_rx: Receiver<StoreAddr>, tm_funnel_rx: mpsc::Receiver<StoreAddr>,
tm_server_tx: Sender<StoreAddr>, tm_server_tx: mpsc::SyncSender<StoreAddr>,
) -> Self { ) -> Self {
Self { Self {
common: TmFunnelCommon::new(sync_tm_tcp_source), common: TmFunnelCommon::new(sync_tm_tcp_source),
@ -123,15 +123,15 @@ impl TmFunnelStatic {
pub struct TmFunnelDynamic { pub struct TmFunnelDynamic {
common: TmFunnelCommon, common: TmFunnelCommon,
tm_funnel_rx: Receiver<Vec<u8>>, tm_funnel_rx: mpsc::Receiver<Vec<u8>>,
tm_server_tx: Sender<Vec<u8>>, tm_server_tx: mpsc::Sender<Vec<u8>>,
} }
impl TmFunnelDynamic { impl TmFunnelDynamic {
pub fn new( pub fn new(
sync_tm_tcp_source: SyncTcpTmSource, sync_tm_tcp_source: SyncTcpTmSource,
tm_funnel_rx: Receiver<Vec<u8>>, tm_funnel_rx: mpsc::Receiver<Vec<u8>>,
tm_server_tx: Sender<Vec<u8>>, tm_server_tx: mpsc::Sender<Vec<u8>>,
) -> Self { ) -> Self {
Self { Self {
common: TmFunnelCommon::new(sync_tm_tcp_source), common: TmFunnelCommon::new(sync_tm_tcp_source),

View File

@ -1,7 +1,10 @@
use log::warn; use log::warn;
use satrs::pus::verification::std_mod::{
VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
};
use satrs::pus::{EcssTcAndToken, ReceivesEcssPusTc}; use satrs::pus::{EcssTcAndToken, ReceivesEcssPusTc};
use satrs::spacepackets::SpHeader; use satrs::spacepackets::SpHeader;
use std::sync::mpsc::{self, Receiver, SendError, Sender, TryRecvError}; use std::sync::mpsc::{self, Receiver, SendError, Sender, SyncSender, TryRecvError};
use thiserror::Error; use thiserror::Error;
use crate::pus::PusReceiver; use crate::pus::PusReceiver;
@ -37,7 +40,7 @@ impl SharedTcPool {
#[derive(Clone)] #[derive(Clone)]
pub struct PusTcSourceProviderSharedPool { pub struct PusTcSourceProviderSharedPool {
pub tc_source: Sender<StoreAddr>, pub tc_source: SyncSender<StoreAddr>,
pub shared_pool: SharedTcPool, pub shared_pool: SharedTcPool,
} }
@ -97,14 +100,14 @@ pub struct TcSourceTaskStatic {
shared_tc_pool: SharedTcPool, shared_tc_pool: SharedTcPool,
tc_receiver: Receiver<StoreAddr>, tc_receiver: Receiver<StoreAddr>,
tc_buf: [u8; 4096], tc_buf: [u8; 4096],
pus_receiver: PusReceiver, pus_receiver: PusReceiver<VerificationReporterWithSharedPoolMpscBoundedSender>,
} }
impl TcSourceTaskStatic { impl TcSourceTaskStatic {
pub fn new( pub fn new(
shared_tc_pool: SharedTcPool, shared_tc_pool: SharedTcPool,
tc_receiver: Receiver<StoreAddr>, tc_receiver: Receiver<StoreAddr>,
pus_receiver: PusReceiver, pus_receiver: PusReceiver<VerificationReporterWithSharedPoolMpscBoundedSender>,
) -> Self { ) -> Self {
Self { Self {
shared_tc_pool, shared_tc_pool,
@ -161,11 +164,14 @@ impl TcSourceTaskStatic {
// TC source components where the heap is the backing memory of the received telecommands. // TC source components where the heap is the backing memory of the received telecommands.
pub struct TcSourceTaskDynamic { pub struct TcSourceTaskDynamic {
pub tc_receiver: Receiver<Vec<u8>>, pub tc_receiver: Receiver<Vec<u8>>,
pus_receiver: PusReceiver, pus_receiver: PusReceiver<VerificationReporterWithVecMpscSender>,
} }
impl TcSourceTaskDynamic { impl TcSourceTaskDynamic {
pub fn new(tc_receiver: Receiver<Vec<u8>>, pus_receiver: PusReceiver) -> Self { pub fn new(
tc_receiver: Receiver<Vec<u8>>,
pus_receiver: PusReceiver<VerificationReporterWithVecMpscSender>,
) -> Self {
Self { Self {
tc_receiver, tc_receiver,
pus_receiver, pus_receiver,

23
satrs-minisim/Cargo.toml Normal file
View File

@ -0,0 +1,23 @@
[package]
name = "satrs-minisim"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
serde = { version = "1", features = ["derive"] }
serde_json = "1"
log = "0.4"
[dependencies.asynchronix]
version = "0.2.1"
# path = "../../asynchronix/asynchronix"
# git = "https://github.com/us-irs/asynchronix.git"
# branch = "clock-not-sendable"
[dependencies.satrs]
path = "../satrs"
[dev-dependencies]
delegate = "0.12"

261
satrs-minisim/src/acs.rs Normal file
View File

@ -0,0 +1,261 @@
use std::{f32::consts::PI, sync::mpsc, time::Duration};
use asynchronix::{
model::{Model, Output},
time::Scheduler,
};
use satrs::power::SwitchStateBinary;
use satrs_minisim::{
acs::{MgmSensorValues, MgtDipole, MgtReply, MGT_GEN_MAGNETIC_FIELD},
SimDevice, SimReply,
};
use crate::time::current_millis;
// Earth's magnetic field at the surface varies between roughly -30 uT and 30 uT.
// NOTE(review): 0.03 presumably expresses that amplitude in mT (30 uT == 0.03 mT) — TODO confirm units.
const AMPLITUDE_MGM: f32 = 0.03;
// Let's start with a simple frequency here (full sine period of 1 Hz).
const FREQUENCY_MGM: f32 = 1.0;
// Phase offset for the X axis.
const PHASE_X: f32 = 0.0;
// Different phases to have different values on the other axes.
const PHASE_Y: f32 = 0.1;
const PHASE_Z: f32 = 0.2;
/// Simple model for a magnetometer where the measured magnetic fields are modeled with sine waves.
///
/// Please note that a more realistic MGM model would include the following components
/// which are not included here to simplify the model:
///
/// 1. It would probably generate signed [i16] values which need to be converted to SI units
///    because it is a digital sensor
/// 2. It would sample the magnetic field at a high fixed rate. This might not be possible for
///    a general purpose OS, but self-sampling at a relatively high rate (20-40 ms) might
///    still be possible.
pub struct MagnetometerModel {
    /// Power switch state; the sine-wave field model only produces non-zero values when `On`.
    pub switch_state: SwitchStateBinary,
    /// Sampling period of the sensor. NOTE(review): not read anywhere in the visible model
    /// code — presumably used by the scheduling setup elsewhere; TODO confirm.
    pub periodicity: Duration,
    /// When set, this field overrides the default sine-wave model (e.g. set by a nearby
    /// torquing magnetorquer via `apply_external_magnetic_field`).
    pub external_mag_field: Option<MgmSensorValues>,
    /// Channel used to publish JSON-serialized sensor values as [SimReply] messages.
    pub reply_sender: mpsc::Sender<SimReply>,
}
impl MagnetometerModel {
    /// Creates a switched-off magnetometer model with no external magnetic field applied.
    pub fn new(periodicity: Duration, reply_sender: mpsc::Sender<SimReply>) -> Self {
        Self {
            switch_state: SwitchStateBinary::Off,
            periodicity,
            external_mag_field: None,
            reply_sender,
        }
    }

    /// Updates the power switch state of the device.
    pub async fn switch_device(&mut self, switch_state: SwitchStateBinary) {
        self.switch_state = switch_state;
    }

    /// Samples the modeled magnetic field at the current simulation time and publishes the
    /// values as a JSON-serialized [SimReply].
    ///
    /// Panics if serialization fails or if the reply channel is disconnected.
    pub async fn send_sensor_values(&mut self, _: (), scheduler: &Scheduler<Self>) {
        // NOTE(review): removed a leftover debug println! of the current monotonic time here;
        // it wrote straight to stdout on every sample.
        let value = self.calculate_current_mgm_tuple(current_millis(scheduler.time()));
        let reply = SimReply {
            device: SimDevice::Mgm,
            reply: serde_json::to_string(&value).expect("serializing MGM sensor values failed"),
        };
        self.reply_sender
            .send(reply)
            .expect("sending MGM sensor values failed");
    }

    /// Devices like magnetorquers generate a strong magnetic field which overrides the default
    /// model for the measured magnetic field.
    pub async fn apply_external_magnetic_field(&mut self, field: MgmSensorValues) {
        self.external_mag_field = Some(field);
    }

    /// Returns the modeled field at the given time: all zeros while the device is off, the
    /// externally applied field when one is set, and a per-axis phase-shifted sine wave otherwise.
    fn calculate_current_mgm_tuple(&mut self, time_ms: u64) -> MgmSensorValues {
        if SwitchStateBinary::On == self.switch_state {
            if let Some(ext_field) = self.external_mag_field {
                return ext_field;
            }
            let base_sin_val = 2.0 * PI * FREQUENCY_MGM * (time_ms as f32 / 1000.0);
            return MgmSensorValues {
                x: AMPLITUDE_MGM * (base_sin_val + PHASE_X).sin(),
                y: AMPLITUDE_MGM * (base_sin_val + PHASE_Y).sin(),
                z: AMPLITUDE_MGM * (base_sin_val + PHASE_Z).sin(),
            };
        }
        MgmSensorValues {
            x: 0.0,
            y: 0.0,
            z: 0.0,
        }
    }
}
// Empty marker impl: lets the magnetometer participate as an asynchronix simulation model.
impl Model for MagnetometerModel {}
/// Simple model for a magnetorquer (MGT) which generates a magnetic field while it is
/// switched on and torquing.
pub struct MagnetorquerModel {
    // Power switch state of the device.
    switch_state: SwitchStateBinary,
    // True while a commanded torque is being applied.
    torquing: bool,
    // Dipole commanded for the current torque operation.
    torque_dipole: MgtDipole,
    /// Output port propagating the generated magnetic field to other models (e.g. the MGM).
    pub gen_magnetic_field: Output<MgmSensorValues>,
    // Channel used to publish [SimReply] messages such as housekeeping data.
    reply_sender: mpsc::Sender<SimReply>,
}
impl MagnetorquerModel {
    /// Creates a switched-off, non-torquing magnetorquer model.
    pub fn new(reply_sender: mpsc::Sender<SimReply>) -> Self {
        Self {
            switch_state: SwitchStateBinary::Off,
            torquing: false,
            torque_dipole: MgtDipole::default(),
            gen_magnetic_field: Output::new(),
            reply_sender,
        }
    }

    /// Starts applying the given dipole, schedules the torque to be cleared after the given
    /// duration, and immediately propagates the generated magnetic field.
    pub async fn apply_torque(
        &mut self,
        duration_and_dipole: (Duration, MgtDipole),
        scheduler: &Scheduler<Self>,
    ) {
        self.torque_dipole = duration_and_dipole.1;
        self.torquing = true;
        // Scheduling only fails for non-future times; in that case the torque is simply
        // never cleared automatically, so warn instead of panicking.
        if scheduler
            .schedule_event(duration_and_dipole.0, Self::clear_torque, ())
            .is_err()
        {
            log::warn!("torque clearing can only be set for a future time.");
        }
        self.generate_magnetic_field(()).await;
    }

    /// Stops torquing, resets the dipole and updates the propagated magnetic field.
    pub async fn clear_torque(&mut self, _: ()) {
        self.torque_dipole = MgtDipole::default();
        self.torquing = false;
        self.generate_magnetic_field(()).await;
    }

    /// Updates the power switch state and re-evaluates the generated magnetic field.
    pub async fn switch_device(&mut self, switch_state: SwitchStateBinary) {
        self.switch_state = switch_state;
        self.generate_magnetic_field(()).await;
    }

    /// Schedules the housekeeping reply to be sent 15 ms in the future, modeling a small
    /// device processing delay.
    pub async fn request_housekeeping_data(&mut self, _: (), scheduler: &Scheduler<Self>) {
        scheduler
            .schedule_event(Duration::from_millis(15), Self::send_housekeeping_data, ())
            .expect("requesting housekeeping data failed")
    }

    /// Publishes the current torque dipole as an MGT housekeeping [SimReply].
    ///
    /// Panics if the reply channel is disconnected.
    pub fn send_housekeeping_data(&mut self) {
        let mgt_reply = MgtReply::Hk(self.torque_dipole);
        self.reply_sender
            .send(SimReply::new(SimDevice::Mgt, mgt_reply))
            // NOTE(review): was a bare unwrap(); every other send in this file carries an
            // expect message, so match that style.
            .expect("sending MGT housekeeping reply failed");
    }

    fn calc_magnetic_field(&self, _: MgtDipole) -> MgmSensorValues {
        // Simplified model: Just returns some fixed magnetic field for now.
        // Later, we could make this more fancy by incorporating the commanded dipole.
        MGT_GEN_MAGNETIC_FIELD
    }

    /// A torquing magnetorquer generates a magnetic field. This function can be used to apply
    /// the magnetic field. It is a no-op unless the device is both switched on and torquing.
    async fn generate_magnetic_field(&mut self, _: ()) {
        if self.switch_state != SwitchStateBinary::On || !self.torquing {
            return;
        }
        self.gen_magnetic_field
            .send(self.calc_magnetic_field(self.torque_dipole))
            .await;
    }
}
// Empty marker impl: lets the magnetorquer participate as an asynchronix simulation model.
impl Model for MagnetorquerModel {}
#[cfg(test)]
pub mod tests {
use std::time::Duration;
use satrs::power::SwitchStateBinary;
use satrs_minisim::{
acs::{MgmRequest, MgmSensorValues},
SimDevice, SimRequest,
};
use crate::{
eps::{self, PcduRequest},
test_helpers::SimTestbench,
};
/// A sensor-data request to a freshly created (and therefore switched-off) MGM must
/// yield an all-zero field reading.
#[test]
fn test_basic_mgm_request() {
    let mut testbench = SimTestbench::new();
    let request = SimRequest::new(SimDevice::Mgm, MgmRequest::RequestSensorData);
    testbench
        .send_request(request)
        .expect("sending MGM request failed");
    testbench.handle_sim_requests();
    testbench.step();
    let maybe_reply = testbench.try_receive_next_reply();
    assert!(maybe_reply.is_some());
    let reply = maybe_reply.unwrap();
    assert_eq!(reply.device, SimDevice::Mgm);
    let values: MgmSensorValues = serde_json::from_str(&reply.reply)
        .expect("failed to deserialize MGM sensor values");
    assert_eq!(values.x, 0.0);
    assert_eq!(values.y, 0.0);
    assert_eq!(values.z, 0.0);
}
#[test]
fn test_basic_mgm_request_switched_on() {
let mut sim_testbench = SimTestbench::new();
let pcdu_request = PcduRequest::SwitchDevice {
switch: eps::PcduSwitch::Mgm,
state: SwitchStateBinary::On,
};
let mut request = SimRequest::new(SimDevice::Pcdu, pcdu_request);
sim_testbench
.send_request(request)
.expect("sending MGM switch request failed");
sim_testbench.handle_sim_requests();
sim_testbench.step();
let mut sim_reply_res = sim_testbench.try_receive_next_reply();
assert!(sim_reply_res.is_none());
let mgm_request = MgmRequest::RequestSensorData;
request = SimRequest::new(SimDevice::Mgm, mgm_request);
sim_testbench
.send_request(request)
.expect("sending MGM request failed");
sim_testbench.handle_sim_requests();
sim_testbench.step();
sim_reply_res = sim_testbench.try_receive_next_reply();
assert!(sim_reply_res.is_some());
let mut sim_reply = sim_reply_res.unwrap();
assert_eq!(sim_reply.device, SimDevice::Mgm);
let first_reply: MgmSensorValues = serde_json::from_str(&sim_reply.reply)
.expect("failed to deserialize MGM sensor values");
let mgm_request = MgmRequest::RequestSensorData;
sim_testbench.step_by(Duration::from_millis(50));
request = SimRequest::new(SimDevice::Mgm, mgm_request);
sim_testbench
.send_request(request)
.expect("sending MGM request failed");
sim_testbench.handle_sim_requests();
sim_testbench.step();
sim_reply_res = sim_testbench.try_receive_next_reply();
assert!(sim_reply_res.is_some());
sim_reply = sim_reply_res.unwrap();
let second_reply: MgmSensorValues = serde_json::from_str(&sim_reply.reply)
.expect("failed to deserialize MGM sensor values");
// Check that the values are changing.
assert!(first_reply != second_reply);
}
#[test]
fn test_mgm_request_with_mgt_switched_on() {}
}

View File

@ -0,0 +1,145 @@
use std::{sync::mpsc, time::Duration};
use asynchronix::{
simulation::{Address, Simulation},
time::{Clock, MonotonicTime, SystemClock},
};
use satrs_minisim::{
acs::{MgmRequest, MgtRequest},
SimRequest,
};
use crate::{
acs::{MagnetometerModel, MagnetorquerModel},
eps::{PcduModel, PcduRequest},
};
// The simulation controller processes requests and drives the simulation.
pub struct SimController {
    /// Real-time clock used to pace the simulation against wall-clock time.
    pub sys_clock: SystemClock,
    /// Channel on which JSON-encoded device requests arrive (e.g. from the UDP TC server).
    pub request_receiver: mpsc::Receiver<SimRequest>,
    pub simulation: Simulation,
    // Addresses used to send input events to the individual device models.
    pub mgm_addr: Address<MagnetometerModel>,
    pub pcdu_addr: Address<PcduModel>,
    pub mgt_addr: Address<MagnetorquerModel>,
}
impl SimController {
pub fn new(
sys_clock: SystemClock,
request_receiver: mpsc::Receiver<SimRequest>,
simulation: Simulation,
mgm_addr: Address<MagnetometerModel>,
pcdu_addr: Address<PcduModel>,
mgt_addr: Address<MagnetorquerModel>,
) -> Self {
Self {
sys_clock,
request_receiver,
simulation,
mgm_addr,
pcdu_addr,
mgt_addr,
}
}
pub fn run(&mut self, start_time: MonotonicTime, udp_polling_interval_ms: u64) {
let mut t = start_time + Duration::from_millis(udp_polling_interval_ms);
self.sys_clock.synchronize(t);
loop {
// Check for UDP requests every millisecond. Shift the simulator ahead here to prevent
// replies lying in the past.
t += Duration::from_millis(udp_polling_interval_ms);
self.simulation
.step_until(t)
.expect("simulation step failed");
self.handle_sim_requests();
self.sys_clock.synchronize(t);
}
}
pub fn handle_sim_requests(&mut self) {
loop {
match self.request_receiver.try_recv() {
Ok(request) => match request.device() {
satrs_minisim::SimDevice::Mgm => self.handle_mgm_request(request.request()),
satrs_minisim::SimDevice::Mgt => self.handle_mgt_request(request.request()),
satrs_minisim::SimDevice::Pcdu => self.handle_pcdu_request(request.request()),
},
Err(e) => match e {
mpsc::TryRecvError::Empty => break,
mpsc::TryRecvError::Disconnected => {
panic!("all request sender disconnected")
}
},
}
}
}
fn handle_mgm_request(&mut self, request: &str) {
let mgm_request: serde_json::Result<MgmRequest> = serde_json::from_str(request);
if mgm_request.is_err() {
log::warn!("received invalid MGM request: {}", mgm_request.unwrap_err());
return;
}
let mgm_request = mgm_request.unwrap();
match mgm_request {
MgmRequest::RequestSensorData => {
self.simulation.send_event(
MagnetometerModel::send_sensor_values,
(),
&self.mgm_addr,
);
}
}
}
fn handle_pcdu_request(&mut self, request: &str) {
let pcdu_request: serde_json::Result<PcduRequest> = serde_json::from_str(request);
if pcdu_request.is_err() {
log::warn!(
"received invalid PCDU request: {}",
pcdu_request.unwrap_err()
);
return;
}
let pcdu_request = pcdu_request.unwrap();
match pcdu_request {
PcduRequest::RequestSwitchInfo => {
self.simulation
.send_event(PcduModel::request_switch_info, (), &self.pcdu_addr);
}
PcduRequest::SwitchDevice { switch, state } => {
self.simulation.send_event(
PcduModel::switch_device,
(switch, state),
&self.pcdu_addr,
);
}
}
}
fn handle_mgt_request(&mut self, request: &str) {
let mgt_request: serde_json::Result<MgtRequest> = serde_json::from_str(request);
if mgt_request.is_err() {
log::warn!(
"received invalid PCDU request: {}",
mgt_request.unwrap_err()
);
return;
}
let mgt_request = mgt_request.unwrap();
match mgt_request {
MgtRequest::ApplyTorque { duration, dipole } => self.simulation.send_event(
MagnetorquerModel::apply_torque,
(duration, dipole),
&self.mgt_addr,
),
MgtRequest::RequestHk => self.simulation.send_event(
MagnetorquerModel::request_housekeeping_data,
(),
&self.mgt_addr,
),
}
}
}

172
satrs-minisim/src/eps.rs Normal file
View File

@ -0,0 +1,172 @@
use std::{collections::HashMap, sync::mpsc, time::Duration};
use asynchronix::{
model::{Model, Output},
time::Scheduler,
};
use satrs::power::SwitchStateBinary;
use satrs_minisim::{SimDevice, SimReply};
use serde::{Deserialize, Serialize};
/// Simulated delay between a switch info request and the corresponding reply.
pub const SWITCH_INFO_DELAY_MS: u64 = 10;
/// Maps each PCDU switch to its current state.
pub type SwitchMap = HashMap<PcduSwitch, SwitchStateBinary>;
/// Power switches controlled by the PCDU model.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub enum PcduSwitch {
    Mgm = 0,
    Mgt = 1,
}
/// Requests handled by the PCDU model.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum PcduRequest {
    /// Sets the given switch to the given target state.
    SwitchDevice {
        switch: PcduSwitch,
        state: SwitchStateBinary,
    },
    /// Requests the current state of all switches.
    RequestSwitchInfo,
}
/// Simulation model of the PCDU which controls the power switches of the other devices.
pub struct PcduModel {
    /// Current state of all switches.
    pub switcher_map: SwitchMap,
    /// Output connected to the magnetometer's switch input.
    pub mgm_switch: Output<SwitchStateBinary>,
    /// Output connected to the magnetorquer's switch input.
    pub mgt_switch: Output<SwitchStateBinary>,
    /// Channel used to send replies back towards the client.
    pub reply_sender: mpsc::Sender<SimReply>,
}
impl PcduModel {
    /// Creates a new PCDU model with both switches initialized to the off state.
    pub fn new(reply_sender: mpsc::Sender<SimReply>) -> Self {
        let mut switcher_map = HashMap::new();
        for switch in [PcduSwitch::Mgm, PcduSwitch::Mgt] {
            switcher_map.insert(switch, SwitchStateBinary::Off);
        }
        Self {
            switcher_map,
            mgm_switch: Output::new(),
            mgt_switch: Output::new(),
            reply_sender,
        }
    }

    /// Schedules the switch info reply [SWITCH_INFO_DELAY_MS] milliseconds into the
    /// simulated future, modelling the device's processing delay.
    pub async fn request_switch_info(&mut self, _: (), scheduler: &Scheduler<Self>) {
        let delay = Duration::from_millis(SWITCH_INFO_DELAY_MS);
        scheduler
            .schedule_event(delay, Self::send_switch_info, ())
            .expect("requesting switch info failed");
    }

    /// Sends a snapshot of the current switch map back as a PCDU reply.
    pub fn send_switch_info(&mut self) {
        let switch_info = self.switcher_map.clone();
        let reply = SimReply::new(SimDevice::Pcdu, switch_info);
        self.reply_sender.send(reply).unwrap();
    }

    /// Sets the given switch to the target state and forwards the new state to the
    /// connected device model.
    pub async fn switch_device(
        &mut self,
        switch_and_target_state: (PcduSwitch, SwitchStateBinary),
    ) {
        let (switch, target_state) = switch_and_target_state;
        let entry = self
            .switcher_map
            .get_mut(&switch)
            .unwrap_or_else(|| panic!("switch {:?} not found", switch));
        *entry = target_state;
        // Select the output wired to the switched device.
        let output = match switch {
            PcduSwitch::Mgm => &mut self.mgm_switch,
            PcduSwitch::Mgt => &mut self.mgt_switch,
        };
        output.send(target_state).await;
    }
}
impl Model for PcduModel {}
#[cfg(test)]
pub mod tests {
    use super::*;
    use std::time::Duration;

    use satrs_minisim::{SimDevice, SimRequest};

    use crate::test_helpers::SimTestbench;

    /// Requests the current switch info from the PCDU and asserts that it matches
    /// `expected_switch_map`.
    fn check_switch_state(sim_testbench: &mut SimTestbench, expected_switch_map: &SwitchMap) {
        let pcdu_request = PcduRequest::RequestSwitchInfo;
        let request = SimRequest::new(SimDevice::Pcdu, pcdu_request);
        sim_testbench
            .send_request(request)
            // Bugfix: the original expect message wrongly mentioned a MGM request.
            .expect("sending PCDU switch info request failed");
        sim_testbench.handle_sim_requests();
        sim_testbench.step();
        let sim_reply = sim_testbench.try_receive_next_reply();
        assert!(sim_reply.is_some());
        let sim_reply = sim_reply.unwrap();
        assert_eq!(sim_reply.device, SimDevice::Pcdu);
        let switch_map: super::SwitchMap =
            serde_json::from_str(&sim_reply.reply).expect("failed to deserialize PCDU switch info");
        assert_eq!(&switch_map, expected_switch_map);
    }

    /// Expected initial switch map: every switch off.
    fn get_all_off_switch_map() -> SwitchMap {
        let mut switcher_map = SwitchMap::new();
        switcher_map.insert(super::PcduSwitch::Mgm, super::SwitchStateBinary::Off);
        switcher_map.insert(super::PcduSwitch::Mgt, super::SwitchStateBinary::Off);
        switcher_map
    }

    /// Switches a single switch on and verifies the resulting switch map.
    fn test_pcdu_switching_single_switch(switch: PcduSwitch) {
        let mut sim_testbench = SimTestbench::new();
        let pcdu_request = PcduRequest::SwitchDevice {
            switch,
            state: SwitchStateBinary::On,
        };
        let request = SimRequest::new(SimDevice::Pcdu, pcdu_request);
        sim_testbench
            .send_request(request)
            // Bugfix: the original expect message wrongly mentioned a MGM request.
            .expect("sending PCDU switch request failed");
        sim_testbench.handle_sim_requests();
        sim_testbench.step();
        let mut switcher_map = get_all_off_switch_map();
        *switcher_map.get_mut(&switch).unwrap() = SwitchStateBinary::On;
        check_switch_state(&mut sim_testbench, &switcher_map);
    }

    /// The switch info reply must only become available after the simulated
    /// [SWITCH_INFO_DELAY_MS] processing delay.
    #[test]
    fn test_pcdu_switcher_request() {
        let mut sim_testbench = SimTestbench::new();
        let pcdu_request = PcduRequest::RequestSwitchInfo;
        let request = SimRequest::new(SimDevice::Pcdu, pcdu_request);
        sim_testbench
            .send_request(request)
            // Bugfix: the original expect message wrongly mentioned a MGM request.
            .expect("sending PCDU switch info request failed");
        sim_testbench.handle_sim_requests();
        sim_testbench.step_by(Duration::from_millis(1));
        let sim_reply = sim_testbench.try_receive_next_reply();
        assert!(sim_reply.is_none());
        // The reply is scheduled SWITCH_INFO_DELAY_MS (10 ms) after the request; the
        // original comment wrongly claimed 20 ms. Step well past the delay.
        sim_testbench.step_by(Duration::from_millis(25));
        let sim_reply = sim_testbench.try_receive_next_reply();
        assert!(sim_reply.is_some());
        let sim_reply = sim_reply.unwrap();
        assert_eq!(sim_reply.device, SimDevice::Pcdu);
        let switch_map: super::SwitchMap =
            serde_json::from_str(&sim_reply.reply).expect("failed to deserialize PCDU switch info");
        assert_eq!(switch_map, get_all_off_switch_map());
    }

    #[test]
    fn test_pcdu_switching_mgm() {
        test_pcdu_switching_single_switch(PcduSwitch::Mgm);
    }

    #[test]
    fn test_pcdu_switching_mgt() {
        test_pcdu_switching_single_switch(PcduSwitch::Mgt);
    }
}

99
satrs-minisim/src/lib.rs Normal file
View File

@ -0,0 +1,99 @@
use serde::{Deserialize, Serialize};
/// All devices supported by the mini simulator.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum SimDevice {
    Mgm,
    Mgt,
    Pcdu,
}
/// A request addressed to a simulated device, with the device-specific payload
/// serialized as a JSON string.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SimRequest {
    device: SimDevice,
    request: String,
}
impl SimRequest {
    /// Creates a new request for the given device, serializing the request payload
    /// to JSON.
    // Fix: the payload parameter was misleadingly named `reply` before.
    pub fn new<T: Serialize>(device: SimDevice, request: T) -> Self {
        Self {
            device,
            request: serde_json::to_string(&request).unwrap(),
        }
    }

    /// The device this request is addressed to.
    pub fn device(&self) -> SimDevice {
        self.device
    }

    /// The JSON-serialized request payload.
    pub fn request(&self) -> &String {
        &self.request
    }
}
/// A reply from a simulated device, with the device-specific payload serialized as
/// a JSON string.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SimReply {
    pub device: SimDevice,
    pub reply: String,
}
impl SimReply {
    /// Creates a new reply for the given device, serializing the reply payload to JSON.
    pub fn new<T: Serialize>(device: SimDevice, reply: T) -> Self {
        Self {
            device,
            reply: serde_json::to_string(&reply).unwrap(),
        }
    }

    /// The JSON-serialized reply payload.
    pub fn reply(&self) -> &String {
        &self.reply
    }
}
/// Request and reply data structures for the ACS (attitude control system) devices.
pub mod acs {
    use std::time::Duration;

    use super::*;

    /// Requests handled by the magnetometer (MGM) model.
    #[derive(Debug, Copy, Clone, Serialize, Deserialize)]
    pub enum MgmRequest {
        RequestSensorData,
    }

    // Normally, small magnetometers generate their output as a signed 16 bit raw format or something
    // similar which needs to be converted to a signed float value with physical units. We will
    // simplify this now and generate the signed float values directly.
    #[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
    pub struct MgmSensorValues {
        pub x: f32,
        pub y: f32,
        pub z: f32,
    }

    /// Fixed magnetic field contribution of a torquing MGT.
    pub const MGT_GEN_MAGNETIC_FIELD: MgmSensorValues = MgmSensorValues {
        x: 0.03,
        y: -0.03,
        z: 0.03,
    };

    // Simple model using i16 values.
    #[derive(Default, Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
    pub struct MgtDipole {
        pub x: i16,
        pub y: i16,
        pub z: i16,
    }

    /// Requests handled by the magnetorquer (MGT) model.
    #[derive(Debug, Copy, Clone, Serialize, Deserialize)]
    pub enum MgtRequest {
        /// Applies the given dipole; the MGT clears it again after `duration`.
        ApplyTorque {
            duration: Duration,
            dipole: MgtDipole,
        },
        /// Requests MGT housekeeping data.
        RequestHk,
    }

    /// Replies generated by the MGT model.
    #[derive(Debug, Copy, Clone, Serialize, Deserialize)]
    pub enum MgtReply {
        /// Housekeeping data: the currently commanded torque dipole.
        Hk(MgtDipole),
    }
}

107
satrs-minisim/src/main.rs Normal file
View File

@ -0,0 +1,107 @@
use acs::{MagnetometerModel, MagnetorquerModel};
use asynchronix::simulation::{Mailbox, SimInit};
use asynchronix::time::{MonotonicTime, SystemClock};
use controller::SimController;
use eps::PcduModel;
use satrs_minisim::{SimReply, SimRequest};
use std::sync::mpsc;
use std::thread;
use std::time::{Duration, SystemTime};
use udp::{SharedSocketAddr, UdpTcServer, UdpTmClient};
mod acs;
mod controller;
mod eps;
#[cfg(test)]
mod test_helpers;
mod time;
mod udp;
/// Threading configuration for the asynchronix simulation executor.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ThreadingModel {
    /// Use the executor's default number of threads.
    Default = 0,
    /// Run the simulation on a single thread (used by the unit tests).
    Single = 1,
}
/// Instantiates all device models, wires them together and returns the fully
/// initialized simulation controller.
fn create_sim_controller(
    threading_model: ThreadingModel,
    start_time: MonotonicTime,
    reply_sender: mpsc::Sender<SimReply>,
    request_receiver: mpsc::Receiver<SimRequest>,
) -> SimController {
    // One mailbox per model so each model can receive input events.
    let mgm_mailbox = Mailbox::new();
    let mgm_addr = mgm_mailbox.address();
    let pcdu_mailbox = Mailbox::new();
    let pcdu_addr = pcdu_mailbox.address();
    let mgt_mailbox = Mailbox::new();
    let mgt_addr = mgt_mailbox.address();

    // Instantiate the device models.
    let mgm_model = MagnetometerModel::new(Duration::from_millis(50), reply_sender.clone());
    let mut pcdu_model = PcduModel::new(reply_sender.clone());
    let mut mgt_model = MagnetorquerModel::new(reply_sender.clone());

    // Wire the PCDU switch outputs to the switch inputs of the respective devices.
    pcdu_model
        .mgm_switch
        .connect(MagnetometerModel::switch_device, &mgm_addr);
    pcdu_model
        .mgt_switch
        .connect(MagnetorquerModel::switch_device, &mgt_addr);
    // The MGT field feeds into the field measured by the magnetometer.
    mgt_model
        .gen_magnetic_field
        .connect(MagnetometerModel::apply_external_magnetic_field, &mgm_addr);

    // Instantiate the simulator, optionally restricted to a single worker thread.
    let sys_clock = SystemClock::from_system_time(start_time, SystemTime::now());
    let sim_init = match threading_model {
        ThreadingModel::Single => SimInit::with_num_threads(1),
        ThreadingModel::Default => SimInit::new(),
    };
    let simulation = sim_init
        .add_model(mgm_model, mgm_mailbox)
        .add_model(pcdu_model, pcdu_mailbox)
        .add_model(mgt_model, mgt_mailbox)
        .init(start_time);
    SimController::new(
        sys_clock,
        request_receiver,
        simulation,
        mgm_addr,
        pcdu_addr,
        mgt_addr,
    )
}
/// Entry point: wires up the simulation controller, the UDP TC server and the UDP TM
/// client, then runs each of them on its own thread.
fn main() {
    let shared_socket_addr = SharedSocketAddr::default();
    let (request_sender, request_receiver) = mpsc::channel();
    let (reply_sender, reply_receiver) = mpsc::channel();
    let t0 = MonotonicTime::EPOCH;
    let mut sim_ctrl =
        create_sim_controller(ThreadingModel::Default, t0, reply_sender, request_receiver);
    let mut tc_server = UdpTcServer::new(request_sender, shared_socket_addr.clone()).unwrap();
    let mut tm_client = UdpTmClient::new(reply_receiver, 200, shared_socket_addr);

    // This thread schedules the simulator.
    let sim_thread = thread::spawn(move || sim_ctrl.run(t0, 1));
    // This thread manages the simulator UDP TC server.
    let udp_tc_thread = thread::spawn(move || tc_server.run());
    // This thread manages the simulator UDP TM client.
    let udp_tm_thread = thread::spawn(move || tm_client.run());

    sim_thread.join().expect("joining simulation thread failed");
    udp_tc_thread.join().expect("joining UDP TC thread failed");
    udp_tm_thread.join().expect("joining UDP TM thread failed");
}

View File

@ -0,0 +1,56 @@
use delegate::delegate;
use std::{sync::mpsc, time::Duration};
use asynchronix::time::MonotonicTime;
use satrs_minisim::{SimReply, SimRequest};
use crate::{controller::SimController, create_sim_controller, ThreadingModel};
/// Test helper bundling a single-threaded simulation controller with the request and
/// reply channel endpoints a client would normally use via UDP.
pub struct SimTestbench {
    pub sim_controller: SimController,
    pub reply_receiver: mpsc::Receiver<SimReply>,
    pub request_sender: mpsc::Sender<SimRequest>,
}
impl SimTestbench {
    /// Creates a testbench with a single-threaded simulation and fresh channels.
    pub fn new() -> Self {
        let (request_sender, request_receiver) = mpsc::channel();
        let (reply_sender, reply_receiver) = mpsc::channel();
        let sim_controller = create_sim_controller(
            ThreadingModel::Single,
            MonotonicTime::EPOCH,
            reply_sender,
            request_receiver,
        );
        Self {
            sim_controller,
            reply_receiver,
            request_sender,
        }
    }

    delegate! {
        to self.sim_controller {
            pub fn handle_sim_requests(&mut self);
        }
        to self.sim_controller.simulation {
            pub fn step(&mut self);
            pub fn step_by(&mut self, duration: Duration);
        }
    }

    /// Queues a request for the simulation controller.
    pub fn send_request(&self, request: SimRequest) -> Result<(), mpsc::SendError<SimRequest>> {
        self.request_sender.send(request)
    }

    /// Non-blocking fetch of the next available reply, if any. Panics when the reply
    /// sender side has disconnected.
    pub fn try_receive_next_reply(&self) -> Option<SimReply> {
        match self.reply_receiver.try_recv() {
            Ok(reply) => Some(reply),
            Err(mpsc::TryRecvError::Empty) => None,
            Err(mpsc::TryRecvError::Disconnected) => panic!("reply_receiver disconnected"),
        }
    }
}

View File

@ -0,0 +1,5 @@
use asynchronix::time::MonotonicTime;
/// Converts a simulation timestamp to integer milliseconds.
///
/// NOTE(review): `as_secs()` is cast with `as u64` — if the asynchronix second count is
/// signed and the time lies before the epoch, this cast wraps around. Presumably the
/// simulator only uses times at or after [MonotonicTime::EPOCH]; confirm.
pub fn current_millis(time: MonotonicTime) -> u64 {
    (time.as_secs() as u64 * 1000) + (time.subsec_nanos() as u64 / 1_000_000)
}

158
satrs-minisim/src/udp.rs Normal file
View File

@ -0,0 +1,158 @@
use std::{
collections::VecDeque,
net::{SocketAddr, UdpSocket},
sync::{mpsc, Arc, Mutex},
time::Duration,
};
use satrs_minisim::{SimReply, SimRequest};
pub type SharedSocketAddr = Arc<Mutex<Option<SocketAddr>>>;
// A UDP server which handles all TC received by a client application.
pub struct UdpTcServer {
    socket: UdpSocket,
    /// Valid requests are forwarded to the simulation controller through this channel.
    request_sender: mpsc::Sender<SimRequest>,
    /// Shared with the TM client so replies can be sent back to the last TC sender.
    shared_last_sender: SharedSocketAddr,
}
impl UdpTcServer {
    /// Creates a new UDP TC server bound to port 7303 on all interfaces.
    pub fn new(
        request_sender: mpsc::Sender<SimRequest>,
        shared_last_sender: SharedSocketAddr,
    ) -> std::io::Result<Self> {
        let socket = UdpSocket::bind("0.0.0.0:7303")?;
        Ok(Self {
            socket,
            request_sender,
            shared_last_sender,
        })
    }

    /// Blocking server loop: receives UDP datagrams, parses them as JSON [SimRequest]s
    /// and forwards valid requests to the simulation controller. Never returns.
    pub fn run(&mut self) {
        let mut last_socket_addr: Option<SocketAddr> = None;
        loop {
            // Buffer to store incoming data.
            let mut buffer = [0u8; 4096];
            // Block until data is received. `recv_from` returns the number of bytes read and the
            // sender's address.
            let (bytes_read, src) = self
                .socket
                .recv_from(&mut buffer)
                .expect("could not read from socket");
            // Convert the buffer into a string slice and print the message.
            let req_string = std::str::from_utf8(&buffer[..bytes_read])
                .expect("Could not write buffer as string");
            println!("Received from {}: {}", src, req_string);
            let sim_req: serde_json::Result<SimRequest> = serde_json::from_str(req_string);
            if sim_req.is_err() {
                log::warn!(
                    "received UDP request with invalid format: {}",
                    sim_req.unwrap_err()
                );
                continue;
            }
            self.request_sender.send(sim_req.unwrap()).unwrap();
            // Bugfix: the original only updated the shared address when a DIFFERENT sender
            // was already known, so the very first client was never stored and thus never
            // received any replies. Update the shared address whenever it changes, which
            // also avoids locking the mutex for every packet of an unchanged sender.
            if last_socket_addr != Some(src) {
                self.shared_last_sender.lock().unwrap().replace(src);
                last_socket_addr = Some(src);
            }
        }
    }
}
// A helper object which sends back all replies to the UDP client.
//
// This helper is scheduled separately to minimize the delay between the requests and replies.
pub struct UdpTmClient {
    reply_receiver: mpsc::Receiver<SimReply>,
    /// Buffer for replies which could not be sent out yet.
    reply_queue: VecDeque<SimReply>,
    /// Maximum queue length; the oldest reply is dropped when it would be exceeded.
    max_num_replies: usize,
    socket: UdpSocket,
    /// Last known TC sender address, written by the TC server.
    last_sender: SharedSocketAddr,
}
impl UdpTmClient {
    /// Creates a new UDP TM client which sends replies to the last known TC sender.
    ///
    /// `max_num_replies` bounds the internal reply queue: the oldest reply is dropped
    /// when the bound would be exceeded.
    pub fn new(
        reply_receiver: mpsc::Receiver<SimReply>,
        max_num_replies: usize,
        last_sender: SharedSocketAddr,
    ) -> Self {
        let socket =
            UdpSocket::bind("127.0.0.1:0").expect("creating UDP client for TM sender failed");
        Self {
            reply_receiver,
            reply_queue: VecDeque::new(),
            max_num_replies,
            socket,
            last_sender,
        }
    }

    /// Client main loop: queues incoming replies and sends them to the last known TC
    /// sender. Sleeps briefly when there was nothing to do, to avoid busy-waiting.
    pub fn run(&mut self) {
        loop {
            let processed_replies = self.process_replies();
            let last_sender_lock = self
                .last_sender
                .lock()
                .expect("locking last UDP sender failed");
            // Copy the address and drop the lock before doing any network I/O.
            let last_sender = *last_sender_lock;
            drop(last_sender_lock);
            let mut sent_replies = false;
            if let Some(last_sender) = last_sender {
                sent_replies = self.send_replies(last_sender);
            }
            if !processed_replies && !sent_replies {
                std::thread::sleep(Duration::from_millis(20));
            }
        }
    }

    /// Drains the reply channel into the bounded reply queue. Returns whether at least
    /// one reply was queued.
    fn process_replies(&mut self) -> bool {
        let mut processed_replies = false;
        loop {
            match self.reply_receiver.try_recv() {
                Ok(reply) => {
                    // Bounded queue: drop the oldest reply when full.
                    if self.reply_queue.len() >= self.max_num_replies {
                        self.reply_queue.pop_front();
                    }
                    self.reply_queue.push_back(reply);
                    processed_replies = true;
                }
                Err(e) => match e {
                    mpsc::TryRecvError::Empty => return processed_replies,
                    mpsc::TryRecvError::Disconnected => {
                        // Bugfix: the original only logged here and looped again, which
                        // turned a disconnected channel into an infinite busy loop of
                        // error logs. Panic instead, consistent with how the controller
                        // thread handles a fully disconnected channel.
                        panic!("all UDP reply senders disconnected");
                    }
                },
            }
        }
    }

    /// Sends all queued replies as JSON datagrams to `last_sender`. Returns whether at
    /// least one reply was sent.
    fn send_replies(&mut self, last_sender: SocketAddr) -> bool {
        let mut sent_replies = false;
        self.socket
            .connect(last_sender)
            .expect("connecting to last sender failed");
        while let Some(next_reply_to_send) = self.reply_queue.pop_front() {
            self.socket
                .send(
                    serde_json::to_string(&next_reply_to_send)
                        .unwrap()
                        .as_bytes(),
                )
                .expect("sending reply failed");
            sent_replies = true;
        }
        sent_replies
    }
}
#[cfg(test)]
mod tests {
    // TODO: This test is still an empty stub and needs an actual implementation.
    #[test]
    fn test_basic_udp_tc_reception() {}
}

View File

@ -8,6 +8,27 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
# [unreleased] # [unreleased]
## Changed
- Refactored `EventManager` to heavily use generics instead of trait objects.
- `SendEventProvider` -> `EventSendProvider`. `id` trait method renamed to `channel_id`.
- `ListenerTable` -> `ListenerMapProvider`
- `SenderTable` -> `SenderMapProvider`
- There is an `EventManagerWithMpsc` and a `EventManagerWithBoundedMpsc` helper type now.
- Refactored ECSS TM sender abstractions to be generic over different message queue backends.
- Refactored Verification Reporter abstractions and implementation to be generic over the sender
instead of using trait objects.
- `PusServiceProvider` renamed to `PusServiceDistributor` to make the purpose of the object
more clear
- `PusServiceProvider::handle_pus_tc_packet` renamed to `PusServiceDistributor::distribute_packet`.
- `PusServiceDistibutor` and `CcsdsDistributor` now use generics instead of trait objects.
This makes accessing the concrete trait implementations more easy as well.
## Fixed
- Update deprecated API for `PusScheduler::insert_wrapped_tc_cds_short`
and `PusScheduler::insert_wrapped_tc_cds_long`.
# [v0.2.0-rc.0] 2024-02-21 # [v0.2.0-rc.0] 2024-02-21
## Added ## Added

View File

@ -40,3 +40,24 @@ impl TargetedActionRequest {
} }
} }
} }
/// A reply to an action request.
#[non_exhaustive]
#[derive(Clone, Eq, PartialEq, Debug)]
pub enum ActionReply {
CompletionFailed(ActionId),
StepFailed {
id: ActionId,
step: u32,
},
Completed(ActionId),
#[cfg(feature = "alloc")]
CompletedStringId(alloc::string::String),
#[cfg(feature = "alloc")]
CompletionFailedStringId(alloc::string::String),
#[cfg(feature = "alloc")]
StepFailedStringId {
id: alloc::string::String,
step: u32,
},
}

View File

@ -7,7 +7,7 @@ use spacepackets::ByteConversionError;
use std::error::Error; use std::error::Error;
use std::path::Path; use std::path::Path;
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub use stdmod::*; pub use std_mod::*;
pub const CRC_32: Crc<u32> = Crc::<u32>::new(&CRC_32_CKSUM); pub const CRC_32: Crc<u32> = Crc::<u32>::new(&CRC_32_CKSUM);
@ -148,12 +148,11 @@ pub trait VirtualFilestore {
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub mod stdmod { pub mod std_mod {
use super::*; use super::*;
use std::{ use std::{
fs::{self, File, OpenOptions}, fs::{self, File, OpenOptions},
io::{BufReader, Read, Seek, SeekFrom, Write}, io::{BufReader, Read, Seek, SeekFrom, Write},
path::Path,
}; };
#[derive(Default)] #[derive(Default)]

View File

@ -10,27 +10,27 @@
//! [sat-rs book chapter](https://absatsw.irs.uni-stuttgart.de/projects/sat-rs/book/events.html) //! [sat-rs book chapter](https://absatsw.irs.uni-stuttgart.de/projects/sat-rs/book/events.html)
//! about events first: //! about events first:
//! //!
//! The event manager has a listener table abstracted by the [ListenerTable], which maps //! The event manager has a listener table abstracted by the [ListenerMapProvider], which maps
//! listener groups identified by [ListenerKey]s to a [sender ID][ChannelId]. //! listener groups identified by [ListenerKey]s to a [sender ID][ChannelId].
//! It also contains a sender table abstracted by the [SenderTable] which maps these sender IDs //! It also contains a sender table abstracted by the [SenderMapProvider] which maps these sender
//! to a concrete [SendEventProvider]s. A simple approach would be to use one send event provider //! IDs to concrete [EventSendProvider]s. A simple approach would be to use one send event provider
//! for each OBSW thread and then subscribe for all interesting events for a particular thread //! for each OBSW thread and then subscribe for all interesting events for a particular thread
//! using the send event provider ID. //! using the send event provider ID.
//! //!
//! This can be done with the [EventManager] like this: //! This can be done with the [EventManager] like this:
//! //!
//! 1. Provide a concrete [EventReceiver] implementation. This abstraction allow to use different //! 1. Provide a concrete [EventReceiveProvider] implementation. This abstraction allow to use different
//! message queue backends. A straightforward implementation where dynamic memory allocation is //! message queue backends. A straightforward implementation where dynamic memory allocation is
//! not a big concern could use [std::sync::mpsc::channel] to do this and is provided in //! not a big concern could use [std::sync::mpsc::channel] to do this and is provided in
//! form of the [MpscEventReceiver]. //! form of the [MpscEventReceiver].
//! 2. To set up event creators, create channel pairs using some message queue implementation. //! 2. To set up event creators, create channel pairs using some message queue implementation.
//! Each event creator gets a (cloned) sender component which allows it to send events to the //! Each event creator gets a (cloned) sender component which allows it to send events to the
//! manager. //! manager.
//! 3. The event manager receives the receiver component as part of a [EventReceiver] //! 3. The event manager receives the receiver component as part of a [EventReceiveProvider]
//! implementation so all events are routed to the manager. //! implementation so all events are routed to the manager.
//! 4. Create the [send event providers][SendEventProvider]s which allow routing events to //! 4. Create the [send event providers][EventSendProvider]s which allow routing events to
//! subscribers. You can now use their [sender IDs][SendEventProvider::id] to subscribe for //! subscribers. You can now use their [sender IDs][EventSendProvider::channel_id] to subscribe
//! event groups, for example by using the [EventManager::subscribe_single] method. //! for event groups, for example by using the [EventManager::subscribe_single] method.
//! 5. Add the send provider as well using the [EventManager::add_sender] call so the event //! 5. Add the send provider as well using the [EventManager::add_sender] call so the event
//! manager can route listener groups to a the send provider. //! manager can route listener groups to a the send provider.
//! //!
@ -41,24 +41,22 @@
//! //!
//! # Examples //! # Examples
//! //!
//! You can check [integration test](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs-core/tests/pus_events.rs) //! You can check [integration test](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs/tests/pus_events.rs)
//! for a concrete example using multi-threading where events are routed to //! for a concrete example using multi-threading where events are routed to
//! different threads. //! different threads.
use crate::events::{EventU16, EventU32, GenericEvent, LargestEventRaw, LargestGroupIdRaw}; use crate::events::{EventU16, EventU32, GenericEvent, LargestEventRaw, LargestGroupIdRaw};
use crate::params::{Params, ParamsHeapless}; use crate::params::{Params, ParamsHeapless};
#[cfg(feature = "alloc")] use crate::queue::GenericSendError;
use alloc::boxed::Box; use core::marker::PhantomData;
#[cfg(feature = "alloc")]
use alloc::vec;
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
use core::slice::Iter; use core::slice::Iter;
#[cfg(feature = "alloc")]
use hashbrown::HashMap;
use crate::ChannelId; use crate::ChannelId;
#[cfg(feature = "alloc")]
pub use alloc_mod::*;
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub use stdmod::*; pub use std_mod::*;
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)] #[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
pub enum ListenerKey { pub enum ListenerKey {
@ -75,108 +73,110 @@ pub type EventWithAuxData<Event> = (Event, Option<Params>);
pub type EventU32WithAuxData = EventWithAuxData<EventU32>; pub type EventU32WithAuxData = EventWithAuxData<EventU32>;
pub type EventU16WithAuxData = EventWithAuxData<EventU16>; pub type EventU16WithAuxData = EventWithAuxData<EventU16>;
pub trait SendEventProvider<Provider: GenericEvent, AuxDataProvider = Params> { pub trait EventSendProvider<EV: GenericEvent, AuxDataProvider = Params> {
type Error; fn channel_id(&self) -> ChannelId;
fn id(&self) -> ChannelId; fn send_no_data(&self, event: EV) -> Result<(), GenericSendError> {
fn send_no_data(&self, event: Provider) -> Result<(), Self::Error> {
self.send(event, None) self.send(event, None)
} }
fn send(&self, event: Provider, aux_data: Option<AuxDataProvider>) -> Result<(), Self::Error>;
fn send(&self, event: EV, aux_data: Option<AuxDataProvider>) -> Result<(), GenericSendError>;
} }
/// Generic abstraction for an event receiver. /// Generic abstraction for an event receiver.
pub trait EventReceiver<Event: GenericEvent, AuxDataProvider = Params> { pub trait EventReceiveProvider<Event: GenericEvent, AuxDataProvider = Params> {
/// This function has to be provided by any event receiver. A receive call may or may not return /// This function has to be provided by any event receiver. A call may or may not return
/// an event. /// an event and optional auxiliary data.
/// fn try_recv_event(&self) -> Option<(Event, Option<AuxDataProvider>)>;
/// To allow returning arbitrary additional auxiliary data, a mutable slice is passed to the
/// [Self::receive] call as well. Receivers can write data to this slice, but care must be taken
/// to avoid panics due to size missmatches or out of bound writes.
fn receive(&self) -> Option<(Event, Option<AuxDataProvider>)>;
} }
pub trait ListenerTable { pub trait ListenerMapProvider {
fn get_listeners(&self) -> Vec<ListenerKey>; #[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
fn get_listeners(&self) -> alloc::vec::Vec<ListenerKey>;
fn contains_listener(&self, key: &ListenerKey) -> bool; fn contains_listener(&self, key: &ListenerKey) -> bool;
fn get_listener_ids(&self, key: &ListenerKey) -> Option<Iter<ChannelId>>; fn get_listener_ids(&self, key: &ListenerKey) -> Option<Iter<ChannelId>>;
fn add_listener(&mut self, key: ListenerKey, sender_id: ChannelId) -> bool; fn add_listener(&mut self, key: ListenerKey, sender_id: ChannelId) -> bool;
fn remove_duplicates(&mut self, key: &ListenerKey); fn remove_duplicates(&mut self, key: &ListenerKey);
} }
pub trait SenderTable<SendProviderError, Event: GenericEvent = EventU32, AuxDataProvider = Params> { pub trait SenderMapProvider<
SP: EventSendProvider<EV, AUX>,
EV: GenericEvent = EventU32,
AUX = Params,
>
{
fn contains_send_event_provider(&self, id: &ChannelId) -> bool; fn contains_send_event_provider(&self, id: &ChannelId) -> bool;
fn get_send_event_provider(
&self, fn get_send_event_provider(&self, id: &ChannelId) -> Option<&SP>;
id: &ChannelId, fn add_send_event_provider(&mut self, send_provider: SP) -> bool;
) -> Option<&dyn SendEventProvider<Event, AuxDataProvider, Error = SendProviderError>>;
fn add_send_event_provider(
&mut self,
send_provider: Box<
dyn SendEventProvider<Event, AuxDataProvider, Error = SendProviderError>,
>,
) -> bool;
} }
/// Generic event manager implementation. /// Generic event manager implementation.
/// ///
/// # Generics /// # Generics
/// ///
/// * `SendProviderError`: [SendEventProvider] error type /// * `ERP`: [EventReceiveProvider] used to receive all events.
/// * `Event`: Concrete event provider, currently either [EventU32] or [EventU16] /// * `SMP`: [SenderMapProvider] which maps channel IDs to send providers.
/// * `AuxDataProvider`: Concrete auxiliary data provider, currently either [Params] or /// * `LTR`: [ListenerMapProvider] which maps listener keys to channel IDs.
/// [ParamsHeapless] /// * `SP`: [EventSendProvider] contained within the sender map which sends the events.
pub struct EventManager<SendProviderError, Event: GenericEvent = EventU32, AuxDataProvider = Params> /// * `EV`: The event type. This type must implement the [GenericEvent]. Currently only [EventU32]
{ /// and [EventU16] are supported.
listener_table: Box<dyn ListenerTable>, /// * `AUX`: Auxiliary data which is sent with the event to provide optional context information
sender_table: Box<dyn SenderTable<SendProviderError, Event, AuxDataProvider>>, pub struct EventManager<
event_receiver: Box<dyn EventReceiver<Event, AuxDataProvider>>, ERP: EventReceiveProvider<EV, AUX>,
SMP: SenderMapProvider<SP, EV, AUX>,
LTR: ListenerMapProvider,
SP: EventSendProvider<EV, AUX>,
EV: GenericEvent = EventU32,
AUX = Params,
> {
event_receiver: ERP,
sender_map: SMP,
listener_map: LTR,
phantom: core::marker::PhantomData<(SP, EV, AUX)>,
} }
/// Safety: It is safe to implement [Send] because all fields in the [EventManager] are [Send]
/// as well
#[cfg(feature = "std")]
unsafe impl<E, Event: GenericEvent + Send, AuxDataProvider: Send> Send
for EventManager<E, Event, AuxDataProvider>
{
}
#[cfg(feature = "std")]
pub type EventManagerWithMpscQueue<Event, AuxDataProvider> = EventManager<
std::sync::mpsc::SendError<(Event, Option<AuxDataProvider>)>,
Event,
AuxDataProvider,
>;
#[derive(Debug)] #[derive(Debug)]
pub enum EventRoutingResult<Event: GenericEvent, AuxDataProvider> { pub enum EventRoutingResult<EV: GenericEvent, AUX> {
/// No event was received /// No event was received
Empty, Empty,
/// An event was received and routed. /// An event was received and routed to listeners.
/// The first tuple entry will contain the number of recipients. Handled {
Handled(u32, Event, Option<AuxDataProvider>), num_recipients: u32,
event: EV,
aux_data: Option<AUX>,
},
} }
#[derive(Debug)] #[derive(Debug)]
pub enum EventRoutingError<E> { pub enum EventRoutingError {
SendError(E), Send(GenericSendError),
NoSendersForKey(ListenerKey), NoSendersForKey(ListenerKey),
NoSenderForId(ChannelId), NoSenderForId(ChannelId),
} }
#[derive(Debug)] #[derive(Debug)]
pub struct EventRoutingErrorsWithResult<Event: GenericEvent, AuxDataProvider, E> { pub struct EventRoutingErrorsWithResult<EV: GenericEvent, AUX> {
pub result: EventRoutingResult<Event, AuxDataProvider>, pub result: EventRoutingResult<EV, AUX>,
pub errors: [Option<EventRoutingError<E>>; 3], pub errors: [Option<EventRoutingError>; 3],
} }
impl<E, Event: GenericEvent + Copy> EventManager<E, Event> { impl<
ER: EventReceiveProvider<EV, AUX>,
S: SenderMapProvider<SP, EV, AUX>,
L: ListenerMapProvider,
SP: EventSendProvider<EV, AUX>,
EV: GenericEvent + Copy,
AUX: Clone,
> EventManager<ER, S, L, SP, EV, AUX>
{
pub fn remove_duplicates(&mut self, key: &ListenerKey) { pub fn remove_duplicates(&mut self, key: &ListenerKey) {
self.listener_table.remove_duplicates(key) self.listener_map.remove_duplicates(key)
} }
/// Subscribe for a unique event. /// Subscribe for a unique event.
pub fn subscribe_single(&mut self, event: &Event, sender_id: ChannelId) { pub fn subscribe_single(&mut self, event: &EV, sender_id: ChannelId) {
self.update_listeners(ListenerKey::Single(event.raw_as_largest_type()), sender_id); self.update_listeners(ListenerKey::Single(event.raw_as_largest_type()), sender_id);
} }
@ -194,49 +194,37 @@ impl<E, Event: GenericEvent + Copy> EventManager<E, Event> {
} }
} }
impl<E: 'static, Event: GenericEvent + Copy + 'static, AuxDataProvider: Clone + 'static> impl<
EventManager<E, Event, AuxDataProvider> ERP: EventReceiveProvider<EV, AUX>,
SMP: SenderMapProvider<SP, EV, AUX>,
LTR: ListenerMapProvider,
SP: EventSendProvider<EV, AUX>,
EV: GenericEvent + Copy,
AUX: Clone,
> EventManager<ERP, SMP, LTR, SP, EV, AUX>
{ {
/// Create an event manager where the sender table will be the [DefaultSenderTableProvider] pub fn new_with_custom_maps(event_receiver: ERP, sender_map: SMP, listener_map: LTR) -> Self {
/// and the listener table will be the [DefaultListenerTableProvider].
pub fn new(event_receiver: Box<dyn EventReceiver<Event, AuxDataProvider>>) -> Self {
let listener_table: Box<DefaultListenerTableProvider> = Box::default();
let sender_table: Box<DefaultSenderTableProvider<E, Event, AuxDataProvider>> =
Box::default();
Self::new_custom_tables(listener_table, sender_table, event_receiver)
}
}
impl<E, Event: GenericEvent + Copy, AuxDataProvider: Clone>
EventManager<E, Event, AuxDataProvider>
{
pub fn new_custom_tables(
listener_table: Box<dyn ListenerTable>,
sender_table: Box<dyn SenderTable<E, Event, AuxDataProvider>>,
event_receiver: Box<dyn EventReceiver<Event, AuxDataProvider>>,
) -> Self {
EventManager { EventManager {
listener_table, listener_map,
sender_table, sender_map,
event_receiver, event_receiver,
phantom: PhantomData,
} }
} }
pub fn add_sender( /// Add a new sender component which can be used to send events to subscribers.
&mut self, pub fn add_sender(&mut self, send_provider: SP) {
send_provider: impl SendEventProvider<Event, AuxDataProvider, Error = E> + 'static,
) {
if !self if !self
.sender_table .sender_map
.contains_send_event_provider(&send_provider.id()) .contains_send_event_provider(&send_provider.channel_id())
{ {
self.sender_table self.sender_map.add_send_event_provider(send_provider);
.add_send_event_provider(Box::new(send_provider));
} }
} }
/// Generic function to update the event subscribers.
fn update_listeners(&mut self, key: ListenerKey, sender_id: ChannelId) { fn update_listeners(&mut self, key: ListenerKey, sender_id: ChannelId) {
self.listener_table.add_listener(key, sender_id); self.listener_map.add_listener(key, sender_id);
} }
/// This function will use the cached event receiver and try to receive one event. /// This function will use the cached event receiver and try to receive one event.
@ -248,27 +236,23 @@ impl<E, Event: GenericEvent + Copy, AuxDataProvider: Clone>
/// [EventRoutingErrorsWithResult] error struct. /// [EventRoutingErrorsWithResult] error struct.
pub fn try_event_handling( pub fn try_event_handling(
&self, &self,
) -> Result< ) -> Result<EventRoutingResult<EV, AUX>, EventRoutingErrorsWithResult<EV, AUX>> {
EventRoutingResult<Event, AuxDataProvider>,
EventRoutingErrorsWithResult<Event, AuxDataProvider, E>,
> {
let mut err_idx = 0; let mut err_idx = 0;
let mut err_slice = [None, None, None]; let mut err_slice = [None, None, None];
let mut num_recipients = 0; let mut num_recipients = 0;
let mut add_error = |error: EventRoutingError<E>| { let mut add_error = |error: EventRoutingError| {
if err_idx < 3 { if err_idx < 3 {
err_slice[err_idx] = Some(error); err_slice[err_idx] = Some(error);
err_idx += 1; err_idx += 1;
} }
}; };
let mut send_handler = let mut send_handler = |key: &ListenerKey, event: EV, aux_data: &Option<AUX>| {
|key: &ListenerKey, event: Event, aux_data: &Option<AuxDataProvider>| { if self.listener_map.contains_listener(key) {
if self.listener_table.contains_listener(key) { if let Some(ids) = self.listener_map.get_listener_ids(key) {
if let Some(ids) = self.listener_table.get_listener_ids(key) {
for id in ids { for id in ids {
if let Some(sender) = self.sender_table.get_send_event_provider(id) { if let Some(sender) = self.sender_map.get_send_event_provider(id) {
if let Err(e) = sender.send(event, aux_data.clone()) { if let Err(e) = sender.send(event, aux_data.clone()) {
add_error(EventRoutingError::SendError(e)); add_error(EventRoutingError::Send(e));
} else { } else {
num_recipients += 1; num_recipients += 1;
} }
@ -281,7 +265,7 @@ impl<E, Event: GenericEvent + Copy, AuxDataProvider: Clone>
} }
} }
}; };
if let Some((event, aux_data)) = self.event_receiver.receive() { if let Some((event, aux_data)) = self.event_receiver.try_recv_event() {
let single_key = ListenerKey::Single(event.raw_as_largest_type()); let single_key = ListenerKey::Single(event.raw_as_largest_type());
send_handler(&single_key, event, &aux_data); send_handler(&single_key, event, &aux_data);
let group_key = ListenerKey::Group(event.group_id_as_largest_type()); let group_key = ListenerKey::Group(event.group_id_as_largest_type());
@ -289,43 +273,79 @@ impl<E, Event: GenericEvent + Copy, AuxDataProvider: Clone>
send_handler(&ListenerKey::All, event, &aux_data); send_handler(&ListenerKey::All, event, &aux_data);
if err_idx > 0 { if err_idx > 0 {
return Err(EventRoutingErrorsWithResult { return Err(EventRoutingErrorsWithResult {
result: EventRoutingResult::Handled(num_recipients, event, aux_data), result: EventRoutingResult::Handled {
num_recipients,
event,
aux_data,
},
errors: err_slice, errors: err_slice,
}); });
} }
return Ok(EventRoutingResult::Handled(num_recipients, event, aux_data)); return Ok(EventRoutingResult::Handled {
num_recipients,
event,
aux_data,
});
} }
Ok(EventRoutingResult::Empty) Ok(EventRoutingResult::Empty)
} }
} }
#[derive(Default)] #[cfg(feature = "alloc")]
pub struct DefaultListenerTableProvider { pub mod alloc_mod {
listeners: HashMap<ListenerKey, Vec<ChannelId>>, use alloc::vec::Vec;
} use hashbrown::HashMap;
pub struct DefaultSenderTableProvider< use super::*;
SendProviderError,
Event: GenericEvent = EventU32,
AuxDataProvider = Params,
> {
senders: HashMap<
ChannelId,
Box<dyn SendEventProvider<Event, AuxDataProvider, Error = SendProviderError>>,
>,
}
impl<SendProviderError, Event: GenericEvent, AuxDataProvider> Default /// Helper type which constrains the sender map and listener map generics to the [DefaultSenderMap]
for DefaultSenderTableProvider<SendProviderError, Event, AuxDataProvider> /// and the [DefaultListenerMap]. It uses regular mpsc channels as the message queue backend.
{ pub type EventManagerWithMpsc<EV = EventU32, AUX = Params> = EventManager<
fn default() -> Self { MpscEventReceiver,
DefaultSenderMap<EventSenderMpsc<EV>, EV, AUX>,
DefaultListenerMap,
EventSenderMpsc<EV>,
>;
/// Helper type which constrains the sender map and listener map generics to the [DefaultSenderMap]
/// and the [DefaultListenerMap]. It uses
/// [bounded mpsc senders](https://doc.rust-lang.org/std/sync/mpsc/struct.SyncSender.html) as the
/// message queue backend.
pub type EventManagerWithBoundedMpsc<EV = EventU32, AUX = Params> = EventManager<
MpscEventReceiver,
DefaultSenderMap<EventSenderMpscBounded<EV>, EV, AUX>,
DefaultListenerMap,
EventSenderMpscBounded<EV>,
>;
impl<
ER: EventReceiveProvider<EV, AUX>,
SP: EventSendProvider<EV, AUX>,
EV: GenericEvent + Copy,
AUX: 'static,
> EventManager<ER, DefaultSenderMap<SP, EV, AUX>, DefaultListenerMap, SP, EV, AUX>
{
/// Create an event manager where the sender table will be the [DefaultSenderMap]
/// and the listener table will be the [DefaultListenerMap].
pub fn new(event_receiver: ER) -> Self {
Self { Self {
senders: HashMap::new(), listener_map: DefaultListenerMap::default(),
sender_map: DefaultSenderMap::default(),
event_receiver,
phantom: PhantomData,
}
} }
} }
}
impl ListenerTable for DefaultListenerTableProvider { /// Default listener map.
///
/// Simple implementation which uses a [HashMap] and a [Vec] internally.
#[derive(Default)]
pub struct DefaultListenerMap {
listeners: HashMap<ListenerKey, Vec<ChannelId>>,
}
impl ListenerMapProvider for DefaultListenerMap {
fn get_listeners(&self) -> Vec<ListenerKey> { fn get_listeners(&self) -> Vec<ListenerKey> {
let mut key_list = Vec::new(); let mut key_list = Vec::new();
for key in self.listeners.keys() { for key in self.listeners.keys() {
@ -346,7 +366,7 @@ impl ListenerTable for DefaultListenerTableProvider {
if let Some(existing_list) = self.listeners.get_mut(&key) { if let Some(existing_list) = self.listeners.get_mut(&key) {
existing_list.push(sender_id); existing_list.push(sender_id);
} else { } else {
let new_list = vec![sender_id]; let new_list = alloc::vec![sender_id];
self.listeners.insert(key, new_list); self.listeners.insert(key, new_list);
} }
true true
@ -358,61 +378,72 @@ impl ListenerTable for DefaultListenerTableProvider {
list.dedup(); list.dedup();
} }
} }
} }
impl<SendProviderError, Event: GenericEvent, AuxDataProvider> /// Default sender map.
SenderTable<SendProviderError, Event, AuxDataProvider> ///
for DefaultSenderTableProvider<SendProviderError, Event, AuxDataProvider> /// Simple implementation which uses a [HashMap] internally.
{ pub struct DefaultSenderMap<
SP: EventSendProvider<EV, AUX>,
EV: GenericEvent = EventU32,
AUX = Params,
> {
senders: HashMap<ChannelId, SP>,
phantom: PhantomData<(EV, AUX)>,
}
impl<SP: EventSendProvider<EV, AUX>, EV: GenericEvent, AUX> Default
for DefaultSenderMap<SP, EV, AUX>
{
fn default() -> Self {
Self {
senders: Default::default(),
phantom: Default::default(),
}
}
}
impl<SP: EventSendProvider<EV, AUX>, EV: GenericEvent, AUX> SenderMapProvider<SP, EV, AUX>
for DefaultSenderMap<SP, EV, AUX>
{
fn contains_send_event_provider(&self, id: &ChannelId) -> bool { fn contains_send_event_provider(&self, id: &ChannelId) -> bool {
self.senders.contains_key(id) self.senders.contains_key(id)
} }
fn get_send_event_provider( fn get_send_event_provider(&self, id: &ChannelId) -> Option<&SP> {
&self,
id: &ChannelId,
) -> Option<&dyn SendEventProvider<Event, AuxDataProvider, Error = SendProviderError>> {
self.senders self.senders
.get(id) .get(id)
.filter(|sender| sender.id() == *id) .filter(|sender| sender.channel_id() == *id)
.map(|v| v.as_ref())
} }
fn add_send_event_provider( fn add_send_event_provider(&mut self, send_provider: SP) -> bool {
&mut self, let id = send_provider.channel_id();
send_provider: Box<
dyn SendEventProvider<Event, AuxDataProvider, Error = SendProviderError>,
>,
) -> bool {
let id = send_provider.id();
if self.senders.contains_key(&id) { if self.senders.contains_key(&id) {
return false; return false;
} }
self.senders.insert(id, send_provider).is_none() self.senders.insert(id, send_provider).is_none()
} }
}
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub mod stdmod { pub mod std_mod {
use super::*; use super::*;
use crate::event_man::{EventReceiver, EventWithAuxData}; use std::sync::mpsc;
use crate::events::{EventU16, EventU32, GenericEvent};
use crate::params::Params;
use std::sync::mpsc::{Receiver, SendError, Sender};
pub struct MpscEventReceiver<Event: GenericEvent + Send = EventU32> { pub struct MpscEventReceiver<Event: GenericEvent + Send = EventU32> {
mpsc_receiver: Receiver<(Event, Option<Params>)>, mpsc_receiver: mpsc::Receiver<(Event, Option<Params>)>,
} }
impl<Event: GenericEvent + Send> MpscEventReceiver<Event> { impl<Event: GenericEvent + Send> MpscEventReceiver<Event> {
pub fn new(receiver: Receiver<(Event, Option<Params>)>) -> Self { pub fn new(receiver: mpsc::Receiver<(Event, Option<Params>)>) -> Self {
Self { Self {
mpsc_receiver: receiver, mpsc_receiver: receiver,
} }
} }
} }
impl<Event: GenericEvent + Send> EventReceiver<Event> for MpscEventReceiver<Event> { impl<Event: GenericEvent + Send> EventReceiveProvider<Event> for MpscEventReceiver<Event> {
fn receive(&self) -> Option<EventWithAuxData<Event>> { fn try_recv_event(&self) -> Option<EventWithAuxData<Event>> {
if let Ok(event_and_data) = self.mpsc_receiver.try_recv() { if let Ok(event_and_data) = self.mpsc_receiver.try_recv() {
return Some(event_and_data); return Some(event_and_data);
} }
@ -423,31 +454,75 @@ pub mod stdmod {
pub type MpscEventU32Receiver = MpscEventReceiver<EventU32>; pub type MpscEventU32Receiver = MpscEventReceiver<EventU32>;
pub type MpscEventU16Receiver = MpscEventReceiver<EventU16>; pub type MpscEventU16Receiver = MpscEventReceiver<EventU16>;
/// Generic event sender which uses a regular [mpsc::Sender] as the messaging backend to
/// send events.
#[derive(Clone)] #[derive(Clone)]
pub struct MpscEventSendProvider<Event: GenericEvent + Send> { pub struct EventSenderMpsc<Event: GenericEvent + Send> {
id: u32, id: u32,
sender: Sender<(Event, Option<Params>)>, sender: mpsc::Sender<(Event, Option<Params>)>,
} }
impl<Event: GenericEvent + Send> MpscEventSendProvider<Event> { impl<Event: GenericEvent + Send> EventSenderMpsc<Event> {
pub fn new(id: u32, sender: Sender<(Event, Option<Params>)>) -> Self { pub fn new(id: u32, sender: mpsc::Sender<(Event, Option<Params>)>) -> Self {
Self { id, sender } Self { id, sender }
} }
} }
impl<Event: GenericEvent + Send> SendEventProvider<Event> for MpscEventSendProvider<Event> { impl<Event: GenericEvent + Send> EventSendProvider<Event> for EventSenderMpsc<Event> {
type Error = SendError<(Event, Option<Params>)>; fn channel_id(&self) -> u32 {
fn id(&self) -> u32 {
self.id self.id
} }
fn send(&self, event: Event, aux_data: Option<Params>) -> Result<(), Self::Error> { fn send(&self, event: Event, aux_data: Option<Params>) -> Result<(), GenericSendError> {
self.sender.send((event, aux_data)) self.sender
.send((event, aux_data))
.map_err(|_| GenericSendError::RxDisconnected)
} }
} }
pub type MpscEventU32SendProvider = MpscEventSendProvider<EventU32>; /// Generic event sender which uses the [mpsc::SyncSender] as the messaging backend to send
pub type MpscEventU16SendProvider = MpscEventSendProvider<EventU16>; /// events. This has the advantage that the channel is bounded and thus more deterministic.
#[derive(Clone)]
pub struct EventSenderMpscBounded<Event: GenericEvent + Send> {
channel_id: u32,
sender: mpsc::SyncSender<(Event, Option<Params>)>,
capacity: usize,
}
impl<Event: GenericEvent + Send> EventSenderMpscBounded<Event> {
pub fn new(
channel_id: u32,
sender: mpsc::SyncSender<(Event, Option<Params>)>,
capacity: usize,
) -> Self {
Self {
channel_id,
sender,
capacity,
}
}
}
impl<Event: GenericEvent + Send> EventSendProvider<Event> for EventSenderMpscBounded<Event> {
fn channel_id(&self) -> u32 {
self.channel_id
}
fn send(&self, event: Event, aux_data: Option<Params>) -> Result<(), GenericSendError> {
if let Err(e) = self.sender.try_send((event, aux_data)) {
return match e {
mpsc::TrySendError::Full(_) => {
Err(GenericSendError::QueueFull(Some(self.capacity as u32)))
}
mpsc::TrySendError::Disconnected(_) => Err(GenericSendError::RxDisconnected),
};
}
Ok(())
}
}
pub type EventU32SenderMpsc = EventSenderMpsc<EventU32>;
pub type EventU16SenderMpsc = EventSenderMpsc<EventU16>;
pub type EventU32SenderMpscBounded = EventSenderMpscBounded<EventU32>;
pub type EventU16SenderMpscBounded = EventSenderMpscBounded<EventU16>;
} }
#[cfg(test)] #[cfg(test)]
@ -456,32 +531,10 @@ mod tests {
use crate::event_man::EventManager; use crate::event_man::EventManager;
use crate::events::{EventU32, GenericEvent, Severity}; use crate::events::{EventU32, GenericEvent, Severity};
use crate::params::ParamsRaw; use crate::params::ParamsRaw;
use alloc::boxed::Box;
use std::format; use std::format;
use std::sync::mpsc::{channel, Receiver, SendError, Sender}; use std::sync::mpsc::{self, channel, Receiver, Sender};
#[derive(Clone)] const TEST_EVENT: EventU32 = EventU32::const_new(Severity::INFO, 0, 5);
struct MpscEventSenderQueue {
id: u32,
mpsc_sender: Sender<EventU32WithAuxData>,
}
impl MpscEventSenderQueue {
fn new(id: u32, mpsc_sender: Sender<EventU32WithAuxData>) -> Self {
Self { id, mpsc_sender }
}
}
impl SendEventProvider<EventU32> for MpscEventSenderQueue {
type Error = SendError<EventU32WithAuxData>;
fn id(&self) -> u32 {
self.id
}
fn send(&self, event: EventU32, aux_data: Option<Params>) -> Result<(), Self::Error> {
self.mpsc_sender.send((event, aux_data))
}
}
fn check_next_event( fn check_next_event(
expected: EventU32, expected: EventU32,
@ -500,22 +553,21 @@ mod tests {
expected_num_sent: u32, expected_num_sent: u32,
) { ) {
assert!(matches!(res, EventRoutingResult::Handled { .. })); assert!(matches!(res, EventRoutingResult::Handled { .. }));
if let EventRoutingResult::Handled(num_recipients, event, _aux_data) = res { if let EventRoutingResult::Handled {
num_recipients,
event,
..
} = res
{
assert_eq!(event, expected); assert_eq!(event, expected);
assert_eq!(num_recipients, expected_num_sent); assert_eq!(num_recipients, expected_num_sent);
} }
} }
fn generic_event_man() -> ( fn generic_event_man() -> (Sender<EventU32WithAuxData>, EventManagerWithMpsc) {
Sender<EventU32WithAuxData>,
EventManager<SendError<EventU32WithAuxData>>,
) {
let (event_sender, manager_queue) = channel(); let (event_sender, manager_queue) = channel();
let event_man_receiver = MpscEventReceiver::new(manager_queue); let event_man_receiver = MpscEventReceiver::new(manager_queue);
( (event_sender, EventManager::new(event_man_receiver))
event_sender,
EventManager::new(Box::new(event_man_receiver)),
)
} }
#[test] #[test]
@ -524,15 +576,12 @@ mod tests {
let event_grp_0 = EventU32::new(Severity::INFO, 0, 0).unwrap(); let event_grp_0 = EventU32::new(Severity::INFO, 0, 0).unwrap();
let event_grp_1_0 = EventU32::new(Severity::HIGH, 1, 0).unwrap(); let event_grp_1_0 = EventU32::new(Severity::HIGH, 1, 0).unwrap();
let (single_event_sender, single_event_receiver) = channel(); let (single_event_sender, single_event_receiver) = channel();
let single_event_listener = MpscEventSenderQueue::new(0, single_event_sender); let single_event_listener = EventSenderMpsc::new(0, single_event_sender);
event_man.subscribe_single(&event_grp_0, single_event_listener.id()); event_man.subscribe_single(&event_grp_0, single_event_listener.channel_id());
event_man.add_sender(single_event_listener); event_man.add_sender(single_event_listener);
let (group_event_sender_0, group_event_receiver_0) = channel(); let (group_event_sender_0, group_event_receiver_0) = channel();
let group_event_listener = MpscEventSenderQueue { let group_event_listener = EventU32SenderMpsc::new(1, group_event_sender_0);
id: 1, event_man.subscribe_group(event_grp_1_0.group_id(), group_event_listener.channel_id());
mpsc_sender: group_event_sender_0,
};
event_man.subscribe_group(event_grp_1_0.group_id(), group_event_listener.id());
event_man.add_sender(group_event_listener); event_man.add_sender(group_event_listener);
// Test event with one listener // Test event with one listener
@ -559,8 +608,8 @@ mod tests {
let (event_sender, mut event_man) = generic_event_man(); let (event_sender, mut event_man) = generic_event_man();
let event_grp_0 = EventU32::new(Severity::INFO, 0, 0).unwrap(); let event_grp_0 = EventU32::new(Severity::INFO, 0, 0).unwrap();
let (single_event_sender, single_event_receiver) = channel(); let (single_event_sender, single_event_receiver) = channel();
let single_event_listener = MpscEventSenderQueue::new(0, single_event_sender); let single_event_listener = EventSenderMpsc::new(0, single_event_sender);
event_man.subscribe_single(&event_grp_0, single_event_listener.id()); event_man.subscribe_single(&event_grp_0, single_event_listener.channel_id());
event_man.add_sender(single_event_listener); event_man.add_sender(single_event_listener);
event_sender event_sender
.send((event_grp_0, Some(Params::Heapless((2_u32, 3_u32).into())))) .send((event_grp_0, Some(Params::Heapless((2_u32, 3_u32).into()))))
@ -591,12 +640,15 @@ mod tests {
let event_grp_0 = EventU32::new(Severity::INFO, 0, 0).unwrap(); let event_grp_0 = EventU32::new(Severity::INFO, 0, 0).unwrap();
let event_grp_1_0 = EventU32::new(Severity::HIGH, 1, 0).unwrap(); let event_grp_1_0 = EventU32::new(Severity::HIGH, 1, 0).unwrap();
let (event_grp_0_sender, event_grp_0_receiver) = channel(); let (event_grp_0_sender, event_grp_0_receiver) = channel();
let event_grp_0_and_1_listener = MpscEventSenderQueue { let event_grp_0_and_1_listener = EventU32SenderMpsc::new(0, event_grp_0_sender);
id: 0, event_man.subscribe_group(
mpsc_sender: event_grp_0_sender, event_grp_0.group_id(),
}; event_grp_0_and_1_listener.channel_id(),
event_man.subscribe_group(event_grp_0.group_id(), event_grp_0_and_1_listener.id()); );
event_man.subscribe_group(event_grp_1_0.group_id(), event_grp_0_and_1_listener.id()); event_man.subscribe_group(
event_grp_1_0.group_id(),
event_grp_0_and_1_listener.channel_id(),
);
event_man.add_sender(event_grp_0_and_1_listener); event_man.add_sender(event_grp_0_and_1_listener);
event_sender event_sender
@ -625,18 +677,12 @@ mod tests {
let event_1 = EventU32::new(Severity::HIGH, 1, 0).unwrap(); let event_1 = EventU32::new(Severity::HIGH, 1, 0).unwrap();
let (event_0_tx_0, event_0_rx_0) = channel(); let (event_0_tx_0, event_0_rx_0) = channel();
let (event_0_tx_1, event_0_rx_1) = channel(); let (event_0_tx_1, event_0_rx_1) = channel();
let event_listener_0 = MpscEventSenderQueue { let event_listener_0 = EventU32SenderMpsc::new(0, event_0_tx_0);
id: 0, let event_listener_1 = EventU32SenderMpsc::new(1, event_0_tx_1);
mpsc_sender: event_0_tx_0, let event_listener_0_sender_id = event_listener_0.channel_id();
};
let event_listener_1 = MpscEventSenderQueue {
id: 1,
mpsc_sender: event_0_tx_1,
};
let event_listener_0_sender_id = event_listener_0.id();
event_man.subscribe_single(&event_0, event_listener_0_sender_id); event_man.subscribe_single(&event_0, event_listener_0_sender_id);
event_man.add_sender(event_listener_0); event_man.add_sender(event_listener_0);
let event_listener_1_sender_id = event_listener_1.id(); let event_listener_1_sender_id = event_listener_1.channel_id();
event_man.subscribe_single(&event_0, event_listener_1_sender_id); event_man.subscribe_single(&event_0, event_listener_1_sender_id);
event_man.add_sender(event_listener_1); event_man.add_sender(event_listener_1);
event_sender event_sender
@ -681,16 +727,12 @@ mod tests {
fn test_all_events_listener() { fn test_all_events_listener() {
let (event_sender, manager_queue) = channel(); let (event_sender, manager_queue) = channel();
let event_man_receiver = MpscEventReceiver::new(manager_queue); let event_man_receiver = MpscEventReceiver::new(manager_queue);
let mut event_man: EventManager<SendError<EventU32WithAuxData>> = let mut event_man = EventManagerWithMpsc::new(event_man_receiver);
EventManager::new(Box::new(event_man_receiver));
let event_0 = EventU32::new(Severity::INFO, 0, 5).unwrap(); let event_0 = EventU32::new(Severity::INFO, 0, 5).unwrap();
let event_1 = EventU32::new(Severity::HIGH, 1, 0).unwrap(); let event_1 = EventU32::new(Severity::HIGH, 1, 0).unwrap();
let (event_0_tx_0, all_events_rx) = channel(); let (event_0_tx_0, all_events_rx) = channel();
let all_events_listener = MpscEventSenderQueue { let all_events_listener = EventU32SenderMpsc::new(0, event_0_tx_0);
id: 0, event_man.subscribe_all(all_events_listener.channel_id());
mpsc_sender: event_0_tx_0,
};
event_man.subscribe_all(all_events_listener.id());
event_man.add_sender(all_events_listener); event_man.add_sender(all_events_listener);
event_sender event_sender
.send((event_0, None)) .send((event_0, None))
@ -707,4 +749,36 @@ mod tests {
check_next_event(event_0, &all_events_rx); check_next_event(event_0, &all_events_rx);
check_next_event(event_1, &all_events_rx); check_next_event(event_1, &all_events_rx);
} }
#[test]
fn test_bounded_event_sender_queue_full() {
let (event_sender, _event_receiver) = mpsc::sync_channel(3);
let event_sender = EventU32SenderMpscBounded::new(1, event_sender, 3);
event_sender
.send_no_data(TEST_EVENT)
.expect("sending test event failed");
event_sender
.send_no_data(TEST_EVENT)
.expect("sending test event failed");
event_sender
.send_no_data(TEST_EVENT)
.expect("sending test event failed");
let error = event_sender.send_no_data(TEST_EVENT);
if let Err(e) = error {
assert!(matches!(e, GenericSendError::QueueFull(Some(3))));
} else {
panic!("unexpected error {error:?}");
}
}
#[test]
fn test_bounded_event_sender_rx_dropped() {
let (event_sender, event_receiver) = mpsc::sync_channel(3);
let event_sender = EventU32SenderMpscBounded::new(1, event_sender, 3);
drop(event_receiver);
if let Err(e) = event_sender.send_no_data(TEST_EVENT) {
assert!(matches!(e, GenericSendError::RxDisconnected));
} else {
panic!("Expected error");
}
}
} }

View File

@ -107,7 +107,7 @@ impl<TmError, TcError> TcpTmSender<TmError, TcError> for CobsTmSender {
/// ///
/// ## Example /// ## Example
/// ///
/// The [TCP integration tests](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs-core/tests/tcp_servers.rs) /// The [TCP integration tests](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs/tests/tcp_servers.rs)
/// test also serves as the example application for this module. /// test also serves as the example application for this module.
pub struct TcpTmtcInCobsServer< pub struct TcpTmtcInCobsServer<
TmError, TmError,

View File

@ -88,7 +88,7 @@ impl<TmError, TcError> TcpTmSender<TmError, TcError> for SpacepacketsTmSender {
/// [spacepackets::PacketId]s as part of the server configuration for that purpose. /// [spacepackets::PacketId]s as part of the server configuration for that purpose.
/// ///
/// ## Example /// ## Example
/// The [TCP server integration tests](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs-core/tests/tcp_servers.rs) /// The [TCP server integration tests](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs/tests/tcp_servers.rs)
/// also serves as the example application for this module. /// also serves as the example application for this module.
pub struct TcpSpacepacketsServer< pub struct TcpSpacepacketsServer<
TmError, TmError,

View File

@ -26,8 +26,6 @@ extern crate std;
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))] #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
pub mod cfdp; pub mod cfdp;
pub mod encoding; pub mod encoding;
#[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
pub mod event_man; pub mod event_man;
pub mod events; pub mod events;
#[cfg(feature = "std")] #[cfg(feature = "std")]

View File

@ -43,22 +43,19 @@
//! This includes the [ParamsHeapless] enumeration for contained values which do not require heap //! This includes the [ParamsHeapless] enumeration for contained values which do not require heap
//! allocation, and the [Params] which enumerates [ParamsHeapless] and some additional types which //! allocation, and the [Params] which enumerates [ParamsHeapless] and some additional types which
//! require [alloc] support but allow for more flexbility. //! require [alloc] support but allow for more flexbility.
#[cfg(feature = "alloc")]
use crate::pool::StoreAddr; use crate::pool::StoreAddr;
#[cfg(feature = "alloc")]
use alloc::string::{String, ToString};
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
use core::fmt::Debug; use core::fmt::Debug;
use core::mem::size_of; use core::mem::size_of;
use paste::paste; use paste::paste;
use spacepackets::ecss::{EcssEnumU16, EcssEnumU32, EcssEnumU64, EcssEnumU8}; use spacepackets::ecss::{EcssEnumU16, EcssEnumU32, EcssEnumU64, EcssEnumU8};
pub use spacepackets::util::ToBeBytes;
use spacepackets::util::UnsignedEnum; use spacepackets::util::UnsignedEnum;
use spacepackets::ByteConversionError; use spacepackets::ByteConversionError;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
pub use alloc_mod::*; use alloc::string::{String, ToString};
pub use spacepackets::util::ToBeBytes; #[cfg(feature = "alloc")]
use alloc::vec::Vec;
/// Generic trait which is used for objects which can be converted into a raw network (big) endian /// Generic trait which is used for objects which can be converted into a raw network (big) endian
/// byte format. /// byte format.
@ -560,57 +557,65 @@ from_conversions_for_raw!(
(f64, Self::F64), (f64, Self::F64),
); );
#[cfg(feature = "alloc")] /// Generic enumeration for additional parameters, including parameters which rely on heap
mod alloc_mod { /// allocations.
use super::*; #[derive(Debug, Clone)]
/// Generic enumeration for additional parameters, including parameters which rely on heap #[non_exhaustive]
/// allocations. pub enum Params {
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
#[derive(Debug, Clone)]
pub enum Params {
Heapless(ParamsHeapless), Heapless(ParamsHeapless),
Store(StoreAddr), Store(StoreAddr),
#[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
Vec(Vec<u8>), Vec(Vec<u8>),
#[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
String(String), String(String),
} }
impl From<StoreAddr> for Params { impl From<StoreAddr> for Params {
fn from(x: StoreAddr) -> Self { fn from(x: StoreAddr) -> Self {
Self::Store(x) Self::Store(x)
} }
} }
impl From<ParamsHeapless> for Params { impl From<ParamsHeapless> for Params {
fn from(x: ParamsHeapless) -> Self { fn from(x: ParamsHeapless) -> Self {
Self::Heapless(x) Self::Heapless(x)
} }
} }
impl From<Vec<u8>> for Params { #[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
impl From<Vec<u8>> for Params {
fn from(val: Vec<u8>) -> Self { fn from(val: Vec<u8>) -> Self {
Self::Vec(val) Self::Vec(val)
} }
} }
/// Converts a byte slice into the [Params::Vec] variant /// Converts a byte slice into the [Params::Vec] variant
impl From<&[u8]> for Params { #[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
impl From<&[u8]> for Params {
fn from(val: &[u8]) -> Self { fn from(val: &[u8]) -> Self {
Self::Vec(val.to_vec()) Self::Vec(val.to_vec())
} }
} }
impl From<String> for Params { #[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
impl From<String> for Params {
fn from(val: String) -> Self { fn from(val: String) -> Self {
Self::String(val) Self::String(val)
} }
} }
/// Converts a string slice into the [Params::String] variant #[cfg(feature = "alloc")]
impl From<&str> for Params { #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
/// Converts a string slice into the [Params::String] variant
impl From<&str> for Params {
fn from(val: &str) -> Self { fn from(val: &str) -> Self {
Self::String(val.to_string()) Self::String(val.to_string())
} }
}
} }
#[cfg(test)] #[cfg(test)]

View File

@ -24,6 +24,42 @@ pub enum SwitchState {
Faulty = 3, Faulty = 3,
} }
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum SwitchStateBinary {
Off = 0,
On = 1,
}
impl TryFrom<SwitchState> for SwitchStateBinary {
type Error = ();
fn try_from(value: SwitchState) -> Result<Self, Self::Error> {
match value {
SwitchState::Off => Ok(SwitchStateBinary::Off),
SwitchState::On => Ok(SwitchStateBinary::On),
_ => Err(()),
}
}
}
impl<T: Into<u64>> From<T> for SwitchStateBinary {
fn from(value: T) -> Self {
if value.into() == 0 {
return SwitchStateBinary::Off;
}
SwitchStateBinary::On
}
}
impl From<SwitchStateBinary> for SwitchState {
fn from(value: SwitchStateBinary) -> Self {
match value {
SwitchStateBinary::Off => SwitchState::Off,
SwitchStateBinary::On => SwitchState::On,
}
}
}
pub type SwitchId = u16; pub type SwitchId = u16;
/// Generic trait for a device capable of turning on and off switches. /// Generic trait for a device capable of turning on and off switches.

View File

@ -44,7 +44,7 @@ pub mod alloc_mod {
/// - Checking the validity of the APID, service ID, subservice ID. /// - Checking the validity of the APID, service ID, subservice ID.
/// - Checking the validity of the user data. /// - Checking the validity of the user data.
/// ///
/// A [VerificationReporterWithSender] instance is passed to the user to also allow handling /// A [VerificationReportingProvider] instance is passed to the user to also allow handling
/// of the verification process as part of the PUS standard requirements. /// of the verification process as part of the PUS standard requirements.
pub trait PusActionToRequestConverter { pub trait PusActionToRequestConverter {
type Error; type Error;
@ -62,9 +62,9 @@ pub mod alloc_mod {
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))] #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
pub mod std_mod { pub mod std_mod {
use crate::pus::{ use crate::pus::{
verification::VerificationReportingProvider, EcssTcInMemConverter, GenericRoutingError, get_current_cds_short_timestamp, verification::VerificationReportingProvider,
PusPacketHandlerResult, PusPacketHandlingError, PusRoutingErrorHandler, PusServiceBase, EcssTcInMemConverter, EcssTcReceiverCore, EcssTmSenderCore, GenericRoutingError,
PusServiceHelper, PusPacketHandlerResult, PusPacketHandlingError, PusRoutingErrorHandler, PusServiceHelper,
}; };
use super::*; use super::*;
@ -81,6 +81,8 @@ pub mod std_mod {
/// 3. Route the action request using the provided [PusActionRequestRouter]. /// 3. Route the action request using the provided [PusActionRequestRouter].
/// 4. Handle all routing errors using the provided [PusRoutingErrorHandler]. /// 4. Handle all routing errors using the provided [PusRoutingErrorHandler].
pub struct PusService8ActionHandler< pub struct PusService8ActionHandler<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
RequestConverter: PusActionToRequestConverter, RequestConverter: PusActionToRequestConverter,
@ -88,13 +90,16 @@ pub mod std_mod {
RoutingErrorHandler: PusRoutingErrorHandler<Error = RoutingError>, RoutingErrorHandler: PusRoutingErrorHandler<Error = RoutingError>,
RoutingError = GenericRoutingError, RoutingError = GenericRoutingError,
> { > {
service_helper: PusServiceHelper<TcInMemConverter, VerificationReporter>, service_helper:
PusServiceHelper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
pub request_converter: RequestConverter, pub request_converter: RequestConverter,
pub request_router: RequestRouter, pub request_router: RequestRouter,
pub routing_error_handler: RoutingErrorHandler, pub routing_error_handler: RoutingErrorHandler,
} }
impl< impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
RequestConverter: PusActionToRequestConverter<Error = PusPacketHandlingError>, RequestConverter: PusActionToRequestConverter<Error = PusPacketHandlingError>,
@ -103,6 +108,8 @@ pub mod std_mod {
RoutingError: Clone, RoutingError: Clone,
> >
PusService8ActionHandler< PusService8ActionHandler<
TcReceiver,
TmSender,
TcInMemConverter, TcInMemConverter,
VerificationReporter, VerificationReporter,
RequestConverter, RequestConverter,
@ -114,7 +121,12 @@ pub mod std_mod {
PusPacketHandlingError: From<RoutingError>, PusPacketHandlingError: From<RoutingError>,
{ {
pub fn new( pub fn new(
service_helper: PusServiceHelper<TcInMemConverter, VerificationReporter>, service_helper: PusServiceHelper<
TcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
>,
request_converter: RequestConverter, request_converter: RequestConverter,
request_router: RequestRouter, request_router: RequestRouter,
routing_error_handler: RoutingErrorHandler, routing_error_handler: RoutingErrorHandler,
@ -139,10 +151,7 @@ pub mod std_mod {
.tc_in_mem_converter .tc_in_mem_converter
.convert_ecss_tc_in_memory_to_reader(&ecss_tc_and_token.tc_in_memory)?; .convert_ecss_tc_in_memory_to_reader(&ecss_tc_and_token.tc_in_memory)?;
let mut partial_error = None; let mut partial_error = None;
let time_stamp = let time_stamp = get_current_cds_short_timestamp(&mut partial_error);
PusServiceBase::<VerificationReporter>::get_current_cds_short_timestamp(
&mut partial_error,
);
let (target_id, action_request) = self.request_converter.convert( let (target_id, action_request) = self.request_converter.convert(
ecss_tc_and_token.token, ecss_tc_and_token.token,
&tc, &tc,
@ -189,7 +198,8 @@ mod tests {
verification::{ verification::{
tests::TestVerificationReporter, FailParams, RequestId, VerificationReportingProvider, tests::TestVerificationReporter, FailParams, RequestId, VerificationReportingProvider,
}, },
EcssTcInVecConverter, GenericRoutingError, PusPacketHandlerResult, PusPacketHandlingError, EcssTcInVecConverter, GenericRoutingError, MpscTcReceiver, PusPacketHandlerResult,
PusPacketHandlingError, TmAsVecSenderWithMpsc,
}; };
use super::*; use super::*;
@ -259,6 +269,8 @@ mod tests {
struct Pus8HandlerWithVecTester { struct Pus8HandlerWithVecTester {
common: PusServiceHandlerWithVecCommon<TestVerificationReporter>, common: PusServiceHandlerWithVecCommon<TestVerificationReporter>,
handler: PusService8ActionHandler< handler: PusService8ActionHandler<
MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter, EcssTcInVecConverter,
TestVerificationReporter, TestVerificationReporter,
TestConverter<8>, TestConverter<8>,

View File

@ -269,7 +269,7 @@ mod tests {
} }
impl EcssChannel for TestSender { impl EcssChannel for TestSender {
fn id(&self) -> ChannelId { fn channel_id(&self) -> ChannelId {
0 0
} }
} }

View File

@ -2,8 +2,6 @@ use crate::events::{EventU32, GenericEvent, Severity};
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use crate::events::{EventU32TypedSev, HasSeverity}; use crate::events::{EventU32TypedSev, HasSeverity};
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::boxed::Box;
#[cfg(feature = "alloc")]
use core::hash::Hash; use core::hash::Hash;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use hashbrown::HashSet; use hashbrown::HashSet;
@ -32,19 +30,19 @@ pub use heapless_mod::*;
/// structure to track disabled events. A more primitive and embedded friendly /// structure to track disabled events. A more primitive and embedded friendly
/// solution could track this information in a static or pre-allocated list which contains /// solution could track this information in a static or pre-allocated list which contains
/// the disabled events. /// the disabled events.
pub trait PusEventMgmtBackendProvider<Provider: GenericEvent> { pub trait PusEventMgmtBackendProvider<Event: GenericEvent> {
type Error; type Error;
fn event_enabled(&self, event: &Provider) -> bool; fn event_enabled(&self, event: &Event) -> bool;
fn enable_event_reporting(&mut self, event: &Provider) -> Result<bool, Self::Error>; fn enable_event_reporting(&mut self, event: &Event) -> Result<bool, Self::Error>;
fn disable_event_reporting(&mut self, event: &Provider) -> Result<bool, Self::Error>; fn disable_event_reporting(&mut self, event: &Event) -> Result<bool, Self::Error>;
} }
#[cfg(feature = "heapless")] #[cfg(feature = "heapless")]
pub mod heapless_mod { pub mod heapless_mod {
use super::*; use super::*;
use crate::events::{GenericEvent, LargestEventRaw}; use crate::events::LargestEventRaw;
use std::marker::PhantomData; use core::marker::PhantomData;
#[cfg_attr(doc_cfg, doc(cfg(feature = "heapless")))] #[cfg_attr(doc_cfg, doc(cfg(feature = "heapless")))]
// TODO: After a new version of heapless is released which uses hash32 version 0.3, try using // TODO: After a new version of heapless is released which uses hash32 version 0.3, try using
@ -108,6 +106,10 @@ impl From<EcssTmtcError> for EventManError {
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
pub mod alloc_mod { pub mod alloc_mod {
use core::marker::PhantomData;
use crate::events::EventU16;
use super::*; use super::*;
/// Default backend provider which uses a hash set as the event reporting status container /// Default backend provider which uses a hash set as the event reporting status container
@ -115,14 +117,11 @@ pub mod alloc_mod {
/// ///
/// This provider is a good option for host systems or larger embedded systems where /// This provider is a good option for host systems or larger embedded systems where
/// the expected occasional memory allocation performed by the [HashSet] is not an issue. /// the expected occasional memory allocation performed by the [HashSet] is not an issue.
pub struct DefaultPusMgmtBackendProvider<Event: GenericEvent = EventU32> { pub struct DefaultPusEventMgmtBackend<Event: GenericEvent = EventU32> {
disabled: HashSet<Event>, disabled: HashSet<Event>,
} }
/// Safety: All contained field are [Send] as well impl<Event: GenericEvent> Default for DefaultPusEventMgmtBackend<Event> {
unsafe impl<Event: GenericEvent + Send> Send for DefaultPusMgmtBackendProvider<Event> {}
impl<Event: GenericEvent> Default for DefaultPusMgmtBackendProvider<Event> {
fn default() -> Self { fn default() -> Self {
Self { Self {
disabled: HashSet::default(), disabled: HashSet::default(),
@ -130,46 +129,50 @@ pub mod alloc_mod {
} }
} }
impl<Provider: GenericEvent + PartialEq + Eq + Hash + Copy + Clone> impl<EV: GenericEvent + PartialEq + Eq + Hash + Copy + Clone> PusEventMgmtBackendProvider<EV>
PusEventMgmtBackendProvider<Provider> for DefaultPusMgmtBackendProvider<Provider> for DefaultPusEventMgmtBackend<EV>
{ {
type Error = (); type Error = ();
fn event_enabled(&self, event: &Provider) -> bool {
fn event_enabled(&self, event: &EV) -> bool {
!self.disabled.contains(event) !self.disabled.contains(event)
} }
fn enable_event_reporting(&mut self, event: &Provider) -> Result<bool, Self::Error> { fn enable_event_reporting(&mut self, event: &EV) -> Result<bool, Self::Error> {
Ok(self.disabled.remove(event)) Ok(self.disabled.remove(event))
} }
fn disable_event_reporting(&mut self, event: &Provider) -> Result<bool, Self::Error> { fn disable_event_reporting(&mut self, event: &EV) -> Result<bool, Self::Error> {
Ok(self.disabled.insert(*event)) Ok(self.disabled.insert(*event))
} }
} }
pub struct PusEventDispatcher<BackendError, Provider: GenericEvent> { pub struct PusEventDispatcher<
B: PusEventMgmtBackendProvider<EV, Error = E>,
EV: GenericEvent,
E,
> {
reporter: EventReporter, reporter: EventReporter,
backend: Box<dyn PusEventMgmtBackendProvider<Provider, Error = BackendError>>, backend: B,
phantom: PhantomData<(E, EV)>,
} }
/// Safety: All contained fields are send as well. impl<B: PusEventMgmtBackendProvider<EV, Error = E>, EV: GenericEvent, E>
unsafe impl<E: Send, Event: GenericEvent + Send> Send for PusEventDispatcher<E, Event> {} PusEventDispatcher<B, EV, E>
{
impl<BackendError, Provider: GenericEvent> PusEventDispatcher<BackendError, Provider> { pub fn new(reporter: EventReporter, backend: B) -> Self {
pub fn new( Self {
reporter: EventReporter, reporter,
backend: Box<dyn PusEventMgmtBackendProvider<Provider, Error = BackendError>>, backend,
) -> Self { phantom: PhantomData,
Self { reporter, backend }
} }
} }
impl<BackendError, Event: GenericEvent> PusEventDispatcher<BackendError, Event> { pub fn enable_tm_for_event(&mut self, event: &EV) -> Result<bool, E> {
pub fn enable_tm_for_event(&mut self, event: &Event) -> Result<bool, BackendError> {
self.backend.enable_event_reporting(event) self.backend.enable_event_reporting(event)
} }
pub fn disable_tm_for_event(&mut self, event: &Event) -> Result<bool, BackendError> { pub fn disable_tm_for_event(&mut self, event: &EV) -> Result<bool, E> {
self.backend.disable_event_reporting(event) self.backend.disable_event_reporting(event)
} }
@ -177,7 +180,7 @@ pub mod alloc_mod {
&mut self, &mut self,
sender: &mut (impl EcssTmSenderCore + ?Sized), sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8], time_stamp: &[u8],
event: Event, event: EV,
aux_data: Option<&[u8]>, aux_data: Option<&[u8]>,
) -> Result<bool, EventManError> { ) -> Result<bool, EventManError> {
if !self.backend.event_enabled(&event) { if !self.backend.event_enabled(&event) {
@ -208,18 +211,30 @@ pub mod alloc_mod {
} }
} }
impl<BackendError> PusEventDispatcher<BackendError, EventU32> { impl<EV: GenericEvent + Copy + PartialEq + Eq + Hash>
PusEventDispatcher<DefaultPusEventMgmtBackend<EV>, EV, ()>
{
pub fn new_with_default_backend(reporter: EventReporter) -> Self {
Self {
reporter,
backend: DefaultPusEventMgmtBackend::default(),
phantom: PhantomData,
}
}
}
impl<B: PusEventMgmtBackendProvider<EventU32, Error = E>, E> PusEventDispatcher<B, EventU32, E> {
pub fn enable_tm_for_event_with_sev<Severity: HasSeverity>( pub fn enable_tm_for_event_with_sev<Severity: HasSeverity>(
&mut self, &mut self,
event: &EventU32TypedSev<Severity>, event: &EventU32TypedSev<Severity>,
) -> Result<bool, BackendError> { ) -> Result<bool, E> {
self.backend.enable_event_reporting(event.as_ref()) self.backend.enable_event_reporting(event.as_ref())
} }
pub fn disable_tm_for_event_with_sev<Severity: HasSeverity>( pub fn disable_tm_for_event_with_sev<Severity: HasSeverity>(
&mut self, &mut self,
event: &EventU32TypedSev<Severity>, event: &EventU32TypedSev<Severity>,
) -> Result<bool, BackendError> { ) -> Result<bool, E> {
self.backend.disable_event_reporting(event.as_ref()) self.backend.disable_event_reporting(event.as_ref())
} }
@ -233,30 +248,38 @@ pub mod alloc_mod {
self.generate_pus_event_tm_generic(sender, time_stamp, event.into(), aux_data) self.generate_pus_event_tm_generic(sender, time_stamp, event.into(), aux_data)
} }
} }
pub type DefaultPusEventU16Dispatcher<E> =
PusEventDispatcher<DefaultPusEventMgmtBackend<EventU16>, EventU16, E>;
pub type DefaultPusEventU32Dispatcher<E> =
PusEventDispatcher<DefaultPusEventMgmtBackend<EventU32>, EventU32, E>;
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::events::SeverityInfo; use crate::{events::SeverityInfo, pus::TmAsVecSenderWithMpsc};
use crate::pus::MpscTmAsVecSender; use std::sync::mpsc::{self, TryRecvError};
use std::sync::mpsc::{channel, TryRecvError};
const INFO_EVENT: EventU32TypedSev<SeverityInfo> = const INFO_EVENT: EventU32TypedSev<SeverityInfo> =
EventU32TypedSev::<SeverityInfo>::const_new(1, 0); EventU32TypedSev::<SeverityInfo>::const_new(1, 0);
const LOW_SEV_EVENT: EventU32 = EventU32::const_new(Severity::LOW, 1, 5); const LOW_SEV_EVENT: EventU32 = EventU32::const_new(Severity::LOW, 1, 5);
const EMPTY_STAMP: [u8; 7] = [0; 7]; const EMPTY_STAMP: [u8; 7] = [0; 7];
fn create_basic_man() -> PusEventDispatcher<(), EventU32> { fn create_basic_man_1() -> DefaultPusEventU32Dispatcher<()> {
let reporter = EventReporter::new(0x02, 128).expect("Creating event repoter failed"); let reporter = EventReporter::new(0x02, 128).expect("Creating event repoter failed");
let backend = DefaultPusMgmtBackendProvider::<EventU32>::default(); PusEventDispatcher::new_with_default_backend(reporter)
PusEventDispatcher::new(reporter, Box::new(backend)) }
fn create_basic_man_2() -> DefaultPusEventU32Dispatcher<()> {
let reporter = EventReporter::new(0x02, 128).expect("Creating event repoter failed");
let backend = DefaultPusEventMgmtBackend::default();
PusEventDispatcher::new(reporter, backend)
} }
#[test] #[test]
fn test_basic() { fn test_basic() {
let mut event_man = create_basic_man(); let mut event_man = create_basic_man_1();
let (event_tx, event_rx) = channel(); let (event_tx, event_rx) = mpsc::channel();
let mut sender = MpscTmAsVecSender::new(0, "test_sender", event_tx); let mut sender = TmAsVecSenderWithMpsc::new(0, "test_sender", event_tx);
let event_sent = event_man let event_sent = event_man
.generate_pus_event_tm(&mut sender, &EMPTY_STAMP, INFO_EVENT, None) .generate_pus_event_tm(&mut sender, &EMPTY_STAMP, INFO_EVENT, None)
.expect("Sending info event failed"); .expect("Sending info event failed");
@ -268,9 +291,9 @@ mod tests {
#[test] #[test]
fn test_disable_event() { fn test_disable_event() {
let mut event_man = create_basic_man(); let mut event_man = create_basic_man_2();
let (event_tx, event_rx) = channel(); let (event_tx, event_rx) = mpsc::channel();
let mut sender = MpscTmAsVecSender::new(0, "test", event_tx); let mut sender = TmAsVecSenderWithMpsc::new(0, "test", event_tx);
let res = event_man.disable_tm_for_event(&LOW_SEV_EVENT); let res = event_man.disable_tm_for_event(&LOW_SEV_EVENT);
assert!(res.is_ok()); assert!(res.is_ok());
assert!(res.unwrap()); assert!(res.unwrap());
@ -291,9 +314,9 @@ mod tests {
#[test] #[test]
fn test_reenable_event() { fn test_reenable_event() {
let mut event_man = create_basic_man(); let mut event_man = create_basic_man_1();
let (event_tx, event_rx) = channel(); let (event_tx, event_rx) = mpsc::channel();
let mut sender = MpscTmAsVecSender::new(0, "test", event_tx); let mut sender = TmAsVecSenderWithMpsc::new(0, "test", event_tx);
let mut res = event_man.disable_tm_for_event_with_sev(&INFO_EVENT); let mut res = event_man.disable_tm_for_event_with_sev(&INFO_EVENT);
assert!(res.is_ok()); assert!(res.is_ok());
assert!(res.unwrap()); assert!(res.unwrap());

View File

@ -7,27 +7,40 @@ use spacepackets::ecss::PusPacket;
use std::sync::mpsc::Sender; use std::sync::mpsc::Sender;
use super::verification::VerificationReportingProvider; use super::verification::VerificationReportingProvider;
use super::{EcssTcInMemConverter, PusServiceBase, PusServiceHelper}; use super::{
get_current_cds_short_timestamp, EcssTcInMemConverter, EcssTcReceiverCore, EcssTmSenderCore,
PusServiceHelper,
};
pub struct PusService5EventHandler< pub struct PusService5EventHandler<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
> { > {
pub service_helper: PusServiceHelper<TcInMemConverter, VerificationReporter>, pub service_helper:
PusServiceHelper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
event_request_tx: Sender<EventRequestWithToken>, event_request_tx: Sender<EventRequestWithToken>,
} }
impl< impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
> PusService5EventHandler<TcInMemConverter, VerificationReporter> > PusService5EventHandler<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
{ {
pub fn new( pub fn new(
service_handler: PusServiceHelper<TcInMemConverter, VerificationReporter>, service_helper: PusServiceHelper<
TcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
>,
event_request_tx: Sender<EventRequestWithToken>, event_request_tx: Sender<EventRequestWithToken>,
) -> Self { ) -> Self {
Self { Self {
service_helper: service_handler, service_helper,
event_request_tx, event_request_tx,
} }
} }
@ -94,9 +107,7 @@ impl<
Ok(PusPacketHandlerResult::RequestHandled) Ok(PusPacketHandlerResult::RequestHandled)
}; };
let mut partial_error = None; let mut partial_error = None;
let time_stamp = PusServiceBase::<VerificationReporter>::get_current_cds_short_timestamp( let time_stamp = get_current_cds_short_timestamp(&mut partial_error);
&mut partial_error,
);
match srv.unwrap() { match srv.unwrap() {
Subservice::TmInfoReport Subservice::TmInfoReport
| Subservice::TmLowSeverityReport | Subservice::TmLowSeverityReport
@ -138,7 +149,10 @@ mod tests {
use crate::pus::event_man::EventRequest; use crate::pus::event_man::EventRequest;
use crate::pus::tests::SimplePusPacketHandler; use crate::pus::tests::SimplePusPacketHandler;
use crate::pus::verification::{RequestId, VerificationReporterWithSender}; use crate::pus::verification::{
RequestId, VerificationReporterWithSharedPoolMpscBoundedSender,
};
use crate::pus::{MpscTcReceiver, TmInSharedPoolSenderWithBoundedMpsc};
use crate::{ use crate::{
events::EventU32, events::EventU32,
pus::{ pus::{
@ -155,8 +169,12 @@ mod tests {
struct Pus5HandlerWithStoreTester { struct Pus5HandlerWithStoreTester {
common: PusServiceHandlerWithSharedStoreCommon, common: PusServiceHandlerWithSharedStoreCommon,
handler: handler: PusService5EventHandler<
PusService5EventHandler<EcssTcInSharedStoreConverter, VerificationReporterWithSender>, MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
>,
} }
impl Pus5HandlerWithStoreTester { impl Pus5HandlerWithStoreTester {

View File

@ -46,7 +46,7 @@ pub mod alloc_mod {
/// - Checking the validity of the APID, service ID, subservice ID. /// - Checking the validity of the APID, service ID, subservice ID.
/// - Checking the validity of the user data. /// - Checking the validity of the user data.
/// ///
/// A [VerificationReporterWithSender] instance is passed to the user to also allow handling /// A [VerificationReportingProvider] is passed to the user to also allow handling
/// of the verification process as part of the PUS standard requirements. /// of the verification process as part of the PUS standard requirements.
pub trait PusHkToRequestConverter { pub trait PusHkToRequestConverter {
type Error; type Error;
@ -64,9 +64,9 @@ pub mod alloc_mod {
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))] #[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
pub mod std_mod { pub mod std_mod {
use crate::pus::{ use crate::pus::{
verification::VerificationReportingProvider, EcssTcInMemConverter, GenericRoutingError, get_current_cds_short_timestamp, verification::VerificationReportingProvider,
PusPacketHandlerResult, PusPacketHandlingError, PusRoutingErrorHandler, PusServiceBase, EcssTcInMemConverter, EcssTcReceiverCore, EcssTmSenderCore, GenericRoutingError,
PusServiceHelper, PusPacketHandlerResult, PusPacketHandlingError, PusRoutingErrorHandler, PusServiceHelper,
}; };
use super::*; use super::*;
@ -78,13 +78,15 @@ pub mod std_mod {
/// 1. Retrieve the next TC packet from the [PusServiceHelper]. The [EcssTcInMemConverter] /// 1. Retrieve the next TC packet from the [PusServiceHelper]. The [EcssTcInMemConverter]
/// allows to configure the used telecommand memory backend. /// allows to configure the used telecommand memory backend.
/// 2. Convert the TC to a targeted action request using the provided /// 2. Convert the TC to a targeted action request using the provided
/// [PusActionToRequestConverter]. The generic error type is constrained to the /// [PusHkToRequestConverter]. The generic error type is constrained to the
/// [PusPacketHandlerResult] for the concrete implementation which offers a packet handler. /// [PusPacketHandlerResult] for the concrete implementation which offers a packet handler.
/// 3. Route the action request using the provided [PusActionRequestRouter]. The generic error /// 3. Route the action request using the provided [PusHkRequestRouter]. The generic error
/// type is constrained to the [GenericRoutingError] for the concrete implementation. /// type is constrained to the [GenericRoutingError] for the concrete implementation.
/// 4. Handle all routing errors using the provided [PusRoutingErrorHandler]. The generic error /// 4. Handle all routing errors using the provided [PusRoutingErrorHandler]. The generic error
/// type is constrained to the [GenericRoutingError] for the concrete implementation. /// type is constrained to the [GenericRoutingError] for the concrete implementation.
pub struct PusService3HkHandler< pub struct PusService3HkHandler<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
RequestConverter: PusHkToRequestConverter, RequestConverter: PusHkToRequestConverter,
@ -92,13 +94,16 @@ pub mod std_mod {
RoutingErrorHandler: PusRoutingErrorHandler<Error = RoutingError>, RoutingErrorHandler: PusRoutingErrorHandler<Error = RoutingError>,
RoutingError = GenericRoutingError, RoutingError = GenericRoutingError,
> { > {
service_helper: PusServiceHelper<TcInMemConverter, VerificationReporter>, service_helper:
PusServiceHelper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
pub request_converter: RequestConverter, pub request_converter: RequestConverter,
pub request_router: RequestRouter, pub request_router: RequestRouter,
pub routing_error_handler: RoutingErrorHandler, pub routing_error_handler: RoutingErrorHandler,
} }
impl< impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
RequestConverter: PusHkToRequestConverter<Error = PusPacketHandlingError>, RequestConverter: PusHkToRequestConverter<Error = PusPacketHandlingError>,
@ -107,6 +112,8 @@ pub mod std_mod {
RoutingError: Clone, RoutingError: Clone,
> >
PusService3HkHandler< PusService3HkHandler<
TcReceiver,
TmSender,
TcInMemConverter, TcInMemConverter,
VerificationReporter, VerificationReporter,
RequestConverter, RequestConverter,
@ -118,7 +125,12 @@ pub mod std_mod {
PusPacketHandlingError: From<RoutingError>, PusPacketHandlingError: From<RoutingError>,
{ {
pub fn new( pub fn new(
service_helper: PusServiceHelper<TcInMemConverter, VerificationReporter>, service_helper: PusServiceHelper<
TcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
>,
request_converter: RequestConverter, request_converter: RequestConverter,
request_router: RequestRouter, request_router: RequestRouter,
routing_error_handler: RoutingErrorHandler, routing_error_handler: RoutingErrorHandler,
@ -142,10 +154,7 @@ pub mod std_mod {
.tc_in_mem_converter .tc_in_mem_converter
.convert_ecss_tc_in_memory_to_reader(&ecss_tc_and_token.tc_in_memory)?; .convert_ecss_tc_in_memory_to_reader(&ecss_tc_and_token.tc_in_memory)?;
let mut partial_error = None; let mut partial_error = None;
let time_stamp = let time_stamp = get_current_cds_short_timestamp(&mut partial_error);
PusServiceBase::<VerificationReporter>::get_current_cds_short_timestamp(
&mut partial_error,
);
let (target_id, hk_request) = self.request_converter.convert( let (target_id, hk_request) = self.request_converter.convert(
ecss_tc_and_token.token, ecss_tc_and_token.token,
&tc, &tc,
@ -185,6 +194,7 @@ mod tests {
CcsdsPacket, SequenceFlags, SpHeader, CcsdsPacket, SequenceFlags, SpHeader,
}; };
use crate::pus::{MpscTcReceiver, TmAsVecSenderWithMpsc};
use crate::{ use crate::{
hk::HkRequest, hk::HkRequest,
pus::{ pus::{
@ -268,6 +278,8 @@ mod tests {
struct Pus3HandlerWithVecTester { struct Pus3HandlerWithVecTester {
common: PusServiceHandlerWithVecCommon<TestVerificationReporter>, common: PusServiceHandlerWithVecCommon<TestVerificationReporter>,
handler: PusService3HkHandler< handler: PusService3HkHandler<
MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter, EcssTcInVecConverter,
TestVerificationReporter, TestVerificationReporter,
TestConverter<3>, TestConverter<3>,

View File

@ -2,6 +2,8 @@
//! //!
//! This module contains structures to make working with the PUS C standard easier. //! This module contains structures to make working with the PUS C standard easier.
//! The satrs-example application contains various usage examples of these components. //! The satrs-example application contains various usage examples of these components.
use crate::pool::{StoreAddr, StoreError};
use crate::pus::verification::{TcStateAccepted, TcStateToken, VerificationToken};
use crate::queue::{GenericRecvError, GenericSendError}; use crate::queue::{GenericRecvError, GenericSendError};
use crate::ChannelId; use crate::ChannelId;
use core::fmt::{Display, Formatter}; use core::fmt::{Display, Formatter};
@ -34,8 +36,6 @@ pub mod verification;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
pub use alloc_mod::*; pub use alloc_mod::*;
use crate::pool::{StoreAddr, StoreError};
use crate::pus::verification::{TcStateAccepted, TcStateToken, VerificationToken};
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub use std_mod::*; pub use std_mod::*;
@ -63,6 +63,7 @@ pub enum EcssTmtcError {
Store(StoreError), Store(StoreError),
Pus(PusError), Pus(PusError),
CantSendAddr(StoreAddr), CantSendAddr(StoreAddr),
CantSendDirectTm,
Send(GenericSendError), Send(GenericSendError),
Recv(GenericRecvError), Recv(GenericRecvError),
} }
@ -82,6 +83,9 @@ impl Display for EcssTmtcError {
EcssTmtcError::CantSendAddr(addr) => { EcssTmtcError::CantSendAddr(addr) => {
write!(f, "can not send address {addr}") write!(f, "can not send address {addr}")
} }
EcssTmtcError::CantSendDirectTm => {
write!(f, "can not send TM directly")
}
EcssTmtcError::Send(send_e) => { EcssTmtcError::Send(send_e) => {
write!(f, "send error {send_e}") write!(f, "send error {send_e}")
} }
@ -123,13 +127,14 @@ impl Error for EcssTmtcError {
EcssTmtcError::Store(e) => Some(e), EcssTmtcError::Store(e) => Some(e),
EcssTmtcError::Pus(e) => Some(e), EcssTmtcError::Pus(e) => Some(e),
EcssTmtcError::Send(e) => Some(e), EcssTmtcError::Send(e) => Some(e),
EcssTmtcError::Recv(e) => Some(e),
_ => None, _ => None,
} }
} }
} }
pub trait EcssChannel: Send { pub trait EcssChannel: Send {
/// Each sender can have an ID associated with it /// Each sender can have an ID associated with it
fn id(&self) -> ChannelId; fn channel_id(&self) -> ChannelId;
fn name(&self) -> &'static str { fn name(&self) -> &'static str {
"unset" "unset"
} }
@ -138,7 +143,7 @@ pub trait EcssChannel: Send {
/// Generic trait for a user supplied sender object. /// Generic trait for a user supplied sender object.
/// ///
/// This sender object is responsible for sending PUS telemetry to a TM sink. /// This sender object is responsible for sending PUS telemetry to a TM sink.
pub trait EcssTmSenderCore: EcssChannel { pub trait EcssTmSenderCore: Send {
fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError>; fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError>;
} }
@ -146,7 +151,7 @@ pub trait EcssTmSenderCore: EcssChannel {
/// ///
/// This sender object is responsible for sending PUS telecommands to a TC recipient. Each /// This sender object is responsible for sending PUS telecommands to a TC recipient. Each
/// telecommand can optionally have a token which contains its verification state. /// telecommand can optionally have a token which contains its verification state.
pub trait EcssTcSenderCore: EcssChannel { pub trait EcssTcSenderCore {
fn send_tc(&self, tc: PusTcCreator, token: Option<TcStateToken>) -> Result<(), EcssTmtcError>; fn send_tc(&self, tc: PusTcCreator, token: Option<TcStateToken>) -> Result<(), EcssTmtcError>;
} }
@ -221,25 +226,25 @@ impl TryFrom<EcssTcAndToken> for AcceptedEcssTcAndToken {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum TryRecvTmtcError { pub enum TryRecvTmtcError {
Error(EcssTmtcError), Tmtc(EcssTmtcError),
Empty, Empty,
} }
impl From<EcssTmtcError> for TryRecvTmtcError { impl From<EcssTmtcError> for TryRecvTmtcError {
fn from(value: EcssTmtcError) -> Self { fn from(value: EcssTmtcError) -> Self {
Self::Error(value) Self::Tmtc(value)
} }
} }
impl From<PusError> for TryRecvTmtcError { impl From<PusError> for TryRecvTmtcError {
fn from(value: PusError) -> Self { fn from(value: PusError) -> Self {
Self::Error(value.into()) Self::Tmtc(value.into())
} }
} }
impl From<StoreError> for TryRecvTmtcError { impl From<StoreError> for TryRecvTmtcError {
fn from(value: StoreError) -> Self { fn from(value: StoreError) -> Self {
Self::Error(value.into()) Self::Tmtc(value.into())
} }
} }
@ -366,18 +371,15 @@ pub mod std_mod {
use crate::pool::{PoolProvider, PoolProviderWithGuards, SharedStaticMemoryPool, StoreAddr}; use crate::pool::{PoolProvider, PoolProviderWithGuards, SharedStaticMemoryPool, StoreAddr};
use crate::pus::verification::{TcStateAccepted, VerificationToken}; use crate::pus::verification::{TcStateAccepted, VerificationToken};
use crate::pus::{ use crate::pus::{
EcssChannel, EcssTcAndToken, EcssTcReceiver, EcssTcReceiverCore, EcssTmSender, EcssChannel, EcssTcAndToken, EcssTcReceiverCore, EcssTmSenderCore, EcssTmtcError,
EcssTmSenderCore, EcssTmtcError, GenericRecvError, GenericSendError, PusTmWrapper, GenericRecvError, GenericSendError, PusTmWrapper, TryRecvTmtcError,
TryRecvTmtcError,
}; };
use crate::tmtc::tm_helper::SharedTmPool; use crate::tmtc::tm_helper::SharedTmPool;
use crate::{ChannelId, TargetId}; use crate::{ChannelId, TargetId};
use alloc::boxed::Box;
use alloc::vec::Vec; use alloc::vec::Vec;
use crossbeam_channel as cb;
use spacepackets::ecss::tc::PusTcReader; use spacepackets::ecss::tc::PusTcReader;
use spacepackets::ecss::tm::PusTmCreator; use spacepackets::ecss::tm::PusTmCreator;
use spacepackets::ecss::PusError; use spacepackets::ecss::{PusError, WritablePusPacket};
use spacepackets::time::cds::TimeProvider; use spacepackets::time::cds::TimeProvider;
use spacepackets::time::StdTimestampError; use spacepackets::time::StdTimestampError;
use spacepackets::time::TimeWriter; use spacepackets::time::TimeWriter;
@ -386,6 +388,9 @@ pub mod std_mod {
use std::sync::mpsc::TryRecvError; use std::sync::mpsc::TryRecvError;
use thiserror::Error; use thiserror::Error;
#[cfg(feature = "crossbeam")]
pub use cb_mod::*;
use super::verification::VerificationReportingProvider; use super::verification::VerificationReportingProvider;
use super::{AcceptedEcssTcAndToken, TcInMemory}; use super::{AcceptedEcssTcAndToken, TcInMemory};
@ -395,32 +400,65 @@ pub mod std_mod {
} }
} }
impl From<cb::SendError<StoreAddr>> for EcssTmtcError { impl EcssTmSenderCore for mpsc::Sender<StoreAddr> {
fn from(_: cb::SendError<StoreAddr>) -> Self { fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> {
Self::Send(GenericSendError::RxDisconnected) match tm {
PusTmWrapper::InStore(addr) => self
.send(addr)
.map_err(|_| GenericSendError::RxDisconnected)?,
PusTmWrapper::Direct(_) => return Err(EcssTmtcError::CantSendDirectTm),
};
Ok(())
} }
} }
impl From<cb::TrySendError<StoreAddr>> for EcssTmtcError { impl EcssTmSenderCore for mpsc::SyncSender<StoreAddr> {
fn from(value: cb::TrySendError<StoreAddr>) -> Self { fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> {
match value { match tm {
cb::TrySendError::Full(_) => Self::Send(GenericSendError::QueueFull(None)), PusTmWrapper::InStore(addr) => self
cb::TrySendError::Disconnected(_) => Self::Send(GenericSendError::RxDisconnected), .try_send(addr)
.map_err(|e| EcssTmtcError::Send(e.into()))?,
PusTmWrapper::Direct(_) => return Err(EcssTmtcError::CantSendDirectTm),
};
Ok(())
} }
} }
impl EcssTmSenderCore for mpsc::Sender<Vec<u8>> {
fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> {
match tm {
PusTmWrapper::InStore(addr) => return Err(EcssTmtcError::CantSendAddr(addr)),
PusTmWrapper::Direct(tm) => self
.send(tm.to_vec()?)
.map_err(|e| EcssTmtcError::Send(e.into()))?,
};
Ok(())
}
}
impl EcssTmSenderCore for mpsc::SyncSender<Vec<u8>> {
fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> {
match tm {
PusTmWrapper::InStore(addr) => return Err(EcssTmtcError::CantSendAddr(addr)),
PusTmWrapper::Direct(tm) => self
.send(tm.to_vec()?)
.map_err(|e| EcssTmtcError::Send(e.into()))?,
};
Ok(())
}
} }
#[derive(Clone)] #[derive(Clone)]
pub struct MpscTmInSharedPoolSender { pub struct TmInSharedPoolSenderWithId<Sender: EcssTmSenderCore> {
id: ChannelId, channel_id: ChannelId,
name: &'static str, name: &'static str,
shared_tm_store: SharedTmPool, shared_tm_store: SharedTmPool,
sender: mpsc::Sender<StoreAddr>, sender: Sender,
} }
impl EcssChannel for MpscTmInSharedPoolSender { impl<Sender: EcssTmSenderCore> EcssChannel for TmInSharedPoolSenderWithId<Sender> {
fn id(&self) -> ChannelId { fn channel_id(&self) -> ChannelId {
self.id self.channel_id
} }
fn name(&self) -> &'static str { fn name(&self) -> &'static str {
@ -428,36 +466,31 @@ pub mod std_mod {
} }
} }
impl MpscTmInSharedPoolSender { impl<Sender: EcssTmSenderCore> TmInSharedPoolSenderWithId<Sender> {
pub fn send_direct_tm(&self, tm: PusTmCreator) -> Result<(), EcssTmtcError> { pub fn send_direct_tm(&self, tm: PusTmCreator) -> Result<(), EcssTmtcError> {
let addr = self.shared_tm_store.add_pus_tm(&tm)?; let addr = self.shared_tm_store.add_pus_tm(&tm)?;
self.sender self.sender.send_tm(PusTmWrapper::InStore(addr))
.send(addr)
.map_err(|_| EcssTmtcError::Send(GenericSendError::RxDisconnected))
} }
} }
impl EcssTmSenderCore for MpscTmInSharedPoolSender { impl<Sender: EcssTmSenderCore> EcssTmSenderCore for TmInSharedPoolSenderWithId<Sender> {
fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> { fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> {
match tm { if let PusTmWrapper::Direct(tm) = tm {
PusTmWrapper::InStore(addr) => { return self.send_direct_tm(tm);
self.sender.send(addr)?;
Ok(())
}
PusTmWrapper::Direct(tm) => self.send_direct_tm(tm),
} }
self.sender.send_tm(tm)
} }
} }
impl MpscTmInSharedPoolSender { impl<Sender: EcssTmSenderCore> TmInSharedPoolSenderWithId<Sender> {
pub fn new( pub fn new(
id: ChannelId, id: ChannelId,
name: &'static str, name: &'static str,
shared_tm_store: SharedTmPool, shared_tm_store: SharedTmPool,
sender: mpsc::Sender<StoreAddr>, sender: Sender,
) -> Self { ) -> Self {
Self { Self {
id, channel_id: id,
name, name,
shared_tm_store, shared_tm_store,
sender, sender,
@ -465,6 +498,51 @@ pub mod std_mod {
} }
} }
pub type TmInSharedPoolSenderWithMpsc = TmInSharedPoolSenderWithId<mpsc::Sender<StoreAddr>>;
pub type TmInSharedPoolSenderWithBoundedMpsc =
TmInSharedPoolSenderWithId<mpsc::SyncSender<StoreAddr>>;
/// This class can be used if frequent heap allocations during run-time are not an issue.
/// PUS TM packets will be sent around as [Vec]s. Please note that the current implementation
/// of this class can not deal with store addresses, so it is assumed that is is always
/// going to be called with direct packets.
#[derive(Clone)]
pub struct TmAsVecSenderWithId<Sender: EcssTmSenderCore> {
id: ChannelId,
name: &'static str,
sender: Sender,
}
impl From<mpsc::SendError<Vec<u8>>> for EcssTmtcError {
fn from(_: mpsc::SendError<Vec<u8>>) -> Self {
Self::Send(GenericSendError::RxDisconnected)
}
}
impl<Sender: EcssTmSenderCore> TmAsVecSenderWithId<Sender> {
pub fn new(id: u32, name: &'static str, sender: Sender) -> Self {
Self { id, sender, name }
}
}
impl<Sender: EcssTmSenderCore> EcssChannel for TmAsVecSenderWithId<Sender> {
fn channel_id(&self) -> ChannelId {
self.id
}
fn name(&self) -> &'static str {
self.name
}
}
impl<Sender: EcssTmSenderCore> EcssTmSenderCore for TmAsVecSenderWithId<Sender> {
fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> {
self.sender.send_tm(tm)
}
}
pub type TmAsVecSenderWithMpsc = TmAsVecSenderWithId<mpsc::Sender<Vec<u8>>>;
pub type TmAsVecSenderWithBoundedMpsc = TmAsVecSenderWithId<mpsc::SyncSender<Vec<u8>>>;
pub struct MpscTcReceiver { pub struct MpscTcReceiver {
id: ChannelId, id: ChannelId,
name: &'static str, name: &'static str,
@ -472,7 +550,7 @@ pub mod std_mod {
} }
impl EcssChannel for MpscTcReceiver { impl EcssChannel for MpscTcReceiver {
fn id(&self) -> ChannelId { fn channel_id(&self) -> ChannelId {
self.id self.id
} }
@ -486,7 +564,7 @@ pub mod std_mod {
self.receiver.try_recv().map_err(|e| match e { self.receiver.try_recv().map_err(|e| match e {
TryRecvError::Empty => TryRecvTmtcError::Empty, TryRecvError::Empty => TryRecvTmtcError::Empty,
TryRecvError::Disconnected => { TryRecvError::Disconnected => {
TryRecvTmtcError::Error(EcssTmtcError::from(GenericRecvError::TxDisconnected)) TryRecvTmtcError::Tmtc(EcssTmtcError::from(GenericRecvError::TxDisconnected))
} }
}) })
} }
@ -502,95 +580,50 @@ pub mod std_mod {
} }
} }
/// This class can be used if frequent heap allocations during run-time are not an issue. #[cfg(feature = "crossbeam")]
/// PUS TM packets will be sent around as [Vec]s. Please note that the current implementation pub mod cb_mod {
/// of this class can not deal with store addresses, so it is assumed that is is always use super::*;
/// going to be called with direct packets. use crossbeam_channel as cb;
#[derive(Clone)]
pub struct MpscTmAsVecSender {
id: ChannelId,
name: &'static str,
sender: mpsc::Sender<Vec<u8>>,
}
impl From<mpsc::SendError<Vec<u8>>> for EcssTmtcError { pub type TmInSharedPoolSenderWithCrossbeam =
fn from(_: mpsc::SendError<Vec<u8>>) -> Self { TmInSharedPoolSenderWithId<cb::Sender<StoreAddr>>;
impl From<cb::SendError<StoreAddr>> for EcssTmtcError {
fn from(_: cb::SendError<StoreAddr>) -> Self {
Self::Send(GenericSendError::RxDisconnected) Self::Send(GenericSendError::RxDisconnected)
} }
} }
impl MpscTmAsVecSender { impl From<cb::TrySendError<StoreAddr>> for EcssTmtcError {
pub fn new(id: u32, name: &'static str, sender: mpsc::Sender<Vec<u8>>) -> Self { fn from(value: cb::TrySendError<StoreAddr>) -> Self {
Self { id, sender, name } match value {
cb::TrySendError::Full(_) => Self::Send(GenericSendError::QueueFull(None)),
cb::TrySendError::Disconnected(_) => {
Self::Send(GenericSendError::RxDisconnected)
}
}
} }
} }
impl EcssChannel for MpscTmAsVecSender { impl EcssTmSenderCore for cb::Sender<StoreAddr> {
fn id(&self) -> ChannelId {
self.id
}
fn name(&self) -> &'static str {
self.name
}
}
impl EcssTmSenderCore for MpscTmAsVecSender {
fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> { fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> {
match tm { match tm {
PusTmWrapper::InStore(addr) => Err(EcssTmtcError::CantSendAddr(addr)), PusTmWrapper::InStore(addr) => self
PusTmWrapper::Direct(tm) => { .try_send(addr)
let mut vec = Vec::new(); .map_err(|e| EcssTmtcError::Send(e.into()))?,
tm.append_to_vec(&mut vec).map_err(EcssTmtcError::Pus)?; PusTmWrapper::Direct(_) => return Err(EcssTmtcError::CantSendDirectTm),
self.sender.send(vec)?; };
Ok(()) Ok(())
} }
} }
} impl EcssTmSenderCore for cb::Sender<Vec<u8>> {
}
#[derive(Clone)]
pub struct CrossbeamTmInStoreSender {
id: ChannelId,
name: &'static str,
shared_tm_store: SharedTmPool,
sender: crossbeam_channel::Sender<StoreAddr>,
}
impl CrossbeamTmInStoreSender {
pub fn new(
id: ChannelId,
name: &'static str,
shared_tm_store: SharedTmPool,
sender: crossbeam_channel::Sender<StoreAddr>,
) -> Self {
Self {
id,
name,
shared_tm_store,
sender,
}
}
}
impl EcssChannel for CrossbeamTmInStoreSender {
fn id(&self) -> ChannelId {
self.id
}
fn name(&self) -> &'static str {
self.name
}
}
impl EcssTmSenderCore for CrossbeamTmInStoreSender {
fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> { fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> {
match tm { match tm {
PusTmWrapper::InStore(addr) => self.sender.try_send(addr)?, PusTmWrapper::InStore(addr) => return Err(EcssTmtcError::CantSendAddr(addr)),
PusTmWrapper::Direct(tm) => { PusTmWrapper::Direct(tm) => self
let addr = self.shared_tm_store.add_pus_tm(&tm)?; .send(tm.to_vec()?)
self.sender.try_send(addr)?; .map_err(|e| EcssTmtcError::Send(e.into()))?,
} };
}
Ok(()) Ok(())
} }
} }
@ -612,7 +645,7 @@ pub mod std_mod {
} }
impl EcssChannel for CrossbeamTcReceiver { impl EcssChannel for CrossbeamTcReceiver {
fn id(&self) -> ChannelId { fn channel_id(&self) -> ChannelId {
self.id self.id
} }
@ -625,12 +658,13 @@ pub mod std_mod {
fn recv_tc(&self) -> Result<EcssTcAndToken, TryRecvTmtcError> { fn recv_tc(&self) -> Result<EcssTcAndToken, TryRecvTmtcError> {
self.receiver.try_recv().map_err(|e| match e { self.receiver.try_recv().map_err(|e| match e {
cb::TryRecvError::Empty => TryRecvTmtcError::Empty, cb::TryRecvError::Empty => TryRecvTmtcError::Empty,
cb::TryRecvError::Disconnected => { cb::TryRecvError::Disconnected => TryRecvTmtcError::Tmtc(EcssTmtcError::from(
TryRecvTmtcError::Error(EcssTmtcError::from(GenericRecvError::TxDisconnected)) GenericRecvError::TxDisconnected,
} )),
}) })
} }
} }
}
// TODO: All these types could probably be no_std if we implemented error handling ourselves.. // TODO: All these types could probably be no_std if we implemented error handling ourselves..
// but thiserror is really nice, so keep it like this for simplicity for now. Maybe thiserror // but thiserror is really nice, so keep it like this for simplicity for now. Maybe thiserror
@ -811,16 +845,16 @@ pub mod std_mod {
} }
} }
pub struct PusServiceBase<VerificationReporter: VerificationReportingProvider> { pub struct PusServiceBase<
pub tc_receiver: Box<dyn EcssTcReceiver>, TcReceiver: EcssTcReceiverCore,
pub tm_sender: Box<dyn EcssTmSender>, TmSender: EcssTmSenderCore,
VerificationReporter: VerificationReportingProvider,
> {
pub tc_receiver: TcReceiver,
pub tm_sender: TmSender,
pub tm_apid: u16, pub tm_apid: u16,
/// The verification handler is wrapped in a [RefCell] to allow the interior mutability
/// pattern. This makes writing methods which are not mutable a lot easier.
pub verification_handler: VerificationReporter, pub verification_handler: VerificationReporter,
} }
impl<VerificationReporter: VerificationReportingProvider> PusServiceBase<VerificationReporter> {
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub fn get_current_cds_short_timestamp( pub fn get_current_cds_short_timestamp(
partial_error: &mut Option<PartialPusHandlingError>, partial_error: &mut Option<PartialPusHandlingError>,
@ -839,8 +873,7 @@ pub mod std_mod {
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub fn get_current_timestamp_ignore_error() -> [u8; 7] { pub fn get_current_timestamp_ignore_error() -> [u8; 7] {
let mut dummy = None; let mut dummy = None;
Self::get_current_cds_short_timestamp(&mut dummy) get_current_cds_short_timestamp(&mut dummy)
}
} }
/// This is a high-level PUS packet handler helper. /// This is a high-level PUS packet handler helper.
@ -853,21 +886,25 @@ pub mod std_mod {
/// by using the [EcssTcInMemConverter] abstraction. This object provides some convenience /// by using the [EcssTcInMemConverter] abstraction. This object provides some convenience
/// methods to make the generic parts of TC handling easier. /// methods to make the generic parts of TC handling easier.
pub struct PusServiceHelper< pub struct PusServiceHelper<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
> { > {
pub common: PusServiceBase<VerificationReporter>, pub common: PusServiceBase<TcReceiver, TmSender, VerificationReporter>,
pub tc_in_mem_converter: TcInMemConverter, pub tc_in_mem_converter: TcInMemConverter,
} }
impl< impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
> PusServiceHelper<TcInMemConverter, VerificationReporter> > PusServiceHelper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
{ {
pub fn new( pub fn new(
tc_receiver: Box<dyn EcssTcReceiver>, tc_receiver: TcReceiver,
tm_sender: Box<dyn EcssTmSender>, tm_sender: TmSender,
tm_apid: u16, tm_apid: u16,
verification_handler: VerificationReporter, verification_handler: VerificationReporter,
tc_in_mem_converter: TcInMemConverter, tc_in_mem_converter: TcInMemConverter,
@ -883,10 +920,10 @@ pub mod std_mod {
} }
} }
/// This function can be used to poll the internal [EcssTcReceiver] object for the next /// This function can be used to poll the internal [EcssTcReceiverCore] object for the next
/// telecommand packet. It will return `Ok(None)` if there are not packets available. /// telecommand packet. It will return `Ok(None)` if there are not packets available.
/// In any other case, it will perform the acceptance of the ECSS TC packet using the /// In any other case, it will perform the acceptance of the ECSS TC packet using the
/// internal [VerificationReporterWithSender] object. It will then return the telecommand /// internal [VerificationReportingProvider] object. It will then return the telecommand
/// and the according accepted token. /// and the according accepted token.
pub fn retrieve_and_accept_next_packet( pub fn retrieve_and_accept_next_packet(
&mut self, &mut self,
@ -908,12 +945,40 @@ pub mod std_mod {
})) }))
} }
Err(e) => match e { Err(e) => match e {
TryRecvTmtcError::Error(e) => Err(PusPacketHandlingError::EcssTmtc(e)), TryRecvTmtcError::Tmtc(e) => Err(PusPacketHandlingError::EcssTmtc(e)),
TryRecvTmtcError::Empty => Ok(None), TryRecvTmtcError::Empty => Ok(None),
}, },
} }
} }
} }
pub type PusServiceHelperDynWithMpsc<TcInMemConverter, VerificationReporter> = PusServiceHelper<
MpscTcReceiver,
TmAsVecSenderWithMpsc,
TcInMemConverter,
VerificationReporter,
>;
pub type PusServiceHelperDynWithBoundedMpsc<TcInMemConverter, VerificationReporter> =
PusServiceHelper<
MpscTcReceiver,
TmAsVecSenderWithBoundedMpsc,
TcInMemConverter,
VerificationReporter,
>;
pub type PusServiceHelperStaticWithMpsc<TcInMemConverter, VerificationReporter> =
PusServiceHelper<
MpscTcReceiver,
TmInSharedPoolSenderWithMpsc,
TcInMemConverter,
VerificationReporter,
>;
pub type PusServiceHelperStaticWithBoundedMpsc<TcInMemConverter, VerificationReporter> =
PusServiceHelper<
MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
TcInMemConverter,
VerificationReporter,
>;
} }
pub(crate) fn source_buffer_large_enough(cap: usize, len: usize) -> Result<(), EcssTmtcError> { pub(crate) fn source_buffer_large_enough(cap: usize, len: usize) -> Result<(), EcssTmtcError> {
@ -935,7 +1000,6 @@ pub mod tests {
use std::sync::mpsc::TryRecvError; use std::sync::mpsc::TryRecvError;
use std::sync::{mpsc, RwLock}; use std::sync::{mpsc, RwLock};
use alloc::boxed::Box;
use alloc::collections::VecDeque; use alloc::collections::VecDeque;
use alloc::vec::Vec; use alloc::vec::Vec;
use satrs_shared::res_code::ResultU16; use satrs_shared::res_code::ResultU16;
@ -951,6 +1015,9 @@ pub mod tests {
use crate::tmtc::tm_helper::SharedTmPool; use crate::tmtc::tm_helper::SharedTmPool;
use crate::TargetId; use crate::TargetId;
use super::verification::std_mod::{
VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
};
use super::verification::tests::{SharedVerificationMap, TestVerificationReporter}; use super::verification::tests::{SharedVerificationMap, TestVerificationReporter};
use super::verification::{ use super::verification::{
TcStateAccepted, VerificationReporterCfg, VerificationReporterWithSender, TcStateAccepted, VerificationReporterCfg, VerificationReporterWithSender,
@ -958,8 +1025,9 @@ pub mod tests {
}; };
use super::{ use super::{
EcssTcAndToken, EcssTcInSharedStoreConverter, EcssTcInVecConverter, GenericRoutingError, EcssTcAndToken, EcssTcInSharedStoreConverter, EcssTcInVecConverter, GenericRoutingError,
MpscTcReceiver, MpscTmAsVecSender, MpscTmInSharedPoolSender, PusPacketHandlerResult, MpscTcReceiver, PusPacketHandlerResult, PusPacketHandlingError, PusRoutingErrorHandler,
PusPacketHandlingError, PusRoutingErrorHandler, PusServiceHelper, TcInMemory, PusServiceHelper, TcInMemory, TmAsVecSenderWithId, TmAsVecSenderWithMpsc,
TmInSharedPoolSenderWithBoundedMpsc, TmInSharedPoolSenderWithId,
}; };
pub const TEST_APID: u16 = 0x101; pub const TEST_APID: u16 = 0x101;
@ -1004,29 +1072,33 @@ pub mod tests {
tm_buf: [u8; 2048], tm_buf: [u8; 2048],
tc_pool: SharedStaticMemoryPool, tc_pool: SharedStaticMemoryPool,
tm_pool: SharedTmPool, tm_pool: SharedTmPool,
tc_sender: mpsc::Sender<EcssTcAndToken>, tc_sender: mpsc::SyncSender<EcssTcAndToken>,
tm_receiver: mpsc::Receiver<StoreAddr>, tm_receiver: mpsc::Receiver<StoreAddr>,
verification_handler: VerificationReporterWithSender, verification_handler: VerificationReporterWithSharedPoolMpscBoundedSender,
} }
pub type PusServiceHelperStatic = PusServiceHelper<
MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
>;
impl PusServiceHandlerWithSharedStoreCommon { impl PusServiceHandlerWithSharedStoreCommon {
/// This function generates the structure in addition to the PUS service handler /// This function generates the structure in addition to the PUS service handler
/// [PusServiceHandler] which might be required for a specific PUS service handler. /// [PusServiceHandler] which might be required for a specific PUS service handler.
/// ///
/// The PUS service handler is instantiated with a [EcssTcInStoreConverter]. /// The PUS service handler is instantiated with a [EcssTcInStoreConverter].
pub fn new() -> ( pub fn new() -> (Self, PusServiceHelperStatic) {
Self,
PusServiceHelper<EcssTcInSharedStoreConverter, VerificationReporterWithSender>,
) {
let pool_cfg = StaticPoolConfig::new(alloc::vec![(16, 16), (8, 32), (4, 64)], false); let pool_cfg = StaticPoolConfig::new(alloc::vec![(16, 16), (8, 32), (4, 64)], false);
let tc_pool = StaticMemoryPool::new(pool_cfg.clone()); let tc_pool = StaticMemoryPool::new(pool_cfg.clone());
let tm_pool = StaticMemoryPool::new(pool_cfg); let tm_pool = StaticMemoryPool::new(pool_cfg);
let shared_tc_pool = SharedStaticMemoryPool::new(RwLock::new(tc_pool)); let shared_tc_pool = SharedStaticMemoryPool::new(RwLock::new(tc_pool));
let shared_tm_pool = SharedTmPool::new(tm_pool); let shared_tm_pool = SharedTmPool::new(tm_pool);
let (test_srv_tc_tx, test_srv_tc_rx) = mpsc::channel(); let (test_srv_tc_tx, test_srv_tc_rx) = mpsc::sync_channel(10);
let (tm_tx, tm_rx) = mpsc::channel(); let (tm_tx, tm_rx) = mpsc::sync_channel(10);
let verif_sender = MpscTmInSharedPoolSender::new( let verif_sender = TmInSharedPoolSenderWithBoundedMpsc::new(
0, 0,
"verif_sender", "verif_sender",
shared_tm_pool.clone(), shared_tm_pool.clone(),
@ -1034,9 +1106,9 @@ pub mod tests {
); );
let verif_cfg = VerificationReporterCfg::new(TEST_APID, 1, 2, 8).unwrap(); let verif_cfg = VerificationReporterCfg::new(TEST_APID, 1, 2, 8).unwrap();
let verification_handler = let verification_handler =
VerificationReporterWithSender::new(&verif_cfg, Box::new(verif_sender)); VerificationReporterWithSharedPoolMpscBoundedSender::new(&verif_cfg, verif_sender);
let test_srv_tm_sender = let test_srv_tm_sender =
MpscTmInSharedPoolSender::new(0, "TEST_SENDER", shared_tm_pool.clone(), tm_tx); TmInSharedPoolSenderWithId::new(0, "TEST_SENDER", shared_tm_pool.clone(), tm_tx);
let test_srv_tc_receiver = MpscTcReceiver::new(0, "TEST_RECEIVER", test_srv_tc_rx); let test_srv_tc_receiver = MpscTcReceiver::new(0, "TEST_RECEIVER", test_srv_tc_rx);
let in_store_converter = let in_store_converter =
EcssTcInSharedStoreConverter::new(shared_tc_pool.clone(), 2048); EcssTcInSharedStoreConverter::new(shared_tc_pool.clone(), 2048);
@ -1051,8 +1123,8 @@ pub mod tests {
verification_handler: verification_handler.clone(), verification_handler: verification_handler.clone(),
}, },
PusServiceHelper::new( PusServiceHelper::new(
Box::new(test_srv_tc_receiver), test_srv_tc_receiver,
Box::new(test_srv_tm_sender), test_srv_tm_sender,
TEST_APID, TEST_APID,
verification_handler, verification_handler,
in_store_converter, in_store_converter,
@ -1116,21 +1188,24 @@ pub mod tests {
tm_receiver: mpsc::Receiver<alloc::vec::Vec<u8>>, tm_receiver: mpsc::Receiver<alloc::vec::Vec<u8>>,
pub verification_handler: VerificationReporter, pub verification_handler: VerificationReporter,
} }
pub type PusServiceHelperDynamic = PusServiceHelper<
MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscSender,
>;
impl PusServiceHandlerWithVecCommon<VerificationReporterWithSender> { impl PusServiceHandlerWithVecCommon<VerificationReporterWithVecMpscSender> {
pub fn new_with_standard_verif_reporter() -> ( pub fn new_with_standard_verif_reporter() -> (Self, PusServiceHelperDynamic) {
Self,
PusServiceHelper<EcssTcInVecConverter, VerificationReporterWithSender>,
) {
let (test_srv_tc_tx, test_srv_tc_rx) = mpsc::channel(); let (test_srv_tc_tx, test_srv_tc_rx) = mpsc::channel();
let (tm_tx, tm_rx) = mpsc::channel(); let (tm_tx, tm_rx) = mpsc::channel();
let verif_sender = MpscTmAsVecSender::new(0, "verififcatio-sender", tm_tx.clone()); let verif_sender = TmAsVecSenderWithId::new(0, "verififcatio-sender", tm_tx.clone());
let verif_cfg = VerificationReporterCfg::new(TEST_APID, 1, 2, 8).unwrap(); let verif_cfg = VerificationReporterCfg::new(TEST_APID, 1, 2, 8).unwrap();
let verification_handler = let verification_handler =
VerificationReporterWithSender::new(&verif_cfg, Box::new(verif_sender)); VerificationReporterWithSender::new(&verif_cfg, verif_sender);
let test_srv_tm_sender = MpscTmAsVecSender::new(0, "test-sender", tm_tx); let test_srv_tm_sender = TmAsVecSenderWithId::new(0, "test-sender", tm_tx);
let test_srv_tc_receiver = MpscTcReceiver::new(0, "test-receiver", test_srv_tc_rx); let test_srv_tc_receiver = MpscTcReceiver::new(0, "test-receiver", test_srv_tc_rx);
let in_store_converter = EcssTcInVecConverter::default(); let in_store_converter = EcssTcInVecConverter::default();
( (
@ -1141,8 +1216,8 @@ pub mod tests {
verification_handler: verification_handler.clone(), verification_handler: verification_handler.clone(),
}, },
PusServiceHelper::new( PusServiceHelper::new(
Box::new(test_srv_tc_receiver), test_srv_tc_receiver,
Box::new(test_srv_tm_sender), test_srv_tm_sender,
TEST_APID, TEST_APID,
verification_handler, verification_handler,
in_store_converter, in_store_converter,
@ -1154,12 +1229,17 @@ pub mod tests {
impl PusServiceHandlerWithVecCommon<TestVerificationReporter> { impl PusServiceHandlerWithVecCommon<TestVerificationReporter> {
pub fn new_with_test_verif_sender() -> ( pub fn new_with_test_verif_sender() -> (
Self, Self,
PusServiceHelper<EcssTcInVecConverter, TestVerificationReporter>, PusServiceHelper<
MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
TestVerificationReporter,
>,
) { ) {
let (test_srv_tc_tx, test_srv_tc_rx) = mpsc::channel(); let (test_srv_tc_tx, test_srv_tc_rx) = mpsc::channel();
let (tm_tx, tm_rx) = mpsc::channel(); let (tm_tx, tm_rx) = mpsc::channel();
let test_srv_tm_sender = MpscTmAsVecSender::new(0, "test-sender", tm_tx); let test_srv_tm_sender = TmAsVecSenderWithId::new(0, "test-sender", tm_tx);
let test_srv_tc_receiver = MpscTcReceiver::new(0, "test-receiver", test_srv_tc_rx); let test_srv_tc_receiver = MpscTcReceiver::new(0, "test-receiver", test_srv_tc_rx);
let in_store_converter = EcssTcInVecConverter::default(); let in_store_converter = EcssTcInVecConverter::default();
let shared_verif_map = SharedVerificationMap::default(); let shared_verif_map = SharedVerificationMap::default();
@ -1172,8 +1252,8 @@ pub mod tests {
verification_handler: verification_handler.clone(), verification_handler: verification_handler.clone(),
}, },
PusServiceHelper::new( PusServiceHelper::new(
Box::new(test_srv_tc_receiver), test_srv_tc_receiver,
Box::new(test_srv_tm_sender), test_srv_tm_sender,
TEST_APID, TEST_APID,
verification_handler, verification_handler,
in_store_converter, in_store_converter,

View File

@ -340,18 +340,18 @@ pub fn generate_insert_telecommand_app_data(
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
pub mod alloc_mod { pub mod alloc_mod {
use alloc::{
collections::{
btree_map::{Entry, Range},
BTreeMap,
},
vec::Vec,
};
use spacepackets::time::cds::{self, DaysLen24Bits};
use crate::pool::StoreAddr;
use super::*; use super::*;
use crate::pool::{PoolProvider, StoreAddr, StoreError};
use alloc::collections::btree_map::{Entry, Range};
use alloc::collections::BTreeMap;
use alloc::vec;
use alloc::vec::Vec;
use core::time::Duration;
use spacepackets::ecss::scheduling::TimeWindowType;
use spacepackets::ecss::tc::{PusTc, PusTcReader};
use spacepackets::ecss::PusPacket;
use spacepackets::time::cds::DaysLen24Bits;
use spacepackets::time::{cds, CcsdsTimeProvider, UnixTimestamp};
#[cfg(feature = "std")] #[cfg(feature = "std")]
use std::time::SystemTimeError; use std::time::SystemTimeError;
@ -461,7 +461,7 @@ pub mod alloc_mod {
} }
match self.tc_map.entry(time_stamp) { match self.tc_map.entry(time_stamp) {
Entry::Vacant(e) => { Entry::Vacant(e) => {
e.insert(vec![info]); e.insert(alloc::vec![info]);
} }
Entry::Occupied(mut v) => { Entry::Occupied(mut v) => {
v.get_mut().push(info); v.get_mut().push(info);
@ -498,7 +498,7 @@ pub mod alloc_mod {
/// short timestamp with 16-bit length of days field. /// short timestamp with 16-bit length of days field.
pub fn insert_wrapped_tc_cds_short( pub fn insert_wrapped_tc_cds_short(
&mut self, &mut self,
pus_tc: &PusTc, pus_tc: &(impl IsPusTelecommand + PusPacket + GenericPusTcSecondaryHeader),
pool: &mut (impl PoolProvider + ?Sized), pool: &mut (impl PoolProvider + ?Sized),
) -> Result<TcInfo, ScheduleError> { ) -> Result<TcInfo, ScheduleError> {
self.insert_wrapped_tc::<cds::TimeProvider>(pus_tc, pool) self.insert_wrapped_tc::<cds::TimeProvider>(pus_tc, pool)
@ -508,7 +508,7 @@ pub mod alloc_mod {
/// long timestamp with a 24-bit length of days field. /// long timestamp with a 24-bit length of days field.
pub fn insert_wrapped_tc_cds_long( pub fn insert_wrapped_tc_cds_long(
&mut self, &mut self,
pus_tc: &PusTc, pus_tc: &(impl IsPusTelecommand + PusPacket + GenericPusTcSecondaryHeader),
pool: &mut (impl PoolProvider + ?Sized), pool: &mut (impl PoolProvider + ?Sized),
) -> Result<TcInfo, ScheduleError> { ) -> Result<TcInfo, ScheduleError> {
self.insert_wrapped_tc::<cds::TimeProvider<DaysLen24Bits>>(pus_tc, pool) self.insert_wrapped_tc::<cds::TimeProvider<DaysLen24Bits>>(pus_tc, pool)
@ -530,7 +530,7 @@ pub mod alloc_mod {
let range = self.retrieve_by_time_filter(time_window); let range = self.retrieve_by_time_filter(time_window);
let mut del_packets = 0; let mut del_packets = 0;
let mut res_if_fails = None; let mut res_if_fails = None;
let mut keys_to_delete = Vec::new(); let mut keys_to_delete = alloc::vec::Vec::new();
for time_bucket in range { for time_bucket in range {
for tc in time_bucket.1 { for tc in time_bucket.1 {
match pool.delete(tc.addr) { match pool.delete(tc.addr) {
@ -561,7 +561,10 @@ pub mod alloc_mod {
} }
/// Retrieve a range over all scheduled commands. /// Retrieve a range over all scheduled commands.
pub fn retrieve_all(&mut self) -> Range<'_, UnixTimestamp, Vec<TcInfo>> { pub fn retrieve_all(
&mut self,
) -> alloc::collections::btree_map::Range<'_, UnixTimestamp, alloc::vec::Vec<TcInfo>>
{
self.tc_map.range(..) self.tc_map.range(..)
} }
@ -572,7 +575,7 @@ pub mod alloc_mod {
pub fn retrieve_by_time_filter<TimeProvider: CcsdsTimeProvider>( pub fn retrieve_by_time_filter<TimeProvider: CcsdsTimeProvider>(
&mut self, &mut self,
time_window: TimeWindow<TimeProvider>, time_window: TimeWindow<TimeProvider>,
) -> Range<'_, UnixTimestamp, Vec<TcInfo>> { ) -> Range<'_, UnixTimestamp, alloc::vec::Vec<TcInfo>> {
match time_window.time_window_type() { match time_window.time_window_type() {
TimeWindowType::SelectAll => self.tc_map.range(..), TimeWindowType::SelectAll => self.tc_map.range(..),
TimeWindowType::TimeTagToTimeTag => { TimeWindowType::TimeTagToTimeTag => {
@ -761,9 +764,9 @@ pub mod alloc_mod {
mut releaser: R, mut releaser: R,
tc_store: &(impl PoolProvider + ?Sized), tc_store: &(impl PoolProvider + ?Sized),
tc_buf: &mut [u8], tc_buf: &mut [u8],
) -> Result<Vec<TcInfo>, (Vec<TcInfo>, StoreError)> { ) -> Result<alloc::vec::Vec<TcInfo>, (alloc::vec::Vec<TcInfo>, StoreError)> {
let tcs_to_release = self.telecommands_to_release(); let tcs_to_release = self.telecommands_to_release();
let mut released_tcs = Vec::new(); let mut released_tcs = alloc::vec::Vec::new();
for tc in tcs_to_release { for tc in tcs_to_release {
for info in tc.1 { for info in tc.1 {
tc_store tc_store
@ -835,7 +838,7 @@ pub mod alloc_mod {
} }
match self.tc_map.entry(time_stamp) { match self.tc_map.entry(time_stamp) {
Entry::Vacant(e) => { Entry::Vacant(e) => {
e.insert(vec![info]); e.insert(alloc::vec![info]);
} }
Entry::Occupied(mut v) => { Entry::Occupied(mut v) => {
v.get_mut().push(info); v.get_mut().push(info);

View File

@ -1,6 +1,15 @@
use super::scheduler::PusSchedulerProvider; use super::scheduler::PusSchedulerProvider;
use super::verification::VerificationReportingProvider; use super::verification::{
use super::{EcssTcInMemConverter, PusServiceBase, PusServiceHelper}; VerificationReporterWithSharedPoolMpscBoundedSender,
VerificationReporterWithSharedPoolMpscSender, VerificationReporterWithVecMpscBoundedSender,
VerificationReporterWithVecMpscSender, VerificationReportingProvider,
};
use super::{
get_current_cds_short_timestamp, EcssTcInMemConverter, EcssTcInSharedStoreConverter,
EcssTcInVecConverter, EcssTcReceiverCore, EcssTmSenderCore, MpscTcReceiver, PusServiceHelper,
TmAsVecSenderWithBoundedMpsc, TmAsVecSenderWithMpsc, TmInSharedPoolSenderWithBoundedMpsc,
TmInSharedPoolSenderWithMpsc,
};
use crate::pool::PoolProvider; use crate::pool::PoolProvider;
use crate::pus::{PusPacketHandlerResult, PusPacketHandlingError}; use crate::pus::{PusPacketHandlerResult, PusPacketHandlingError};
use alloc::string::ToString; use alloc::string::ToString;
@ -16,22 +25,39 @@ use spacepackets::time::cds::TimeProvider;
/// [Self::scheduler] and [Self::scheduler_mut] function and then use the scheduler API to release /// [Self::scheduler] and [Self::scheduler_mut] function and then use the scheduler API to release
/// telecommands when applicable. /// telecommands when applicable.
pub struct PusService11SchedHandler< pub struct PusService11SchedHandler<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
PusScheduler: PusSchedulerProvider, PusScheduler: PusSchedulerProvider,
> { > {
pub service_helper: PusServiceHelper<TcInMemConverter, VerificationReporter>, pub service_helper:
PusServiceHelper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
scheduler: PusScheduler, scheduler: PusScheduler,
} }
impl< impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
Scheduler: PusSchedulerProvider, Scheduler: PusSchedulerProvider,
> PusService11SchedHandler<TcInMemConverter, VerificationReporter, Scheduler> >
PusService11SchedHandler<
TcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
Scheduler,
>
{ {
pub fn new( pub fn new(
service_helper: PusServiceHelper<TcInMemConverter, VerificationReporter>, service_helper: PusServiceHelper<
TcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
>,
scheduler: Scheduler, scheduler: Scheduler,
) -> Self { ) -> Self {
Self { Self {
@ -70,9 +96,7 @@ impl<
)); ));
} }
let mut partial_error = None; let mut partial_error = None;
let time_stamp = PusServiceBase::<VerificationReporter>::get_current_cds_short_timestamp( let time_stamp = get_current_cds_short_timestamp(&mut partial_error);
&mut partial_error,
);
match standard_subservice.unwrap() { match standard_subservice.unwrap() {
scheduling::Subservice::TcEnableScheduling => { scheduling::Subservice::TcEnableScheduling => {
let start_token = self let start_token = self
@ -169,18 +193,55 @@ impl<
Ok(PusPacketHandlerResult::RequestHandled) Ok(PusPacketHandlerResult::RequestHandled)
} }
} }
/// Helper type definition for a PUS 11 handler with a dynamic TMTC memory backend and regular
/// mpsc queues.
pub type PusService11SchedHandlerDynWithMpsc<PusScheduler> = PusService11SchedHandler<
MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscSender,
PusScheduler,
>;
/// Helper type definition for a PUS 11 handler with a dynamic TMTC memory backend and bounded MPSC
/// queues.
pub type PusService11SchedHandlerDynWithBoundedMpsc<PusScheduler> = PusService11SchedHandler<
MpscTcReceiver,
TmAsVecSenderWithBoundedMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscBoundedSender,
PusScheduler,
>;
/// Helper type definition for a PUS 11 handler with a shared store TMTC memory backend and regular
/// mpsc queues.
pub type PusService11SchedHandlerStaticWithMpsc<PusScheduler> = PusService11SchedHandler<
MpscTcReceiver,
TmInSharedPoolSenderWithMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscSender,
PusScheduler,
>;
/// Helper type definition for a PUS 11 handler with a shared store TMTC memory backend and bounded
/// mpsc queues.
pub type PusService11SchedHandlerStaticWithBoundedMpsc<PusScheduler> = PusService11SchedHandler<
MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
PusScheduler,
>;
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::pool::{StaticMemoryPool, StaticPoolConfig}; use crate::pool::{StaticMemoryPool, StaticPoolConfig};
use crate::pus::tests::TEST_APID; use crate::pus::tests::TEST_APID;
use crate::pus::verification::VerificationReporterWithSender; use crate::pus::verification::VerificationReporterWithSharedPoolMpscBoundedSender;
use crate::pus::{ use crate::pus::{
scheduler::{self, PusSchedulerProvider, TcInfo}, scheduler::{self, PusSchedulerProvider, TcInfo},
tests::{PusServiceHandlerWithSharedStoreCommon, PusTestHarness}, tests::{PusServiceHandlerWithSharedStoreCommon, PusTestHarness},
verification::{RequestId, TcStateAccepted, VerificationToken}, verification::{RequestId, TcStateAccepted, VerificationToken},
EcssTcInSharedStoreConverter, EcssTcInSharedStoreConverter,
}; };
use crate::pus::{MpscTcReceiver, TmInSharedPoolSenderWithBoundedMpsc};
use alloc::collections::VecDeque; use alloc::collections::VecDeque;
use delegate::delegate; use delegate::delegate;
use spacepackets::ecss::scheduling::Subservice; use spacepackets::ecss::scheduling::Subservice;
@ -198,8 +259,10 @@ mod tests {
struct Pus11HandlerWithStoreTester { struct Pus11HandlerWithStoreTester {
common: PusServiceHandlerWithSharedStoreCommon, common: PusServiceHandlerWithSharedStoreCommon,
handler: PusService11SchedHandler< handler: PusService11SchedHandler<
MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter, EcssTcInSharedStoreConverter,
VerificationReporterWithSender, VerificationReporterWithSharedPoolMpscBoundedSender,
TestScheduler, TestScheduler,
>, >,
sched_tc_pool: StaticMemoryPool, sched_tc_pool: StaticMemoryPool,

View File

@ -5,24 +5,45 @@ use spacepackets::ecss::tm::{PusTmCreator, PusTmSecondaryHeader};
use spacepackets::ecss::PusPacket; use spacepackets::ecss::PusPacket;
use spacepackets::SpHeader; use spacepackets::SpHeader;
use super::verification::VerificationReportingProvider; use super::verification::{
use super::{EcssTcInMemConverter, PusServiceBase, PusServiceHelper}; VerificationReporterWithSharedPoolMpscBoundedSender,
VerificationReporterWithSharedPoolMpscSender, VerificationReporterWithVecMpscBoundedSender,
VerificationReporterWithVecMpscSender, VerificationReportingProvider,
};
use super::{
get_current_cds_short_timestamp, EcssTcInMemConverter, EcssTcInSharedStoreConverter,
EcssTcInVecConverter, EcssTcReceiverCore, EcssTmSenderCore, MpscTcReceiver, PusServiceHelper,
TmAsVecSenderWithBoundedMpsc, TmAsVecSenderWithMpsc, TmInSharedPoolSenderWithBoundedMpsc,
TmInSharedPoolSenderWithMpsc,
};
/// This is a helper class for [std] environments to handle generic PUS 17 (test service) packets. /// This is a helper class for [std] environments to handle generic PUS 17 (test service) packets.
/// This handler only processes ping requests and generates a ping reply for them accordingly. /// This handler only processes ping requests and generates a ping reply for them accordingly.
pub struct PusService17TestHandler< pub struct PusService17TestHandler<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
> { > {
pub service_helper: PusServiceHelper<TcInMemConverter, VerificationReporter>, pub service_helper:
PusServiceHelper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
} }
impl< impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter, TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider, VerificationReporter: VerificationReportingProvider,
> PusService17TestHandler<TcInMemConverter, VerificationReporter> > PusService17TestHandler<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
{ {
pub fn new(service_helper: PusServiceHelper<TcInMemConverter, VerificationReporter>) -> Self { pub fn new(
service_helper: PusServiceHelper<
TcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
>,
) -> Self {
Self { service_helper } Self { service_helper }
} }
@ -41,10 +62,7 @@ impl<
} }
if tc.subservice() == 1 { if tc.subservice() == 1 {
let mut partial_error = None; let mut partial_error = None;
let time_stamp = let time_stamp = get_current_cds_short_timestamp(&mut partial_error);
PusServiceBase::<VerificationReporter>::get_current_cds_short_timestamp(
&mut partial_error,
);
let result = self let result = self
.service_helper .service_helper
.common .common
@ -98,17 +116,53 @@ impl<
} }
} }
/// Helper type definition for a PUS 17 handler with a dynamic TMTC memory backend and regular
/// mpsc queues.
pub type PusService17TestHandlerDynWithMpsc = PusService17TestHandler<
MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscSender,
>;
/// Helper type definition for a PUS 17 handler with a dynamic TMTC memory backend and bounded MPSC
/// queues.
pub type PusService17TestHandlerDynWithBoundedMpsc = PusService17TestHandler<
MpscTcReceiver,
TmAsVecSenderWithBoundedMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscBoundedSender,
>;
/// Helper type definition for a PUS 17 handler with a shared store TMTC memory backend and regular
/// mpsc queues.
pub type PusService17TestHandlerStaticWithMpsc = PusService17TestHandler<
MpscTcReceiver,
TmInSharedPoolSenderWithMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscSender,
>;
/// Helper type definition for a PUS 17 handler with a shared store TMTC memory backend and bounded
/// mpsc queues.
pub type PusService17TestHandlerStaticWithBoundedMpsc = PusService17TestHandler<
MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
>;
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::pus::tests::{ use crate::pus::tests::{
PusServiceHandlerWithSharedStoreCommon, PusServiceHandlerWithVecCommon, PusTestHarness, PusServiceHandlerWithSharedStoreCommon, PusServiceHandlerWithVecCommon, PusTestHarness,
SimplePusPacketHandler, TEST_APID, SimplePusPacketHandler, TEST_APID,
}; };
use crate::pus::verification::{RequestId, VerificationReporterWithSender}; use crate::pus::verification::std_mod::{
VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
};
use crate::pus::verification::RequestId;
use crate::pus::verification::{TcStateAccepted, VerificationToken}; use crate::pus::verification::{TcStateAccepted, VerificationToken};
use crate::pus::{ use crate::pus::{
EcssTcInSharedStoreConverter, EcssTcInVecConverter, PusPacketHandlerResult, EcssTcInSharedStoreConverter, EcssTcInVecConverter, MpscTcReceiver, PusPacketHandlerResult,
PusPacketHandlingError, PusPacketHandlingError, TmAsVecSenderWithMpsc, TmInSharedPoolSenderWithBoundedMpsc,
}; };
use delegate::delegate; use delegate::delegate;
use spacepackets::ecss::tc::{PusTcCreator, PusTcSecondaryHeader}; use spacepackets::ecss::tc::{PusTcCreator, PusTcSecondaryHeader};
@ -120,8 +174,12 @@ mod tests {
struct Pus17HandlerWithStoreTester { struct Pus17HandlerWithStoreTester {
common: PusServiceHandlerWithSharedStoreCommon, common: PusServiceHandlerWithSharedStoreCommon,
handler: handler: PusService17TestHandler<
PusService17TestHandler<EcssTcInSharedStoreConverter, VerificationReporterWithSender>, MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
>,
} }
impl Pus17HandlerWithStoreTester { impl Pus17HandlerWithStoreTester {
@ -158,8 +216,13 @@ mod tests {
} }
struct Pus17HandlerWithVecTester { struct Pus17HandlerWithVecTester {
common: PusServiceHandlerWithVecCommon<VerificationReporterWithSender>, common: PusServiceHandlerWithVecCommon<VerificationReporterWithVecMpscSender>,
handler: PusService17TestHandler<EcssTcInVecConverter, VerificationReporterWithSender>, handler: PusService17TestHandler<
MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscSender,
>,
} }
impl Pus17HandlerWithVecTester { impl Pus17HandlerWithVecTester {

View File

@ -20,7 +20,7 @@
//! VerificationReportingProvider, VerificationReporterCfg, VerificationReporterWithSender //! VerificationReportingProvider, VerificationReporterCfg, VerificationReporterWithSender
//! }; //! };
//! use satrs::seq_count::SeqCountProviderSimple; //! use satrs::seq_count::SeqCountProviderSimple;
//! use satrs::pus::MpscTmInSharedPoolSender; //! use satrs::pus::TmInSharedPoolSenderWithMpsc;
//! use satrs::tmtc::tm_helper::SharedTmPool; //! use satrs::tmtc::tm_helper::SharedTmPool;
//! use spacepackets::ecss::PusPacket; //! use spacepackets::ecss::PusPacket;
//! use spacepackets::SpHeader; //! use spacepackets::SpHeader;
@ -35,9 +35,9 @@
//! let shared_tm_store = SharedTmPool::new(tm_pool); //! let shared_tm_store = SharedTmPool::new(tm_pool);
//! let tm_store = shared_tm_store.clone_backing_pool(); //! let tm_store = shared_tm_store.clone_backing_pool();
//! let (verif_tx, verif_rx) = mpsc::channel(); //! let (verif_tx, verif_rx) = mpsc::channel();
//! let sender = MpscTmInSharedPoolSender::new(0, "Test Sender", shared_tm_store, verif_tx); //! let sender = TmInSharedPoolSenderWithMpsc::new(0, "Test Sender", shared_tm_store, verif_tx);
//! let cfg = VerificationReporterCfg::new(TEST_APID, 1, 2, 8).unwrap(); //! let cfg = VerificationReporterCfg::new(TEST_APID, 1, 2, 8).unwrap();
//! let mut reporter = VerificationReporterWithSender::new(&cfg , Box::new(sender)); //! let mut reporter = VerificationReporterWithSender::new(&cfg , sender);
//! //!
//! let mut sph = SpHeader::tc_unseg(TEST_APID, 0, 0).unwrap(); //! let mut sph = SpHeader::tc_unseg(TEST_APID, 0, 0).unwrap();
//! let tc_header = PusTcSecondaryHeader::new_simple(17, 1); //! let tc_header = PusTcSecondaryHeader::new_simple(17, 1);
@ -95,10 +95,11 @@ pub use crate::seq_count::SeqCountProviderSimple;
pub use spacepackets::ecss::verification::*; pub use spacepackets::ecss::verification::*;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
pub use alloc_mod::{ #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "alloc")))]
VerificationReporter, VerificationReporterCfg, VerificationReporterWithSender, pub use alloc_mod::*;
};
#[cfg(feature = "std")] #[cfg(feature = "std")]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "std")))]
pub use std_mod::*; pub use std_mod::*;
/// This is a request identifier as specified in 5.4.11.2 c. of the PUS standard. /// This is a request identifier as specified in 5.4.11.2 c. of the PUS standard.
@ -949,15 +950,13 @@ impl VerificationReporterCore {
} }
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
mod alloc_mod { pub mod alloc_mod {
use super::*; use super::*;
use crate::pus::alloc_mod::EcssTmSender; use crate::{
use crate::seq_count::SequenceCountProvider; pus::{TmAsVecSenderWithId, TmInSharedPoolSenderWithId},
use alloc::boxed::Box; seq_count::SequenceCountProvider,
use alloc::vec; };
use alloc::vec::Vec;
use core::cell::RefCell; use core::cell::RefCell;
use spacepackets::ecss::tc::IsPusTelecommand;
#[derive(Clone)] #[derive(Clone)]
pub struct VerificationReporterCfg { pub struct VerificationReporterCfg {
@ -992,9 +991,9 @@ mod alloc_mod {
/// TM funnel. This helper will always set those fields to 0. /// TM funnel. This helper will always set those fields to 0.
#[derive(Clone)] #[derive(Clone)]
pub struct VerificationReporter { pub struct VerificationReporter {
source_data_buf: RefCell<Vec<u8>>, source_data_buf: RefCell<alloc::vec::Vec<u8>>,
pub seq_count_provider: Option<Box<dyn SequenceCountProvider<u16> + Send>>, pub seq_count_provider: Option<alloc::boxed::Box<dyn SequenceCountProvider<u16> + Send>>,
pub msg_count_provider: Option<Box<dyn SequenceCountProvider<u16> + Send>>, pub msg_count_provider: Option<alloc::boxed::Box<dyn SequenceCountProvider<u16> + Send>>,
pub reporter: VerificationReporterCore, pub reporter: VerificationReporterCore,
} }
@ -1002,7 +1001,7 @@ mod alloc_mod {
pub fn new(cfg: &VerificationReporterCfg) -> Self { pub fn new(cfg: &VerificationReporterCfg) -> Self {
let reporter = VerificationReporterCore::new(cfg.apid).unwrap(); let reporter = VerificationReporterCore::new(cfg.apid).unwrap();
Self { Self {
source_data_buf: RefCell::new(vec![ source_data_buf: RefCell::new(alloc::vec![
0; 0;
RequestId::SIZE_AS_BYTES RequestId::SIZE_AS_BYTES
+ cfg.step_field_width + cfg.step_field_width
@ -1269,21 +1268,18 @@ mod alloc_mod {
/// Helper object which caches the sender passed as a trait object. Provides the same /// Helper object which caches the sender passed as a trait object. Provides the same
/// API as [VerificationReporter] but without the explicit sender arguments. /// API as [VerificationReporter] but without the explicit sender arguments.
#[derive(Clone)] #[derive(Clone)]
pub struct VerificationReporterWithSender { pub struct VerificationReporterWithSender<Sender: EcssTmSenderCore + Clone> {
pub reporter: VerificationReporter, pub reporter: VerificationReporter,
pub sender: Box<dyn EcssTmSender>, pub sender: Sender,
} }
impl VerificationReporterWithSender { impl<Sender: EcssTmSenderCore + Clone> VerificationReporterWithSender<Sender> {
pub fn new(cfg: &VerificationReporterCfg, sender: Box<dyn EcssTmSender>) -> Self { pub fn new(cfg: &VerificationReporterCfg, sender: Sender) -> Self {
let reporter = VerificationReporter::new(cfg); let reporter = VerificationReporter::new(cfg);
Self::new_from_reporter(reporter, sender) Self::new_from_reporter(reporter, sender)
} }
pub fn new_from_reporter( pub fn new_from_reporter(reporter: VerificationReporter, sender: Sender) -> Self {
reporter: VerificationReporter,
sender: Box<dyn EcssTmSender>,
) -> Self {
Self { reporter, sender } Self { reporter, sender }
} }
@ -1297,7 +1293,9 @@ mod alloc_mod {
} }
} }
impl VerificationReportingProvider for VerificationReporterWithSender { impl<Sender: EcssTmSenderCore + Clone> VerificationReportingProvider
for VerificationReporterWithSender<Sender>
{
delegate! { delegate! {
to self.reporter { to self.reporter {
fn add_tc( fn add_tc(
@ -1315,7 +1313,7 @@ mod alloc_mod {
) -> Result<VerificationToken<TcStateAccepted>, VerificationOrSendErrorWithToken<TcStateNone>> ) -> Result<VerificationToken<TcStateAccepted>, VerificationOrSendErrorWithToken<TcStateNone>>
{ {
self.reporter self.reporter
.acceptance_success(token, self.sender.as_ref(), time_stamp) .acceptance_success(token, &self.sender, time_stamp)
} }
fn acceptance_failure( fn acceptance_failure(
@ -1324,7 +1322,7 @@ mod alloc_mod {
params: FailParams, params: FailParams,
) -> Result<(), VerificationOrSendErrorWithToken<TcStateNone>> { ) -> Result<(), VerificationOrSendErrorWithToken<TcStateNone>> {
self.reporter self.reporter
.acceptance_failure(token, self.sender.as_ref(), params) .acceptance_failure(token, &self.sender, params)
} }
fn start_success( fn start_success(
@ -1335,8 +1333,7 @@ mod alloc_mod {
VerificationToken<TcStateStarted>, VerificationToken<TcStateStarted>,
VerificationOrSendErrorWithToken<TcStateAccepted>, VerificationOrSendErrorWithToken<TcStateAccepted>,
> { > {
self.reporter self.reporter.start_success(token, &self.sender, time_stamp)
.start_success(token, self.sender.as_ref(), time_stamp)
} }
fn start_failure( fn start_failure(
@ -1344,8 +1341,7 @@ mod alloc_mod {
token: VerificationToken<TcStateAccepted>, token: VerificationToken<TcStateAccepted>,
params: FailParams, params: FailParams,
) -> Result<(), VerificationOrSendErrorWithToken<TcStateAccepted>> { ) -> Result<(), VerificationOrSendErrorWithToken<TcStateAccepted>> {
self.reporter self.reporter.start_failure(token, &self.sender, params)
.start_failure(token, self.sender.as_ref(), params)
} }
fn step_success( fn step_success(
@ -1355,7 +1351,7 @@ mod alloc_mod {
step: impl EcssEnumeration, step: impl EcssEnumeration,
) -> Result<(), EcssTmtcError> { ) -> Result<(), EcssTmtcError> {
self.reporter self.reporter
.step_success(token, self.sender.as_ref(), time_stamp, step) .step_success(token, &self.sender, time_stamp, step)
} }
fn step_failure( fn step_failure(
@ -1363,8 +1359,7 @@ mod alloc_mod {
token: VerificationToken<TcStateStarted>, token: VerificationToken<TcStateStarted>,
params: FailParamsWithStep, params: FailParamsWithStep,
) -> Result<(), VerificationOrSendErrorWithToken<TcStateStarted>> { ) -> Result<(), VerificationOrSendErrorWithToken<TcStateStarted>> {
self.reporter self.reporter.step_failure(token, &self.sender, params)
.step_failure(token, self.sender.as_ref(), params)
} }
fn completion_success<TcState: WasAtLeastAccepted + Copy>( fn completion_success<TcState: WasAtLeastAccepted + Copy>(
@ -1373,7 +1368,7 @@ mod alloc_mod {
time_stamp: &[u8], time_stamp: &[u8],
) -> Result<(), VerificationOrSendErrorWithToken<TcState>> { ) -> Result<(), VerificationOrSendErrorWithToken<TcState>> {
self.reporter self.reporter
.completion_success(token, self.sender.as_ref(), time_stamp) .completion_success(token, &self.sender, time_stamp)
} }
fn completion_failure<TcState: WasAtLeastAccepted + Copy>( fn completion_failure<TcState: WasAtLeastAccepted + Copy>(
@ -1382,18 +1377,34 @@ mod alloc_mod {
params: FailParams, params: FailParams,
) -> Result<(), VerificationOrSendErrorWithToken<TcState>> { ) -> Result<(), VerificationOrSendErrorWithToken<TcState>> {
self.reporter self.reporter
.completion_failure(token, self.sender.as_ref(), params) .completion_failure(token, &self.sender, params)
} }
} }
pub type VerificationReporterWithSharedPoolSender<Sender> =
VerificationReporterWithSender<TmInSharedPoolSenderWithId<Sender>>;
pub type VerificationReporterWithVecSender<Sender> =
VerificationReporterWithSender<TmAsVecSenderWithId<Sender>>;
} }
#[cfg(feature = "std")] #[cfg(feature = "std")]
mod std_mod { pub mod std_mod {
use crate::pus::verification::VerificationReporterWithSender; use std::sync::mpsc;
use std::sync::{Arc, Mutex};
pub type StdVerifReporterWithSender = VerificationReporterWithSender; use crate::pool::StoreAddr;
pub type SharedStdVerifReporterWithSender = Arc<Mutex<StdVerifReporterWithSender>>;
use super::alloc_mod::{
VerificationReporterWithSharedPoolSender, VerificationReporterWithVecSender,
};
pub type VerificationReporterWithSharedPoolMpscSender =
VerificationReporterWithSharedPoolSender<mpsc::Sender<StoreAddr>>;
pub type VerificationReporterWithSharedPoolMpscBoundedSender =
VerificationReporterWithSharedPoolSender<mpsc::SyncSender<StoreAddr>>;
pub type VerificationReporterWithVecMpscSender =
VerificationReporterWithVecSender<mpsc::Sender<alloc::vec::Vec<u8>>>;
pub type VerificationReporterWithVecMpscBoundedSender =
VerificationReporterWithVecSender<mpsc::SyncSender<alloc::vec::Vec<u8>>>;
} }
#[cfg(test)] #[cfg(test)]
@ -1405,10 +1416,11 @@ pub mod tests {
VerificationReporter, VerificationReporterCfg, VerificationReporterWithSender, VerificationReporter, VerificationReporterCfg, VerificationReporterWithSender,
VerificationToken, VerificationToken,
}; };
use crate::pus::{EcssChannel, MpscTmInSharedPoolSender, PusTmWrapper}; use crate::pus::{
EcssChannel, PusTmWrapper, TmInSharedPoolSenderWithId, TmInSharedPoolSenderWithMpsc,
};
use crate::tmtc::tm_helper::SharedTmPool; use crate::tmtc::tm_helper::SharedTmPool;
use crate::ChannelId; use crate::ChannelId;
use alloc::boxed::Box;
use alloc::format; use alloc::format;
use alloc::sync::Arc; use alloc::sync::Arc;
use hashbrown::HashMap; use hashbrown::HashMap;
@ -1637,7 +1649,7 @@ pub mod tests {
} }
impl EcssChannel for TestSender { impl EcssChannel for TestSender {
fn id(&self) -> ChannelId { fn channel_id(&self) -> ChannelId {
0 0
} }
fn name(&self) -> &'static str { fn name(&self) -> &'static str {
@ -1688,13 +1700,13 @@ pub mod tests {
&mut self.vr &mut self.vr
} }
} }
struct TestBaseWithHelper<'a> { struct TestBaseWithHelper<'a, Sender: EcssTmSenderCore + Clone + 'static> {
helper: VerificationReporterWithSender, helper: VerificationReporterWithSender<Sender>,
#[allow(dead_code)] #[allow(dead_code)]
tc: PusTcCreator<'a>, tc: PusTcCreator<'a>,
} }
impl<'a> TestBaseWithHelper<'a> { impl<'a, Sender: EcssTmSenderCore + Clone + 'static> TestBaseWithHelper<'a, Sender> {
fn rep(&mut self) -> &mut VerificationReporter { fn rep(&mut self) -> &mut VerificationReporter {
&mut self.helper.reporter &mut self.helper.reporter
} }
@ -1725,12 +1737,15 @@ pub mod tests {
(TestBase { vr: reporter, tc }, init_tok) (TestBase { vr: reporter, tc }, init_tok)
} }
fn base_with_helper_init() -> (TestBaseWithHelper<'static>, VerificationToken<TcStateNone>) { fn base_with_helper_init() -> (
TestBaseWithHelper<'static, TestSender>,
VerificationToken<TcStateNone>,
) {
let mut reporter = base_reporter(); let mut reporter = base_reporter();
let (tc, _) = base_tc_init(None); let (tc, _) = base_tc_init(None);
let init_tok = reporter.add_tc(&tc); let init_tok = reporter.add_tc(&tc);
let sender = TestSender::default(); let sender = TestSender::default();
let helper = VerificationReporterWithSender::new_from_reporter(reporter, Box::new(sender)); let helper = VerificationReporterWithSender::new_from_reporter(reporter, sender);
(TestBaseWithHelper { helper, tc }, init_tok) (TestBaseWithHelper { helper, tc }, init_tok)
} }
@ -1758,7 +1773,7 @@ pub mod tests {
let shared_tm_store = SharedTmPool::new(pool); let shared_tm_store = SharedTmPool::new(pool);
let (tx, _) = mpsc::channel(); let (tx, _) = mpsc::channel();
let mpsc_verif_sender = let mpsc_verif_sender =
MpscTmInSharedPoolSender::new(0, "verif_sender", shared_tm_store, tx); TmInSharedPoolSenderWithMpsc::new(0, "verif_sender", shared_tm_store, tx);
is_send(&mpsc_verif_sender); is_send(&mpsc_verif_sender);
} }
@ -1785,8 +1800,7 @@ pub mod tests {
b.helper b.helper
.acceptance_success(tok, &EMPTY_STAMP) .acceptance_success(tok, &EMPTY_STAMP)
.expect("Sending acceptance success failed"); .expect("Sending acceptance success failed");
let sender: &mut TestSender = b.helper.sender.downcast_mut().unwrap(); acceptance_check(&mut b.helper.sender, &tok.req_id);
acceptance_check(sender, &tok.req_id);
} }
fn acceptance_fail_check(sender: &mut TestSender, req_id: RequestId, stamp_buf: [u8; 7]) { fn acceptance_fail_check(sender: &mut TestSender, req_id: RequestId, stamp_buf: [u8; 7]) {
@ -1830,8 +1844,7 @@ pub mod tests {
b.helper b.helper
.acceptance_failure(tok, fail_params) .acceptance_failure(tok, fail_params)
.expect("Sending acceptance success failed"); .expect("Sending acceptance success failed");
let sender: &mut TestSender = b.helper.sender.downcast_mut().unwrap(); acceptance_fail_check(&mut b.helper.sender, tok.req_id, stamp_buf);
acceptance_fail_check(sender, tok.req_id, stamp_buf);
} }
#[test] #[test]
@ -1961,8 +1974,7 @@ pub mod tests {
b.helper b.helper
.start_failure(accepted_token, fail_params) .start_failure(accepted_token, fail_params)
.expect("Start failure failure"); .expect("Start failure failure");
let sender: &mut TestSender = b.helper.sender.downcast_mut().unwrap(); start_fail_check(&mut b.helper.sender, tok.req_id, fail_data_raw);
start_fail_check(sender, tok.req_id, fail_data_raw);
} }
fn step_success_check(sender: &mut TestSender, req_id: RequestId) { fn step_success_check(sender: &mut TestSender, req_id: RequestId) {
@ -2059,9 +2071,8 @@ pub mod tests {
b.helper b.helper
.step_success(&started_token, &EMPTY_STAMP, EcssEnumU8::new(1)) .step_success(&started_token, &EMPTY_STAMP, EcssEnumU8::new(1))
.expect("Sending step 1 success failed"); .expect("Sending step 1 success failed");
let sender: &mut TestSender = b.helper.sender.downcast_mut().unwrap(); assert_eq!(b.helper.sender.service_queue.borrow().len(), 4);
assert_eq!(sender.service_queue.borrow().len(), 4); step_success_check(&mut b.helper.sender, tok.req_id);
step_success_check(sender, tok.req_id);
} }
fn check_step_failure(sender: &mut TestSender, req_id: RequestId, fail_data_raw: [u8; 4]) { fn check_step_failure(sender: &mut TestSender, req_id: RequestId, fail_data_raw: [u8; 4]) {
@ -2191,8 +2202,7 @@ pub mod tests {
b.helper b.helper
.step_failure(started_token, fail_params) .step_failure(started_token, fail_params)
.expect("Step failure failed"); .expect("Step failure failed");
let sender: &mut TestSender = b.helper.sender.downcast_mut().unwrap(); check_step_failure(&mut b.helper.sender, req_id, fail_data_raw);
check_step_failure(sender, req_id, fail_data_raw);
} }
fn completion_fail_check(sender: &mut TestSender, req_id: RequestId) { fn completion_fail_check(sender: &mut TestSender, req_id: RequestId) {
@ -2278,8 +2288,7 @@ pub mod tests {
b.helper b.helper
.completion_failure(started_token, fail_params) .completion_failure(started_token, fail_params)
.expect("Completion failure"); .expect("Completion failure");
let sender: &mut TestSender = b.helper.sender.downcast_mut().unwrap(); completion_fail_check(&mut b.helper.sender, req_id);
completion_fail_check(sender, req_id);
} }
fn completion_success_check(sender: &mut TestSender, req_id: RequestId) { fn completion_success_check(sender: &mut TestSender, req_id: RequestId) {
@ -2355,8 +2364,7 @@ pub mod tests {
b.helper b.helper
.completion_success(started_token, &EMPTY_STAMP) .completion_success(started_token, &EMPTY_STAMP)
.expect("Sending completion success failed"); .expect("Sending completion success failed");
let sender: &mut TestSender = b.helper.sender.downcast_mut().unwrap(); completion_success_check(&mut b.helper.sender, tok.req_id);
completion_success_check(sender, tok.req_id);
} }
#[test] #[test]
@ -2368,9 +2376,9 @@ pub mod tests {
let shared_tm_pool = shared_tm_store.clone_backing_pool(); let shared_tm_pool = shared_tm_store.clone_backing_pool();
let (verif_tx, verif_rx) = mpsc::channel(); let (verif_tx, verif_rx) = mpsc::channel();
let sender = let sender =
MpscTmInSharedPoolSender::new(0, "Verification Sender", shared_tm_store, verif_tx); TmInSharedPoolSenderWithId::new(0, "Verification Sender", shared_tm_store, verif_tx);
let cfg = VerificationReporterCfg::new(TEST_APID, 1, 2, 8).unwrap(); let cfg = VerificationReporterCfg::new(TEST_APID, 1, 2, 8).unwrap();
let mut reporter = VerificationReporterWithSender::new(&cfg, Box::new(sender)); let mut reporter = VerificationReporterWithSender::new(&cfg, sender);
let mut sph = SpHeader::tc_unseg(TEST_APID, 0, 0).unwrap(); let mut sph = SpHeader::tc_unseg(TEST_APID, 0, 0).unwrap();
let tc_header = PusTcSecondaryHeader::new_simple(17, 1); let tc_header = PusTcSecondaryHeader::new_simple(17, 1);

View File

@ -1,6 +1,8 @@
use core::fmt::{Display, Formatter}; use core::fmt::{Display, Formatter};
#[cfg(feature = "std")] #[cfg(feature = "std")]
use std::error::Error; use std::error::Error;
#[cfg(feature = "std")]
use std::sync::mpsc;
/// Generic error type for sending something via a message queue. /// Generic error type for sending something via a message queue.
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
@ -47,3 +49,37 @@ impl Display for GenericRecvError {
#[cfg(feature = "std")] #[cfg(feature = "std")]
impl Error for GenericRecvError {} impl Error for GenericRecvError {}
#[cfg(feature = "std")]
impl<T> From<mpsc::SendError<T>> for GenericSendError {
fn from(_: mpsc::SendError<T>) -> Self {
GenericSendError::RxDisconnected
}
}
#[cfg(feature = "std")]
impl<T> From<mpsc::TrySendError<T>> for GenericSendError {
fn from(err: mpsc::TrySendError<T>) -> Self {
match err {
mpsc::TrySendError::Full(_) => GenericSendError::QueueFull(None),
mpsc::TrySendError::Disconnected(_) => GenericSendError::RxDisconnected,
}
}
}
#[cfg(feature = "crossbeam")]
impl<T> From<crossbeam_channel::SendError<T>> for GenericSendError {
fn from(_: crossbeam_channel::SendError<T>) -> Self {
GenericSendError::RxDisconnected
}
}
#[cfg(feature = "crossbeam")]
impl<T> From<crossbeam_channel::TrySendError<T>> for GenericSendError {
fn from(err: crossbeam_channel::TrySendError<T>) -> Self {
match err {
crossbeam_channel::TrySendError::Full(_) => GenericSendError::QueueFull(None),
crossbeam_channel::TrySendError::Disconnected(_) => GenericSendError::RxDisconnected,
}
}
}

View File

@ -52,7 +52,7 @@
//! } //! }
//! //!
//! let apid_handler = ConcreteApidHandler::default(); //! let apid_handler = ConcreteApidHandler::default();
//! let mut ccsds_distributor = CcsdsDistributor::new(Box::new(apid_handler)); //! let mut ccsds_distributor = CcsdsDistributor::new(apid_handler);
//! //!
//! // Create and pass PUS telecommand with a valid APID //! // Create and pass PUS telecommand with a valid APID
//! let mut space_packet_header = SpHeader::tc_unseg(0x002, 0x34, 0).unwrap(); //! let mut space_packet_header = SpHeader::tc_unseg(0x002, 0x34, 0).unwrap();
@ -72,23 +72,17 @@
//! let tc_slice = &test_buf[0..size]; //! let tc_slice = &test_buf[0..size];
//! ccsds_distributor.pass_tc(&tc_slice).expect("Passing TC slice failed"); //! ccsds_distributor.pass_tc(&tc_slice).expect("Passing TC slice failed");
//! //!
//! // User helper function to retrieve concrete class //! // Retrieve the APID handler.
//! let concrete_handler_ref: &ConcreteApidHandler = ccsds_distributor //! let handler_ref = ccsds_distributor.packet_handler();
//! .apid_handler_ref() //! assert_eq!(handler_ref.known_call_count, 1);
//! .expect("Casting back to concrete type failed"); //! assert_eq!(handler_ref.unknown_call_count, 1);
//! assert_eq!(concrete_handler_ref.known_call_count, 1);
//! assert_eq!(concrete_handler_ref.unknown_call_count, 1);
//! //!
//! // It's also possible to retrieve a mutable reference //! // Mutable access to the handler.
//! let mutable_ref: &mut ConcreteApidHandler = ccsds_distributor //! let mutable_handler_ref = ccsds_distributor.packet_handler_mut();
//! .apid_handler_mut() //! mutable_handler_ref.mutable_foo();
//! .expect("Casting back to concrete type failed");
//! mutable_ref.mutable_foo();
//! ``` //! ```
use crate::tmtc::{ReceivesCcsdsTc, ReceivesTcCore}; use crate::tmtc::{ReceivesCcsdsTc, ReceivesTcCore};
use alloc::boxed::Box;
use core::fmt::{Display, Formatter}; use core::fmt::{Display, Formatter};
use downcast_rs::Downcast;
use spacepackets::{ByteConversionError, CcsdsPacket, SpHeader}; use spacepackets::{ByteConversionError, CcsdsPacket, SpHeader};
#[cfg(feature = "std")] #[cfg(feature = "std")]
use std::error::Error; use std::error::Error;
@ -99,11 +93,7 @@ use std::error::Error;
/// instance of this handler to the [CcsdsDistributor]. The distributor will use the trait /// instance of this handler to the [CcsdsDistributor]. The distributor will use the trait
/// interface to dispatch received packets to the user based on the Application Process Identifier /// interface to dispatch received packets to the user based on the Application Process Identifier
/// (APID) field of the CCSDS packet. /// (APID) field of the CCSDS packet.
/// pub trait CcsdsPacketHandler {
/// This trait automatically implements the [downcast_rs::Downcast] to allow a more convenient API
/// to cast trait objects back to their concrete type after the handler was passed to the
/// distributor.
pub trait CcsdsPacketHandler: Downcast {
type Error; type Error;
fn valid_apids(&self) -> &'static [u16]; fn valid_apids(&self) -> &'static [u16];
@ -116,23 +106,12 @@ pub trait CcsdsPacketHandler: Downcast {
) -> Result<(), Self::Error>; ) -> Result<(), Self::Error>;
} }
downcast_rs::impl_downcast!(CcsdsPacketHandler assoc Error);
pub trait SendableCcsdsPacketHandler: CcsdsPacketHandler + Send {}
impl<T: CcsdsPacketHandler + Send> SendableCcsdsPacketHandler for T {}
downcast_rs::impl_downcast!(SendableCcsdsPacketHandler assoc Error);
/// The CCSDS distributor dispatches received CCSDS packets to a user provided packet handler. /// The CCSDS distributor dispatches received CCSDS packets to a user provided packet handler.
/// pub struct CcsdsDistributor<PacketHandler: CcsdsPacketHandler<Error = E>, E> {
/// The passed APID handler is required to be [Send]able to allow more ergonomic usage with
/// threads.
pub struct CcsdsDistributor<E> {
/// User provided APID handler stored as a generic trait object. /// User provided APID handler stored as a generic trait object.
/// It can be cast back to the original concrete type using the [Self::apid_handler_ref] or /// It can be cast back to the original concrete type using [Self::packet_handler] or
/// the [Self::apid_handler_mut] method. /// the [Self::packet_handler_mut] method.
pub apid_handler: Box<dyn SendableCcsdsPacketHandler<Error = E>>, packet_handler: PacketHandler,
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
@ -160,7 +139,9 @@ impl<E: Error> Error for CcsdsError<E> {
} }
} }
impl<E: 'static> ReceivesCcsdsTc for CcsdsDistributor<E> { impl<PacketHandler: CcsdsPacketHandler<Error = E>, E: 'static> ReceivesCcsdsTc
for CcsdsDistributor<PacketHandler, E>
{
type Error = CcsdsError<E>; type Error = CcsdsError<E>;
fn pass_ccsds(&mut self, header: &SpHeader, tc_raw: &[u8]) -> Result<(), Self::Error> { fn pass_ccsds(&mut self, header: &SpHeader, tc_raw: &[u8]) -> Result<(), Self::Error> {
@ -168,7 +149,9 @@ impl<E: 'static> ReceivesCcsdsTc for CcsdsDistributor<E> {
} }
} }
impl<E: 'static> ReceivesTcCore for CcsdsDistributor<E> { impl<PacketHandler: CcsdsPacketHandler<Error = E>, E: 'static> ReceivesTcCore
for CcsdsDistributor<PacketHandler, E>
{
type Error = CcsdsError<E>; type Error = CcsdsError<E>;
fn pass_tc(&mut self, tc_raw: &[u8]) -> Result<(), Self::Error> { fn pass_tc(&mut self, tc_raw: &[u8]) -> Result<(), Self::Error> {
@ -186,36 +169,31 @@ impl<E: 'static> ReceivesTcCore for CcsdsDistributor<E> {
} }
} }
impl<E: 'static> CcsdsDistributor<E> { impl<PacketHandler: CcsdsPacketHandler<Error = E>, E: 'static> CcsdsDistributor<PacketHandler, E> {
pub fn new(apid_handler: Box<dyn SendableCcsdsPacketHandler<Error = E>>) -> Self { pub fn new(packet_handler: PacketHandler) -> Self {
CcsdsDistributor { apid_handler } CcsdsDistributor { packet_handler }
} }
/// This function can be used to retrieve a reference to the concrete instance of the APID pub fn packet_handler(&self) -> &PacketHandler {
/// handler after it was passed to the distributor. See the &self.packet_handler
/// [module documentation][crate::tmtc::ccsds_distrib] for an fsrc-example.
pub fn apid_handler_ref<T: SendableCcsdsPacketHandler<Error = E>>(&self) -> Option<&T> {
self.apid_handler.downcast_ref::<T>()
} }
/// This function can be used to retrieve a mutable reference to the concrete instance of the pub fn packet_handler_mut(&mut self) -> &mut PacketHandler {
/// APID handler after it was passed to the distributor. &mut self.packet_handler
pub fn apid_handler_mut<T: SendableCcsdsPacketHandler<Error = E>>(&mut self) -> Option<&mut T> {
self.apid_handler.downcast_mut::<T>()
} }
fn dispatch_ccsds(&mut self, sp_header: &SpHeader, tc_raw: &[u8]) -> Result<(), CcsdsError<E>> { fn dispatch_ccsds(&mut self, sp_header: &SpHeader, tc_raw: &[u8]) -> Result<(), CcsdsError<E>> {
let apid = sp_header.apid(); let apid = sp_header.apid();
let valid_apids = self.apid_handler.valid_apids(); let valid_apids = self.packet_handler.valid_apids();
for &valid_apid in valid_apids { for &valid_apid in valid_apids {
if valid_apid == apid { if valid_apid == apid {
return self return self
.apid_handler .packet_handler
.handle_known_apid(sp_header, tc_raw) .handle_known_apid(sp_header, tc_raw)
.map_err(|e| CcsdsError::CustomError(e)); .map_err(|e| CcsdsError::CustomError(e));
} }
} }
self.apid_handler self.packet_handler
.handle_unknown_apid(sp_header, tc_raw) .handle_unknown_apid(sp_header, tc_raw)
.map_err(|e| CcsdsError::CustomError(e)) .map_err(|e| CcsdsError::CustomError(e))
} }
@ -244,6 +222,13 @@ pub(crate) mod tests {
&buf[0..size] &buf[0..size]
} }
pub fn generate_ping_tc_as_vec() -> Vec<u8> {
let mut sph = SpHeader::tc_unseg(0x002, 0x34, 0).unwrap();
PusTcCreator::new_simple(&mut sph, 17, 1, None, true)
.to_vec()
.unwrap()
}
type SharedPacketQueue = Arc<Mutex<VecDeque<(u16, Vec<u8>)>>>; type SharedPacketQueue = Arc<Mutex<VecDeque<(u16, Vec<u8>)>>>;
pub struct BasicApidHandlerSharedQueue { pub struct BasicApidHandlerSharedQueue {
pub known_packet_queue: SharedPacketQueue, pub known_packet_queue: SharedPacketQueue,
@ -305,7 +290,8 @@ pub(crate) mod tests {
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let mut vec = Vec::new(); let mut vec = Vec::new();
vec.extend_from_slice(tc_raw); vec.extend_from_slice(tc_raw);
Ok(self.known_packet_queue.push_back((sp_header.apid(), vec))) self.known_packet_queue.push_back((sp_header.apid(), vec));
Ok(())
} }
fn handle_unknown_apid( fn handle_unknown_apid(
@ -315,7 +301,8 @@ pub(crate) mod tests {
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let mut vec = Vec::new(); let mut vec = Vec::new();
vec.extend_from_slice(tc_raw); vec.extend_from_slice(tc_raw);
Ok(self.unknown_packet_queue.push_back((sp_header.apid(), vec))) self.unknown_packet_queue.push_back((sp_header.apid(), vec));
Ok(())
} }
} }
@ -327,7 +314,7 @@ pub(crate) mod tests {
known_packet_queue: known_packet_queue.clone(), known_packet_queue: known_packet_queue.clone(),
unknown_packet_queue: unknown_packet_queue.clone(), unknown_packet_queue: unknown_packet_queue.clone(),
}; };
let mut ccsds_distrib = CcsdsDistributor::new(Box::new(apid_handler)); let mut ccsds_distrib = CcsdsDistributor::new(apid_handler);
is_send(&ccsds_distrib); is_send(&ccsds_distrib);
let mut test_buf: [u8; 32] = [0; 32]; let mut test_buf: [u8; 32] = [0; 32];
let tc_slice = generate_ping_tc(test_buf.as_mut_slice()); let tc_slice = generate_ping_tc(test_buf.as_mut_slice());
@ -342,14 +329,9 @@ pub(crate) mod tests {
} }
#[test] #[test]
fn test_distribs_unknown_apid() { fn test_unknown_apid_handling() {
let known_packet_queue = Arc::new(Mutex::default()); let apid_handler = BasicApidHandlerOwnedQueue::default();
let unknown_packet_queue = Arc::new(Mutex::default()); let mut ccsds_distrib = CcsdsDistributor::new(apid_handler);
let apid_handler = BasicApidHandlerSharedQueue {
known_packet_queue: known_packet_queue.clone(),
unknown_packet_queue: unknown_packet_queue.clone(),
};
let mut ccsds_distrib = CcsdsDistributor::new(Box::new(apid_handler));
let mut sph = SpHeader::tc_unseg(0x004, 0x34, 0).unwrap(); let mut sph = SpHeader::tc_unseg(0x004, 0x34, 0).unwrap();
let pus_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true); let pus_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true);
let mut test_buf: [u8; 32] = [0; 32]; let mut test_buf: [u8; 32] = [0; 32];
@ -357,11 +339,52 @@ pub(crate) mod tests {
.write_to_bytes(test_buf.as_mut_slice()) .write_to_bytes(test_buf.as_mut_slice())
.expect("Error writing TC to buffer"); .expect("Error writing TC to buffer");
ccsds_distrib.pass_tc(&test_buf).expect("Passing TC failed"); ccsds_distrib.pass_tc(&test_buf).expect("Passing TC failed");
let recvd = unknown_packet_queue.lock().unwrap().pop_front(); assert!(ccsds_distrib.packet_handler().known_packet_queue.is_empty());
assert!(known_packet_queue.lock().unwrap().is_empty()); let apid_handler = ccsds_distrib.packet_handler_mut();
let recvd = apid_handler.unknown_packet_queue.pop_front();
assert!(recvd.is_some()); assert!(recvd.is_some());
let (apid, packet) = recvd.unwrap(); let (apid, packet) = recvd.unwrap();
assert_eq!(apid, 0x004); assert_eq!(apid, 0x004);
assert_eq!(packet.as_slice(), test_buf); assert_eq!(packet.as_slice(), test_buf);
} }
#[test]
fn test_ccsds_distribution() {
let mut ccsds_distrib = CcsdsDistributor::new(BasicApidHandlerOwnedQueue::default());
let mut sph = SpHeader::tc_unseg(0x002, 0x34, 0).unwrap();
let pus_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true);
let tc_vec = pus_tc.to_vec().unwrap();
ccsds_distrib
.pass_ccsds(&sph, &tc_vec)
.expect("passing CCSDS TC failed");
let recvd = ccsds_distrib
.packet_handler_mut()
.known_packet_queue
.pop_front();
assert!(recvd.is_some());
let recvd = recvd.unwrap();
assert_eq!(recvd.0, 0x002);
assert_eq!(recvd.1, tc_vec);
}
#[test]
fn test_distribution_short_packet_fails() {
let mut ccsds_distrib = CcsdsDistributor::new(BasicApidHandlerOwnedQueue::default());
let mut sph = SpHeader::tc_unseg(0x002, 0x34, 0).unwrap();
let pus_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true);
let tc_vec = pus_tc.to_vec().unwrap();
let result = ccsds_distrib.pass_tc(&tc_vec[0..6]);
assert!(result.is_err());
let error = result.unwrap_err();
if let CcsdsError::ByteConversionError(ByteConversionError::FromSliceTooSmall {
found,
expected,
}) = error
{
assert_eq!(found, 6);
assert_eq!(expected, 7);
} else {
panic!("Unexpected error variant");
}
}
} }

View File

@ -18,7 +18,7 @@ pub mod tm_helper;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
pub use ccsds_distrib::{CcsdsDistributor, CcsdsError, CcsdsPacketHandler}; pub use ccsds_distrib::{CcsdsDistributor, CcsdsError, CcsdsPacketHandler};
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
pub use pus_distrib::{PusDistributor, PusServiceProvider}; pub use pus_distrib::{PusDistributor, PusServiceDistributor};
/// Generic trait for object which can receive any telecommands in form of a raw bytestream, with /// Generic trait for object which can receive any telecommands in form of a raw bytestream, with
/// no assumptions about the received protocol. /// no assumptions about the received protocol.

View File

@ -2,7 +2,7 @@
//! //!
//! The routing components consist of two core components: //! The routing components consist of two core components:
//! 1. [PusDistributor] component which dispatches received packets to a user-provided handler. //! 1. [PusDistributor] component which dispatches received packets to a user-provided handler.
//! 2. [PusServiceProvider] trait which should be implemented by the user-provided PUS packet //! 2. [PusServiceDistributor] trait which should be implemented by the user-provided PUS packet
//! handler. //! handler.
//! //!
//! The [PusDistributor] implements the [ReceivesEcssPusTc], [ReceivesCcsdsTc] and the //! The [PusDistributor] implements the [ReceivesEcssPusTc], [ReceivesCcsdsTc] and the
@ -13,25 +13,26 @@
//! the raw bytestream. If this process fails, a [PusDistribError::PusError] is returned to the //! the raw bytestream. If this process fails, a [PusDistribError::PusError] is returned to the
//! user. //! user.
//! 2. If it was possible to extract both components, the packet will be passed to the //! 2. If it was possible to extract both components, the packet will be passed to the
//! [PusServiceProvider::handle_pus_tc_packet] method provided by the user. //! [PusServiceDistributor::distribute_packet] method provided by the user.
//! //!
//! # Example //! # Example
//! //!
//! ```rust //! ```rust
//! use spacepackets::ecss::WritablePusPacket; //! use spacepackets::ecss::WritablePusPacket;
//! use satrs::tmtc::pus_distrib::{PusDistributor, PusServiceProvider}; //! use satrs::tmtc::pus_distrib::{PusDistributor, PusServiceDistributor};
//! use satrs::tmtc::{ReceivesTc, ReceivesTcCore}; //! use satrs::tmtc::{ReceivesTc, ReceivesTcCore};
//! use spacepackets::SpHeader; //! use spacepackets::SpHeader;
//! use spacepackets::ecss::tc::{PusTcCreator, PusTcReader}; //! use spacepackets::ecss::tc::{PusTcCreator, PusTcReader};
//!
//! struct ConcretePusHandler { //! struct ConcretePusHandler {
//! handler_call_count: u32 //! handler_call_count: u32
//! } //! }
//! //!
//! // This is a very simple possible service provider. It increments an internal call count field, //! // This is a very simple possible service provider. It increments an internal call count field,
//! // which is used to verify the handler was called //! // which is used to verify the handler was called
//! impl PusServiceProvider for ConcretePusHandler { //! impl PusServiceDistributor for ConcretePusHandler {
//! type Error = (); //! type Error = ();
//! fn handle_pus_tc_packet(&mut self, service: u8, header: &SpHeader, pus_tc: &PusTcReader) -> Result<(), Self::Error> { //! fn distribute_packet(&mut self, service: u8, header: &SpHeader, pus_tc: &PusTcReader) -> Result<(), Self::Error> {
//! assert_eq!(service, 17); //! assert_eq!(service, 17);
//! assert_eq!(pus_tc.len_packed(), 13); //! assert_eq!(pus_tc.len_packed(), 13);
//! self.handler_call_count += 1; //! self.handler_call_count += 1;
@ -42,7 +43,7 @@
//! let service_handler = ConcretePusHandler { //! let service_handler = ConcretePusHandler {
//! handler_call_count: 0 //! handler_call_count: 0
//! }; //! };
//! let mut pus_distributor = PusDistributor::new(Box::new(service_handler)); //! let mut pus_distributor = PusDistributor::new(service_handler);
//! //!
//! // Create and pass PUS ping telecommand with a valid APID //! // Create and pass PUS ping telecommand with a valid APID
//! let mut space_packet_header = SpHeader::tc_unseg(0x002, 0x34, 0).unwrap(); //! let mut space_packet_header = SpHeader::tc_unseg(0x002, 0x34, 0).unwrap();
@ -57,50 +58,42 @@
//! //!
//! // User helper function to retrieve concrete class. We check the call count here to verify //! // User helper function to retrieve concrete class. We check the call count here to verify
//! // that the PUS ping telecommand was routed successfully. //! // that the PUS ping telecommand was routed successfully.
//! let concrete_handler_ref: &ConcretePusHandler = pus_distributor //! let concrete_handler = pus_distributor.service_distributor();
//! .service_provider_ref() //! assert_eq!(concrete_handler.handler_call_count, 1);
//! .expect("Casting back to concrete type failed");
//! assert_eq!(concrete_handler_ref.handler_call_count, 1);
//! ``` //! ```
use crate::pus::ReceivesEcssPusTc; use crate::pus::ReceivesEcssPusTc;
use crate::tmtc::{ReceivesCcsdsTc, ReceivesTcCore}; use crate::tmtc::{ReceivesCcsdsTc, ReceivesTcCore};
use alloc::boxed::Box;
use core::fmt::{Display, Formatter}; use core::fmt::{Display, Formatter};
use downcast_rs::Downcast;
use spacepackets::ecss::tc::PusTcReader; use spacepackets::ecss::tc::PusTcReader;
use spacepackets::ecss::{PusError, PusPacket}; use spacepackets::ecss::{PusError, PusPacket};
use spacepackets::SpHeader; use spacepackets::SpHeader;
#[cfg(feature = "std")] #[cfg(feature = "std")]
use std::error::Error; use std::error::Error;
pub trait PusServiceProvider: Downcast { /// Trait for a generic distributor object which can distribute PUS packets based on packet
/// properties like the PUS service, space packet header or any other content of the PUS packet.
pub trait PusServiceDistributor {
type Error; type Error;
fn handle_pus_tc_packet( fn distribute_packet(
&mut self, &mut self,
service: u8, service: u8,
header: &SpHeader, header: &SpHeader,
pus_tc: &PusTcReader, pus_tc: &PusTcReader,
) -> Result<(), Self::Error>; ) -> Result<(), Self::Error>;
} }
downcast_rs::impl_downcast!(PusServiceProvider assoc Error);
pub trait SendablePusServiceProvider: PusServiceProvider + Send {}
impl<T: Send + PusServiceProvider> SendablePusServiceProvider for T {}
downcast_rs::impl_downcast!(SendablePusServiceProvider assoc Error);
/// Generic distributor object which dispatches received packets to a user provided handler. /// Generic distributor object which dispatches received packets to a user provided handler.
/// pub struct PusDistributor<ServiceDistributor: PusServiceDistributor<Error = E>, E> {
/// This distributor expects the passed trait object to be [Send]able to allow more ergonomic service_distributor: ServiceDistributor,
/// usage with threads.
pub struct PusDistributor<E> {
pub service_provider: Box<dyn SendablePusServiceProvider<Error = E>>,
} }
impl<E> PusDistributor<E> { impl<ServiceDistributor: PusServiceDistributor<Error = E>, E>
pub fn new(service_provider: Box<dyn SendablePusServiceProvider<Error = E>>) -> Self { PusDistributor<ServiceDistributor, E>
PusDistributor { service_provider } {
pub fn new(service_provider: ServiceDistributor) -> Self {
PusDistributor {
service_distributor: service_provider,
}
} }
} }
@ -113,8 +106,8 @@ pub enum PusDistribError<E> {
impl<E: Display> Display for PusDistribError<E> { impl<E: Display> Display for PusDistribError<E> {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self { match self {
PusDistribError::CustomError(e) => write!(f, "{e}"), PusDistribError::CustomError(e) => write!(f, "pus distribution error: {e}"),
PusDistribError::PusError(e) => write!(f, "{e}"), PusDistribError::PusError(e) => write!(f, "pus distribution error: {e}"),
} }
} }
} }
@ -129,7 +122,9 @@ impl<E: Error> Error for PusDistribError<E> {
} }
} }
impl<E: 'static> ReceivesTcCore for PusDistributor<E> { impl<ServiceDistributor: PusServiceDistributor<Error = E>, E: 'static> ReceivesTcCore
for PusDistributor<ServiceDistributor, E>
{
type Error = PusDistribError<E>; type Error = PusDistribError<E>;
fn pass_tc(&mut self, tm_raw: &[u8]) -> Result<(), Self::Error> { fn pass_tc(&mut self, tm_raw: &[u8]) -> Result<(), Self::Error> {
// Convert to ccsds and call pass_ccsds // Convert to ccsds and call pass_ccsds
@ -139,7 +134,9 @@ impl<E: 'static> ReceivesTcCore for PusDistributor<E> {
} }
} }
impl<E: 'static> ReceivesCcsdsTc for PusDistributor<E> { impl<ServiceDistributor: PusServiceDistributor<Error = E>, E: 'static> ReceivesCcsdsTc
for PusDistributor<ServiceDistributor, E>
{
type Error = PusDistribError<E>; type Error = PusDistribError<E>;
fn pass_ccsds(&mut self, header: &SpHeader, tm_raw: &[u8]) -> Result<(), Self::Error> { fn pass_ccsds(&mut self, header: &SpHeader, tm_raw: &[u8]) -> Result<(), Self::Error> {
let (tc, _) = PusTcReader::new(tm_raw).map_err(|e| PusDistribError::PusError(e))?; let (tc, _) = PusTcReader::new(tm_raw).map_err(|e| PusDistribError::PusError(e))?;
@ -147,34 +144,39 @@ impl<E: 'static> ReceivesCcsdsTc for PusDistributor<E> {
} }
} }
impl<E: 'static> ReceivesEcssPusTc for PusDistributor<E> { impl<ServiceDistributor: PusServiceDistributor<Error = E>, E: 'static> ReceivesEcssPusTc
for PusDistributor<ServiceDistributor, E>
{
type Error = PusDistribError<E>; type Error = PusDistribError<E>;
fn pass_pus_tc(&mut self, header: &SpHeader, pus_tc: &PusTcReader) -> Result<(), Self::Error> { fn pass_pus_tc(&mut self, header: &SpHeader, pus_tc: &PusTcReader) -> Result<(), Self::Error> {
self.service_provider self.service_distributor
.handle_pus_tc_packet(pus_tc.service(), header, pus_tc) .distribute_packet(pus_tc.service(), header, pus_tc)
.map_err(|e| PusDistribError::CustomError(e)) .map_err(|e| PusDistribError::CustomError(e))
} }
} }
impl<E: 'static> PusDistributor<E> { impl<ServiceDistributor: PusServiceDistributor<Error = E>, E: 'static>
pub fn service_provider_ref<T: SendablePusServiceProvider<Error = E>>(&self) -> Option<&T> { PusDistributor<ServiceDistributor, E>
self.service_provider.downcast_ref::<T>() {
pub fn service_distributor(&self) -> &ServiceDistributor {
&self.service_distributor
} }
pub fn service_provider_mut<T: SendablePusServiceProvider<Error = E>>( pub fn service_distributor_mut(&mut self) -> &mut ServiceDistributor {
&mut self, &mut self.service_distributor
) -> Option<&mut T> {
self.service_provider.downcast_mut::<T>()
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::queue::GenericSendError;
use crate::tmtc::ccsds_distrib::tests::{ use crate::tmtc::ccsds_distrib::tests::{
generate_ping_tc, BasicApidHandlerOwnedQueue, BasicApidHandlerSharedQueue, generate_ping_tc, generate_ping_tc_as_vec, BasicApidHandlerOwnedQueue,
BasicApidHandlerSharedQueue,
}; };
use crate::tmtc::ccsds_distrib::{CcsdsDistributor, CcsdsPacketHandler}; use crate::tmtc::ccsds_distrib::{CcsdsDistributor, CcsdsPacketHandler};
use alloc::format;
use alloc::vec::Vec; use alloc::vec::Vec;
use spacepackets::ecss::PusError; use spacepackets::ecss::PusError;
use spacepackets::CcsdsPacket; use spacepackets::CcsdsPacket;
@ -185,54 +187,65 @@ mod tests {
fn is_send<T: Send>(_: &T) {} fn is_send<T: Send>(_: &T) {}
struct PusHandlerSharedQueue { pub struct PacketInfo {
pub pus_queue: Arc<Mutex<VecDeque<(u8, u16, Vec<u8>)>>>, pub service: u8,
pub apid: u16,
pub packet: Vec<u8>,
} }
struct PusHandlerSharedQueue(Arc<Mutex<VecDeque<PacketInfo>>>);
#[derive(Default)] #[derive(Default)]
struct PusHandlerOwnedQueue { struct PusHandlerOwnedQueue(VecDeque<PacketInfo>);
pub pus_queue: VecDeque<(u8, u16, Vec<u8>)>,
}
impl PusServiceProvider for PusHandlerSharedQueue { impl PusServiceDistributor for PusHandlerSharedQueue {
type Error = PusError; type Error = PusError;
fn handle_pus_tc_packet( fn distribute_packet(
&mut self, &mut self,
service: u8, service: u8,
sp_header: &SpHeader, sp_header: &SpHeader,
pus_tc: &PusTcReader, pus_tc: &PusTcReader,
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let mut vec: Vec<u8> = Vec::new(); let mut packet: Vec<u8> = Vec::new();
vec.extend_from_slice(pus_tc.raw_data()); packet.extend_from_slice(pus_tc.raw_data());
Ok(self self.0
.pus_queue
.lock() .lock()
.expect("Mutex lock failed") .expect("Mutex lock failed")
.push_back((service, sp_header.apid(), vec))) .push_back(PacketInfo {
service,
apid: sp_header.apid(),
packet,
});
Ok(())
} }
} }
impl PusServiceProvider for PusHandlerOwnedQueue { impl PusServiceDistributor for PusHandlerOwnedQueue {
type Error = PusError; type Error = PusError;
fn handle_pus_tc_packet( fn distribute_packet(
&mut self, &mut self,
service: u8, service: u8,
sp_header: &SpHeader, sp_header: &SpHeader,
pus_tc: &PusTcReader, pus_tc: &PusTcReader,
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let mut vec: Vec<u8> = Vec::new(); let mut packet: Vec<u8> = Vec::new();
vec.extend_from_slice(pus_tc.raw_data()); packet.extend_from_slice(pus_tc.raw_data());
Ok(self.pus_queue.push_back((service, sp_header.apid(), vec))) self.0.push_back(PacketInfo {
service,
apid: sp_header.apid(),
packet,
});
Ok(())
} }
} }
struct ApidHandlerShared { struct ApidHandlerShared {
pub pus_distrib: PusDistributor<PusError>, pub pus_distrib: PusDistributor<PusHandlerSharedQueue, PusError>,
pub handler_base: BasicApidHandlerSharedQueue, pub handler_base: BasicApidHandlerSharedQueue,
} }
struct ApidHandlerOwned { struct ApidHandlerOwned {
pub pus_distrib: PusDistributor<PusError>, pub pus_distrib: PusDistributor<PusHandlerOwnedQueue, PusError>,
handler_base: BasicApidHandlerOwnedQueue, handler_base: BasicApidHandlerOwnedQueue,
} }
@ -285,28 +298,36 @@ mod tests {
} }
#[test] #[test]
#[cfg(feature = "std")] fn test_pus_distribution_as_raw_packet() {
fn test_pus_distribution() { let mut pus_distrib = PusDistributor::new(PusHandlerOwnedQueue::default());
let tc = generate_ping_tc_as_vec();
let result = pus_distrib.pass_tc(&tc);
assert!(result.is_ok());
assert_eq!(pus_distrib.service_distributor_mut().0.len(), 1);
let packet_info = pus_distrib.service_distributor_mut().0.pop_front().unwrap();
assert_eq!(packet_info.service, 17);
assert_eq!(packet_info.apid, 0x002);
assert_eq!(packet_info.packet, tc);
}
#[test]
fn test_pus_distribution_combined_handler() {
let known_packet_queue = Arc::new(Mutex::default()); let known_packet_queue = Arc::new(Mutex::default());
let unknown_packet_queue = Arc::new(Mutex::default()); let unknown_packet_queue = Arc::new(Mutex::default());
let pus_queue = Arc::new(Mutex::default()); let pus_queue = Arc::new(Mutex::default());
let pus_handler = PusHandlerSharedQueue { let pus_handler = PusHandlerSharedQueue(pus_queue.clone());
pus_queue: pus_queue.clone(),
};
let handler_base = BasicApidHandlerSharedQueue { let handler_base = BasicApidHandlerSharedQueue {
known_packet_queue: known_packet_queue.clone(), known_packet_queue: known_packet_queue.clone(),
unknown_packet_queue: unknown_packet_queue.clone(), unknown_packet_queue: unknown_packet_queue.clone(),
}; };
let pus_distrib = PusDistributor { let pus_distrib = PusDistributor::new(pus_handler);
service_provider: Box::new(pus_handler),
};
is_send(&pus_distrib); is_send(&pus_distrib);
let apid_handler = ApidHandlerShared { let apid_handler = ApidHandlerShared {
pus_distrib, pus_distrib,
handler_base, handler_base,
}; };
let mut ccsds_distrib = CcsdsDistributor::new(Box::new(apid_handler)); let mut ccsds_distrib = CcsdsDistributor::new(apid_handler);
let mut test_buf: [u8; 32] = [0; 32]; let mut test_buf: [u8; 32] = [0; 32];
let tc_slice = generate_ping_tc(test_buf.as_mut_slice()); let tc_slice = generate_ping_tc(test_buf.as_mut_slice());
@ -322,25 +343,23 @@ mod tests {
assert_eq!(packet.as_slice(), tc_slice); assert_eq!(packet.as_slice(), tc_slice);
let recvd_pus = pus_queue.lock().unwrap().pop_front(); let recvd_pus = pus_queue.lock().unwrap().pop_front();
assert!(recvd_pus.is_some()); assert!(recvd_pus.is_some());
let (service, apid, tc_raw) = recvd_pus.unwrap(); let packet_info = recvd_pus.unwrap();
assert_eq!(service, 17); assert_eq!(packet_info.service, 17);
assert_eq!(apid, 0x002); assert_eq!(packet_info.apid, 0x002);
assert_eq!(tc_raw, tc_slice); assert_eq!(packet_info.packet, tc_slice);
} }
#[test] #[test]
fn test_as_any_cast() { fn test_accessing_combined_distributor() {
let pus_handler = PusHandlerOwnedQueue::default(); let pus_handler = PusHandlerOwnedQueue::default();
let handler_base = BasicApidHandlerOwnedQueue::default(); let handler_base = BasicApidHandlerOwnedQueue::default();
let pus_distrib = PusDistributor { let pus_distrib = PusDistributor::new(pus_handler);
service_provider: Box::new(pus_handler),
};
let apid_handler = ApidHandlerOwned { let apid_handler = ApidHandlerOwned {
pus_distrib, pus_distrib,
handler_base, handler_base,
}; };
let mut ccsds_distrib = CcsdsDistributor::new(Box::new(apid_handler)); let mut ccsds_distrib = CcsdsDistributor::new(apid_handler);
let mut test_buf: [u8; 32] = [0; 32]; let mut test_buf: [u8; 32] = [0; 32];
let tc_slice = generate_ping_tc(test_buf.as_mut_slice()); let tc_slice = generate_ping_tc(test_buf.as_mut_slice());
@ -349,21 +368,38 @@ mod tests {
.pass_tc(tc_slice) .pass_tc(tc_slice)
.expect("Passing TC slice failed"); .expect("Passing TC slice failed");
let apid_handler_casted_back: &mut ApidHandlerOwned = ccsds_distrib let apid_handler_casted_back = ccsds_distrib.packet_handler_mut();
.apid_handler_mut()
.expect("Cast to concrete type ApidHandler failed");
assert!(!apid_handler_casted_back assert!(!apid_handler_casted_back
.handler_base .handler_base
.known_packet_queue .known_packet_queue
.is_empty()); .is_empty());
let handler_casted_back: &mut PusHandlerOwnedQueue = apid_handler_casted_back let handler_owned_queue = apid_handler_casted_back
.pus_distrib .pus_distrib
.service_provider_mut() .service_distributor_mut();
.expect("Cast to concrete type PusHandlerOwnedQueue failed"); assert!(!handler_owned_queue.0.is_empty());
assert!(!handler_casted_back.pus_queue.is_empty()); let packet_info = handler_owned_queue.0.pop_front().unwrap();
let (service, apid, packet_raw) = handler_casted_back.pus_queue.pop_front().unwrap(); assert_eq!(packet_info.service, 17);
assert_eq!(service, 17); assert_eq!(packet_info.apid, 0x002);
assert_eq!(apid, 0x002); assert_eq!(packet_info.packet, tc_slice);
assert_eq!(packet_raw.as_slice(), tc_slice); }
#[test]
fn test_pus_distrib_error_custom_error() {
let error = PusDistribError::CustomError(GenericSendError::RxDisconnected);
let error_string = format!("{}", error);
assert_eq!(
error_string,
"pus distribution error: rx side has disconnected"
);
}
#[test]
fn test_pus_distrib_error_pus_error() {
let error = PusDistribError::<GenericSendError>::PusError(PusError::CrcCalculationMissing);
let error_string = format!("{}", error);
assert_eq!(
error_string,
"pus distribution error: crc16 was not calculated"
);
} }
} }

View File

@ -1,14 +1,14 @@
use satrs::event_man::{ use satrs::event_man::{
EventManagerWithMpscQueue, MpscEventU32Receiver, MpscEventU32SendProvider, SendEventProvider, EventManagerWithMpsc, EventSendProvider, EventU32SenderMpsc, MpscEventU32Receiver,
}; };
use satrs::events::{EventU32, EventU32TypedSev, Severity, SeverityInfo}; use satrs::events::{EventU32, EventU32TypedSev, Severity, SeverityInfo};
use satrs::params::U32Pair; use satrs::params::U32Pair;
use satrs::params::{Params, ParamsHeapless, WritableToBeBytes}; use satrs::params::{Params, ParamsHeapless, WritableToBeBytes};
use satrs::pus::event_man::{DefaultPusMgmtBackendProvider, EventReporter, PusEventDispatcher}; use satrs::pus::event_man::{DefaultPusEventMgmtBackend, EventReporter, PusEventDispatcher};
use satrs::pus::MpscTmAsVecSender; use satrs::pus::TmAsVecSenderWithMpsc;
use spacepackets::ecss::tm::PusTmReader; use spacepackets::ecss::tm::PusTmReader;
use spacepackets::ecss::{PusError, PusPacket}; use spacepackets::ecss::{PusError, PusPacket};
use std::sync::mpsc::{channel, SendError, TryRecvError}; use std::sync::mpsc::{self, SendError, TryRecvError};
use std::thread; use std::thread;
const INFO_EVENT: EventU32TypedSev<SeverityInfo> = const INFO_EVENT: EventU32TypedSev<SeverityInfo> =
@ -24,21 +24,21 @@ pub enum CustomTmSenderError {
#[test] #[test]
fn test_threaded_usage() { fn test_threaded_usage() {
let (event_sender, event_man_receiver) = channel(); let (event_sender, event_man_receiver) = mpsc::channel();
let event_receiver = MpscEventU32Receiver::new(event_man_receiver); let event_receiver = MpscEventU32Receiver::new(event_man_receiver);
let mut event_man = EventManagerWithMpscQueue::new(Box::new(event_receiver)); let mut event_man = EventManagerWithMpsc::new(event_receiver);
let (pus_event_man_tx, pus_event_man_rx) = channel(); let (pus_event_man_tx, pus_event_man_rx) = mpsc::channel();
let pus_event_man_send_provider = MpscEventU32SendProvider::new(1, pus_event_man_tx); let pus_event_man_send_provider = EventU32SenderMpsc::new(1, pus_event_man_tx);
event_man.subscribe_all(pus_event_man_send_provider.id()); event_man.subscribe_all(pus_event_man_send_provider.channel_id());
event_man.add_sender(pus_event_man_send_provider); event_man.add_sender(pus_event_man_send_provider);
let (event_tx, event_rx) = channel(); let (event_tx, event_rx) = mpsc::channel();
let reporter = EventReporter::new(0x02, 128).expect("Creating event reporter failed"); let reporter = EventReporter::new(0x02, 128).expect("Creating event reporter failed");
let backend = DefaultPusMgmtBackendProvider::<EventU32>::default(); let mut pus_event_man =
let mut pus_event_man = PusEventDispatcher::new(reporter, Box::new(backend)); PusEventDispatcher::new(reporter, DefaultPusEventMgmtBackend::default());
// PUS + Generic event manager thread // PUS + Generic event manager thread
let jh0 = thread::spawn(move || { let jh0 = thread::spawn(move || {
let mut sender = MpscTmAsVecSender::new(0, "event_sender", event_tx); let mut sender = TmAsVecSenderWithMpsc::new(0, "event_sender", event_tx);
let mut event_cnt = 0; let mut event_cnt = 0;
let mut params_array: [u8; 128] = [0; 128]; let mut params_array: [u8; 128] = [0; 128];
loop { loop {
@ -71,6 +71,7 @@ fn test_threaded_usage() {
Params::Vec(vec) => gen_event(Some(vec.as_slice())), Params::Vec(vec) => gen_event(Some(vec.as_slice())),
Params::String(str) => gen_event(Some(str.as_bytes())), Params::String(str) => gen_event(Some(str.as_bytes())),
Params::Store(_) => gen_event(None), Params::Store(_) => gen_event(None),
_ => panic!("unsupported parameter type"),
} }
} else { } else {
gen_event(None) gen_event(None)
@ -120,10 +121,7 @@ fn test_threaded_usage() {
} }
} }
event_sender event_sender
.send(( .send((LOW_SEV_EVENT, Some(Params::Heapless((2_u32, 3_u32).into()))))
LOW_SEV_EVENT.into(),
Some(Params::Heapless((2_u32, 3_u32).into())),
))
.expect("Sending low severity event failed"); .expect("Sending low severity event failed");
loop { loop {
match event_rx.try_recv() { match event_rx.try_recv() {

View File

@ -6,7 +6,7 @@ pub mod crossbeam_test {
FailParams, RequestId, VerificationReporterCfg, VerificationReporterWithSender, FailParams, RequestId, VerificationReporterCfg, VerificationReporterWithSender,
VerificationReportingProvider, VerificationReportingProvider,
}; };
use satrs::pus::CrossbeamTmInStoreSender; use satrs::pus::TmInSharedPoolSenderWithCrossbeam;
use satrs::tmtc::tm_helper::SharedTmPool; use satrs::tmtc::tm_helper::SharedTmPool;
use spacepackets::ecss::tc::{PusTcCreator, PusTcReader, PusTcSecondaryHeader}; use spacepackets::ecss::tc::{PusTcCreator, PusTcReader, PusTcSecondaryHeader};
use spacepackets::ecss::tm::PusTmReader; use spacepackets::ecss::tm::PusTmReader;
@ -40,10 +40,13 @@ pub mod crossbeam_test {
let shared_tc_pool_0 = Arc::new(RwLock::new(StaticMemoryPool::new(pool_cfg))); let shared_tc_pool_0 = Arc::new(RwLock::new(StaticMemoryPool::new(pool_cfg)));
let shared_tc_pool_1 = shared_tc_pool_0.clone(); let shared_tc_pool_1 = shared_tc_pool_0.clone();
let (tx, rx) = crossbeam_channel::bounded(10); let (tx, rx) = crossbeam_channel::bounded(10);
let sender = let sender = TmInSharedPoolSenderWithCrossbeam::new(
CrossbeamTmInStoreSender::new(0, "verif_sender", shared_tm_pool.clone(), tx.clone()); 0,
let mut reporter_with_sender_0 = "verif_sender",
VerificationReporterWithSender::new(&cfg, Box::new(sender)); shared_tm_pool.clone(),
tx.clone(),
);
let mut reporter_with_sender_0 = VerificationReporterWithSender::new(&cfg, sender);
let mut reporter_with_sender_1 = reporter_with_sender_0.clone(); let mut reporter_with_sender_1 = reporter_with_sender_0.clone();
// For test purposes, we retrieve the request ID from the TCs and pass them to the receiver // For test purposes, we retrieve the request ID from the TCs and pass them to the receiver
// tread. // tread.