fixes for example event management
Some checks failed: Rust/sat-rs/pipeline/pr-main reported a failure building this commit.

Robin Müller 2024-04-24 14:09:02 +02:00
parent 4fe95edecd
commit c97c22b7ac
Signed by: muellerr
GPG Key ID: A649FB78196E3849
6 changed files with 153 additions and 107 deletions

View File

```diff
@@ -10,6 +10,7 @@ class Apid(enum.IntEnum):
     GENERIC_PUS = 2
     ACS = 3
     CFDP = 4
+    TMTC = 5


 class EventSeverity(enum.IntEnum):
```

View File

```diff
@@ -144,7 +144,9 @@ class PusHandler(GenericApidHandlerBase):
             )
             src_data = tm_packet.source_data
             event_u32 = EventU32.unpack(src_data)
-            _LOGGER.info(f"Received event packet. Event: {event_u32}")
+            _LOGGER.info(
+                f"Received event packet. Source APID: {Apid(tm_packet.apid)!r}, Event: {event_u32}"
+            )
             if event_u32.group_id == 0 and event_u32.unique_id == 0:
                 _LOGGER.info("Received test event")
         elif service == 17:
```

View File

```diff
@@ -11,7 +11,7 @@ use satrs::{
     event_man::{EventManagerWithBoundedMpsc, EventSendProvider, EventU32SenderMpscBounded},
     pus::{
         event_man::{
-            DefaultPusEventU32Dispatcher, EventReporter, EventRequest, EventRequestWithToken,
+            DefaultPusEventU32TmCreator, EventReporter, EventRequest, EventRequestWithToken,
         },
         verification::{TcStateStarted, VerificationReportingProvider, VerificationToken},
     },
@@ -37,13 +37,12 @@ impl EventTmHookProvider for EventApidSetter {
 /// packets. It also handles the verification completion of PUS event service requests.
 pub struct PusEventHandler<TmSender: EcssTmSender> {
     event_request_rx: mpsc::Receiver<EventRequestWithToken>,
-    pus_event_dispatcher: DefaultPusEventU32Dispatcher<()>,
+    pus_event_tm_creator: DefaultPusEventU32TmCreator<EventApidSetter>,
     pus_event_man_rx: mpsc::Receiver<EventMessageU32>,
     tm_sender: TmSender,
     time_provider: CdsTime,
     timestamp: [u8; 7],
     verif_handler: VerificationReporter,
-    event_apid_setter: EventApidSetter,
 }
 
 impl<TmSender: EcssTmSender> PusEventHandler<TmSender> {
@@ -58,9 +57,16 @@ impl<TmSender: EcssTmSender> PusEventHandler<TmSender> {
         // All events sent to the manager are routed to the PUS event manager, which generates PUS event
         // telemetry for each event.
-        let event_reporter = EventReporter::new(PUS_EVENT_MANAGEMENT.raw(), 0, 0, 128).unwrap();
+        let event_reporter = EventReporter::new_with_hook(
+            PUS_EVENT_MANAGEMENT.raw(),
+            0,
+            0,
+            128,
+            EventApidSetter::default(),
+        )
+        .unwrap();
         let pus_event_dispatcher =
-            DefaultPusEventU32Dispatcher::new_with_default_backend(event_reporter);
+            DefaultPusEventU32TmCreator::new_with_default_backend(event_reporter);
         let pus_event_man_send_provider = EventU32SenderMpscBounded::new(
             PUS_EVENT_MANAGEMENT.raw(),
             pus_event_man_tx,
@@ -72,13 +78,12 @@ impl<TmSender: EcssTmSender> PusEventHandler<TmSender> {
         Self {
             event_request_rx,
-            pus_event_dispatcher,
+            pus_event_tm_creator: pus_event_dispatcher,
             pus_event_man_rx,
             time_provider: CdsTime::new_with_u16_days(0, 0),
             timestamp: [0; 7],
             verif_handler,
             tm_sender,
-            event_apid_setter: EventApidSetter::default(),
         }
     }
@@ -92,36 +97,49 @@ impl<TmSender: EcssTmSender> PusEventHandler<TmSender> {
                 .completion_success(&self.tm_sender, started_token, timestamp)
                 .expect("Sending completion success failed");
         };
+        loop {
             // handle event requests
-        if let Ok(event_req) = self.event_request_rx.try_recv() {
-            match event_req.request {
+            match self.event_request_rx.try_recv() {
+                Ok(event_req) => match event_req.request {
                     EventRequest::Enable(event) => {
-                        self.pus_event_dispatcher
+                        self.pus_event_tm_creator
                             .enable_tm_for_event(&event)
                             .expect("Enabling TM failed");
                         update_time(&mut self.time_provider, &mut self.timestamp);
                         report_completion(event_req, &self.timestamp);
                     }
                     EventRequest::Disable(event) => {
-                        self.pus_event_dispatcher
+                        self.pus_event_tm_creator
                             .disable_tm_for_event(&event)
                             .expect("Disabling TM failed");
                         update_time(&mut self.time_provider, &mut self.timestamp);
                         report_completion(event_req, &self.timestamp);
                     }
+                },
+                Err(e) => match e {
+                    mpsc::TryRecvError::Empty => break,
+                    mpsc::TryRecvError::Disconnected => {
+                        log::warn!("all event request senders have disconnected");
+                        break;
+                    }
+                },
             }
         }
     }
 
     pub fn generate_pus_event_tm(&mut self) {
+        loop {
             // Perform the generation of PUS event packets
-        if let Ok(event_msg) = self.pus_event_man_rx.try_recv() {
+            match self.pus_event_man_rx.try_recv() {
+                Ok(event_msg) => {
                     update_time(&mut self.time_provider, &mut self.timestamp);
                     let param_vec = event_msg.params().map_or(Vec::new(), |param| {
                         param.to_vec().expect("failed to convert params to vec")
                     });
-            self.event_apid_setter.next_apid = UniqueApidTargetId::from(event_msg.sender_id()).apid;
-            self.pus_event_dispatcher
+                    // We use the TM modification hook to set the sender APID for each event.
+                    self.pus_event_tm_creator.reporter.tm_hook.next_apid =
+                        UniqueApidTargetId::from(event_msg.sender_id()).apid;
+                    self.pus_event_tm_creator
                         .generate_pus_event_tm_generic(
                             &self.tm_sender,
                             &self.timestamp,
@@ -130,6 +148,15 @@ impl<TmSender: EcssTmSender> PusEventHandler<TmSender> {
                         )
                         .expect("Sending TM as event failed");
                 }
+                Err(e) => match e {
+                    mpsc::TryRecvError::Empty => break,
+                    mpsc::TryRecvError::Disconnected => {
+                        log::warn!("All event senders have disconnected");
+                        break;
+                    }
+                },
+            }
+        }
     }
 }
@@ -181,3 +208,6 @@ impl<TmSender: EcssTmSender> EventHandler<TmSender> {
         log::warn!("event routing error for event {event_msg:?}: {error:?}");
     }
 }
+
+#[cfg(test)]
+mod tests {}
```
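The core of this change is visible in the last hunks: the `EventApidSetter` is no longer a separate field of `PusEventHandler`; it now travels inside the `EventReporter` as a TM hook and is reached through `pus_event_tm_creator.reporter.tm_hook`. For readers unfamiliar with the hook, a minimal sketch of such an `EventTmHookProvider` implementation follows. Only the struct name, its `next_apid` field and the trait name appear in this diff; the `modify_tm` method name, its signature and the import paths are assumptions.

```rust
use satrs::pus::event::EventTmHookProvider;
use spacepackets::ecss::tm::PusTmCreator;

/// Sketch of the APID-patching hook used above: every generated event TM
/// packet gets the APID of the component which emitted the event.
#[derive(Default)]
pub struct EventApidSetter {
    pub next_apid: u16,
}

impl EventTmHookProvider for EventApidSetter {
    // Assumed callback: invoked by the EventReporter for each event TM packet
    // before it is sent. set_apid() is assumed to forward to the space packet
    // header of the TM creator.
    fn modify_tm(&self, tm: &mut PusTmCreator) {
        tm.set_apid(self.next_apid);
    }
}
```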

View File

```diff
@@ -11,7 +11,7 @@ use crate::events::EventHandler;
 use crate::interface::udp::DynamicUdpTmHandler;
 use crate::pus::stack::PusStack;
 use crate::tmtc::tc_source::{TcSourceTaskDynamic, TcSourceTaskStatic};
-use crate::tmtc::tm_sink::{TmFunnelDynamic, TmFunnelStatic};
+use crate::tmtc::tm_sink::{TmSinkDynamic, TmSinkStatic};
 use log::info;
 use pus::test::create_test_service_dynamic;
 use satrs::hal::std::tcp_server::ServerConfig;
@@ -54,11 +54,11 @@ fn static_tmtc_pool_main() {
     let shared_tm_pool_wrapper = SharedPacketPool::new(&shared_tm_pool);
     let shared_tc_pool_wrapper = SharedPacketPool::new(&shared_tc_pool);
     let (tc_source_tx, tc_source_rx) = mpsc::sync_channel(50);
-    let (tm_funnel_tx, tm_funnel_rx) = mpsc::sync_channel(50);
+    let (tm_sink_tx, tm_sink_rx) = mpsc::sync_channel(50);
     let (tm_server_tx, tm_server_rx) = mpsc::sync_channel(50);
-    let tm_funnel_tx_sender =
-        PacketSenderWithSharedPool::new(tm_funnel_tx.clone(), shared_tm_pool_wrapper.clone());
+    let tm_sink_tx_sender =
+        PacketSenderWithSharedPool::new(tm_sink_tx.clone(), shared_tm_pool_wrapper.clone());
     let (mgm_handler_composite_tx, mgm_handler_composite_rx) =
         mpsc::channel::<GenericMessage<CompositeRequest>>();
@@ -85,7 +85,7 @@ fn static_tmtc_pool_main() {
     // The event task is the core handler to perform the event routing and TM handling as specified
     // in the sat-rs documentation.
-    let mut event_handler = EventHandler::new(tm_funnel_tx.clone(), event_rx, event_request_rx);
+    let mut event_handler = EventHandler::new(tm_sink_tx.clone(), event_rx, event_request_rx);
 
     let (pus_test_tx, pus_test_rx) = mpsc::channel();
     let (pus_event_tx, pus_event_rx) = mpsc::channel();
@@ -107,39 +107,39 @@ fn static_tmtc_pool_main() {
         mode_tc_sender: pus_mode_tx,
     };
     let pus_test_service = create_test_service_static(
-        tm_funnel_tx_sender.clone(),
+        tm_sink_tx_sender.clone(),
         shared_tc_pool.clone(),
         event_tx.clone(),
         pus_test_rx,
     );
     let pus_scheduler_service = create_scheduler_service_static(
-        tm_funnel_tx_sender.clone(),
+        tm_sink_tx_sender.clone(),
         tc_source.clone(),
         pus_sched_rx,
         create_sched_tc_pool(),
     );
     let pus_event_service = create_event_service_static(
-        tm_funnel_tx_sender.clone(),
+        tm_sink_tx_sender.clone(),
         shared_tc_pool.clone(),
         pus_event_rx,
         event_request_tx,
     );
     let pus_action_service = create_action_service_static(
-        tm_funnel_tx_sender.clone(),
+        tm_sink_tx_sender.clone(),
         shared_tc_pool.clone(),
         pus_action_rx,
         request_map.clone(),
         pus_action_reply_rx,
     );
     let pus_hk_service = create_hk_service_static(
-        tm_funnel_tx_sender.clone(),
+        tm_sink_tx_sender.clone(),
         shared_tc_pool.clone(),
         pus_hk_rx,
         request_map.clone(),
         pus_hk_reply_rx,
     );
     let pus_mode_service = create_mode_service_static(
-        tm_funnel_tx_sender.clone(),
+        tm_sink_tx_sender.clone(),
         shared_tc_pool.clone(),
         pus_mode_rx,
         request_map,
@@ -157,7 +157,7 @@ fn static_tmtc_pool_main() {
     let mut tmtc_task = TcSourceTaskStatic::new(
         shared_tc_pool_wrapper.clone(),
         tc_source_rx,
-        PusTcDistributor::new(tm_funnel_tx_sender, pus_router),
+        PusTcDistributor::new(tm_sink_tx_sender, pus_router),
     );
 
     let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
@@ -187,10 +187,10 @@ fn static_tmtc_pool_main() {
     )
     .expect("tcp server creation failed");
 
-    let mut tm_funnel = TmFunnelStatic::new(
+    let mut tm_sink = TmSinkStatic::new(
         shared_tm_pool_wrapper,
         sync_tm_tcp_source,
-        tm_funnel_rx,
+        tm_sink_rx,
         tm_server_tx,
     );
@@ -210,7 +210,7 @@ fn static_tmtc_pool_main() {
         mode_leaf_interface,
         mgm_handler_composite_rx,
         pus_hk_reply_tx,
-        tm_funnel_tx,
+        tm_sink_tx,
         dummy_spi_interface,
         shared_mgm_set,
     );
@@ -241,9 +241,9 @@ fn static_tmtc_pool_main() {
     info!("Starting TM funnel task");
     let jh_tm_funnel = thread::Builder::new()
-        .name("TM Funnel".to_string())
+        .name("tm sink".to_string())
         .spawn(move || loop {
-            tm_funnel.operation();
+            tm_sink.operation();
         })
         .unwrap();
@@ -410,7 +410,7 @@ fn dyn_tmtc_pool_main() {
     )
     .expect("tcp server creation failed");
 
-    let mut tm_funnel = TmFunnelDynamic::new(sync_tm_tcp_source, tm_funnel_rx, tm_server_tx);
+    let mut tm_funnel = TmSinkDynamic::new(sync_tm_tcp_source, tm_funnel_rx, tm_server_tx);
 
     let (mgm_handler_mode_reply_to_parent_tx, _mgm_handler_mode_reply_to_parent_rx) =
         mpsc::channel();
```
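This file is a mechanical rename (`tm_funnel_*` becomes `tm_sink_*`), but the wiring it touches is the central fan-in of the example: every PUS service clones the same `tm_sink_tx` sender, and a single sink thread drains the receiver. Below is a self-contained sketch of that pattern, combined with the non-blocking `TryRecvError` handling the event handler adopts in this commit; it uses only `std` and illustrative names, not the actual sat-rs types.

```rust
use std::sync::mpsc;
use std::thread;
use std::time::Duration;

fn main() {
    // Bounded channel, as in the example (capacity 50).
    let (tm_sink_tx, tm_sink_rx) = mpsc::sync_channel::<Vec<u8>>(50);
    // Several producers (stand-ins for the PUS services) clone one sender.
    for id in 0..3u8 {
        let tx = tm_sink_tx.clone();
        thread::spawn(move || tx.send(vec![id]).expect("sending TM failed"));
    }
    // Drop the last sender so the sink can observe the disconnect.
    drop(tm_sink_tx);
    // One sink drains the receiver without blocking, distinguishing
    // "nothing queued right now" from "all senders are gone".
    loop {
        match tm_sink_rx.try_recv() {
            Ok(packet) => println!("TM sink received {} byte(s)", packet.len()),
            Err(mpsc::TryRecvError::Empty) => thread::sleep(Duration::from_millis(10)),
            Err(mpsc::TryRecvError::Disconnected) => {
                eprintln!("all TM senders have disconnected");
                break;
            }
        }
    }
}
```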

View File

```diff
@@ -70,18 +70,23 @@ impl TmFunnelCommon {
     }
 
     fn packet_printout(tm: &PusTmZeroCopyWriter) {
-        info!("Sending PUS TM[{},{}]", tm.service(), tm.subservice());
+        info!(
+            "Sending PUS TM[{},{}] with APID {}",
+            tm.service(),
+            tm.subservice(),
+            tm.apid()
+        );
     }
 }
 
-pub struct TmFunnelStatic {
+pub struct TmSinkStatic {
     common: TmFunnelCommon,
     shared_tm_store: SharedPacketPool,
     tm_funnel_rx: mpsc::Receiver<PacketInPool>,
     tm_server_tx: mpsc::SyncSender<PacketInPool>,
 }
 
-impl TmFunnelStatic {
+impl TmSinkStatic {
     pub fn new(
         shared_tm_store: SharedPacketPool,
         sync_tm_tcp_source: SyncTcpTmSource,
@@ -121,13 +126,13 @@ impl TmFunnelStatic {
     }
 }
 
-pub struct TmFunnelDynamic {
+pub struct TmSinkDynamic {
     common: TmFunnelCommon,
     tm_funnel_rx: mpsc::Receiver<PacketAsVec>,
     tm_server_tx: mpsc::Sender<PacketAsVec>,
 }
 
-impl TmFunnelDynamic {
+impl TmSinkDynamic {
     pub fn new(
         sync_tm_tcp_source: SyncTcpTmSource,
         tm_funnel_rx: mpsc::Receiver<PacketAsVec>,
```

View File

```diff
@@ -28,7 +28,7 @@ pub use heapless_mod::*;
 /// structure to track disabled events. A more primitive and embedded friendly
 /// solution could track this information in a static or pre-allocated list which contains
 /// the disabled events.
-pub trait PusEventMgmtBackendProvider<Event: GenericEvent> {
+pub trait PusEventReportingMap<Event: GenericEvent> {
     type Error;
 
     fn event_enabled(&self, event: &Event) -> bool;
@@ -56,7 +56,7 @@ pub mod heapless_mod {
     {
     }
 
-    impl<const N: usize, Provider: GenericEvent> PusEventMgmtBackendProvider<Provider>
+    impl<const N: usize, Provider: GenericEvent> PusEventReportingMap<Provider>
         for HeaplessPusMgmtBackendProvider<N, Provider>
     {
         type Error = ();
@@ -105,7 +105,10 @@ impl From<EcssTmtcError> for EventManError {
 pub mod alloc_mod {
     use core::marker::PhantomData;
 
-    use crate::events::EventU16;
+    use crate::{
+        events::EventU16,
+        pus::event::{DummyEventHook, EventTmHookProvider},
+    };
 
     use super::*;
@@ -114,11 +117,11 @@ pub mod alloc_mod {
     ///
     /// This provider is a good option for host systems or larger embedded systems where
     /// the expected occasional memory allocation performed by the [HashSet] is not an issue.
-    pub struct DefaultPusEventMgmtBackend<Event: GenericEvent = EventU32> {
+    pub struct DefaultPusEventReportingMap<Event: GenericEvent = EventU32> {
         disabled: HashSet<Event>,
     }
 
-    impl<Event: GenericEvent> Default for DefaultPusEventMgmtBackend<Event> {
+    impl<Event: GenericEvent> Default for DefaultPusEventReportingMap<Event> {
         fn default() -> Self {
             Self {
                 disabled: HashSet::default(),
@@ -126,51 +129,54 @@ pub mod alloc_mod {
         }
     }
 
-    impl<EV: GenericEvent + PartialEq + Eq + Hash + Copy + Clone> PusEventMgmtBackendProvider<EV>
-        for DefaultPusEventMgmtBackend<EV>
+    impl<Event: GenericEvent + PartialEq + Eq + Hash + Copy + Clone> PusEventReportingMap<Event>
+        for DefaultPusEventReportingMap<Event>
     {
         type Error = ();
 
-        fn event_enabled(&self, event: &EV) -> bool {
+        fn event_enabled(&self, event: &Event) -> bool {
             !self.disabled.contains(event)
         }
 
-        fn enable_event_reporting(&mut self, event: &EV) -> Result<bool, Self::Error> {
+        fn enable_event_reporting(&mut self, event: &Event) -> Result<bool, Self::Error> {
             Ok(self.disabled.remove(event))
         }
 
-        fn disable_event_reporting(&mut self, event: &EV) -> Result<bool, Self::Error> {
+        fn disable_event_reporting(&mut self, event: &Event) -> Result<bool, Self::Error> {
             Ok(self.disabled.insert(*event))
         }
     }
 
-    pub struct PusEventDispatcher<
-        B: PusEventMgmtBackendProvider<EV, Error = E>,
-        EV: GenericEvent,
-        E,
-    > {
-        reporter: EventReporter,
-        backend: B,
-        phantom: PhantomData<(E, EV)>,
-    }
+    pub struct PusEventTmCreatorWithMap<
+        ReportingMap: PusEventReportingMap<Event>,
+        Event: GenericEvent,
+        EventTmHook: EventTmHookProvider = DummyEventHook,
+    > {
+        pub reporter: EventReporter<EventTmHook>,
+        reporting_map: ReportingMap,
+        phantom: PhantomData<Event>,
+    }
 
-    impl<B: PusEventMgmtBackendProvider<Event, Error = E>, Event: GenericEvent, E>
-        PusEventDispatcher<B, Event, E>
+    impl<
+            ReportingMap: PusEventReportingMap<Event>,
+            Event: GenericEvent,
+            EventTmHook: EventTmHookProvider,
+        > PusEventTmCreatorWithMap<ReportingMap, Event, EventTmHook>
     {
-        pub fn new(reporter: EventReporter, backend: B) -> Self {
+        pub fn new(reporter: EventReporter<EventTmHook>, backend: ReportingMap) -> Self {
             Self {
                 reporter,
-                backend,
+                reporting_map: backend,
                 phantom: PhantomData,
             }
         }
 
-        pub fn enable_tm_for_event(&mut self, event: &Event) -> Result<bool, E> {
-            self.backend.enable_event_reporting(event)
+        pub fn enable_tm_for_event(&mut self, event: &Event) -> Result<bool, ReportingMap::Error> {
+            self.reporting_map.enable_event_reporting(event)
         }
 
-        pub fn disable_tm_for_event(&mut self, event: &Event) -> Result<bool, E> {
-            self.backend.disable_event_reporting(event)
+        pub fn disable_tm_for_event(&mut self, event: &Event) -> Result<bool, ReportingMap::Error> {
+            self.reporting_map.disable_event_reporting(event)
         }
 
         pub fn generate_pus_event_tm_generic(
@@ -180,7 +186,7 @@ pub mod alloc_mod {
             event: Event,
             params: Option<&[u8]>,
         ) -> Result<bool, EventManError> {
-            if !self.backend.event_enabled(&event) {
+            if !self.reporting_map.event_enabled(&event) {
                 return Ok(false);
             }
             match event.severity() {
@@ -208,31 +214,33 @@ pub mod alloc_mod {
         }
     }
 
-    impl<EV: GenericEvent + Copy + PartialEq + Eq + Hash>
-        PusEventDispatcher<DefaultPusEventMgmtBackend<EV>, EV, ()>
+    impl<Event: GenericEvent + Copy + PartialEq + Eq + Hash, EventTmHook: EventTmHookProvider>
+        PusEventTmCreatorWithMap<DefaultPusEventReportingMap<Event>, Event, EventTmHook>
     {
-        pub fn new_with_default_backend(reporter: EventReporter) -> Self {
+        pub fn new_with_default_backend(reporter: EventReporter<EventTmHook>) -> Self {
             Self {
                 reporter,
-                backend: DefaultPusEventMgmtBackend::default(),
+                reporting_map: DefaultPusEventReportingMap::default(),
                 phantom: PhantomData,
             }
         }
     }
 
-    impl<B: PusEventMgmtBackendProvider<EventU32, Error = E>, E> PusEventDispatcher<B, EventU32, E> {
+    impl<ReportingMap: PusEventReportingMap<EventU32>>
+        PusEventTmCreatorWithMap<ReportingMap, EventU32>
+    {
         pub fn enable_tm_for_event_with_sev<Severity: HasSeverity>(
             &mut self,
             event: &EventU32TypedSev<Severity>,
-        ) -> Result<bool, E> {
-            self.backend.enable_event_reporting(event.as_ref())
+        ) -> Result<bool, ReportingMap::Error> {
+            self.reporting_map.enable_event_reporting(event.as_ref())
         }
 
         pub fn disable_tm_for_event_with_sev<Severity: HasSeverity>(
             &mut self,
             event: &EventU32TypedSev<Severity>,
-        ) -> Result<bool, E> {
-            self.backend.disable_event_reporting(event.as_ref())
+        ) -> Result<bool, ReportingMap::Error> {
+            self.reporting_map.disable_event_reporting(event.as_ref())
         }
 
         pub fn generate_pus_event_tm<Severity: HasSeverity>(
@@ -246,10 +254,10 @@ pub mod alloc_mod {
         }
     }
 
-    pub type DefaultPusEventU16Dispatcher<E> =
-        PusEventDispatcher<DefaultPusEventMgmtBackend<EventU16>, EventU16, E>;
+    pub type DefaultPusEventU16TmCreator<EventTmHook = DummyEventHook> =
+        PusEventTmCreatorWithMap<DefaultPusEventReportingMap<EventU16>, EventU16, EventTmHook>;
 
-    pub type DefaultPusEventU32Dispatcher<E> =
-        PusEventDispatcher<DefaultPusEventMgmtBackend<EventU32>, EventU32, E>;
+    pub type DefaultPusEventU32TmCreator<EventTmHook = DummyEventHook> =
+        PusEventTmCreatorWithMap<DefaultPusEventReportingMap<EventU32>, EventU32, EventTmHook>;
 }
 
 #[cfg(test)]
 mod tests {
@@ -265,16 +273,16 @@ mod tests {
     const TEST_APID: u16 = 0x02;
     const TEST_ID: UniqueApidTargetId = UniqueApidTargetId::new(TEST_APID, 0x05);
 
-    fn create_basic_man_1() -> DefaultPusEventU32Dispatcher<()> {
+    fn create_basic_man_1() -> DefaultPusEventU32TmCreator {
         let reporter = EventReporter::new(TEST_ID.raw(), TEST_APID, 0, 128)
             .expect("Creating event repoter failed");
-        PusEventDispatcher::new_with_default_backend(reporter)
+        PusEventTmCreatorWithMap::new_with_default_backend(reporter)
     }
 
-    fn create_basic_man_2() -> DefaultPusEventU32Dispatcher<()> {
+    fn create_basic_man_2() -> DefaultPusEventU32TmCreator {
         let reporter = EventReporter::new(TEST_ID.raw(), TEST_APID, 0, 128)
             .expect("Creating event repoter failed");
-        let backend = DefaultPusEventMgmtBackend::default();
-        PusEventDispatcher::new(reporter, backend)
+        let backend = DefaultPusEventReportingMap::default();
+        PusEventTmCreatorWithMap::new(reporter, backend)
     }
 
     #[test]
```
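To summarize the library-side rename: `PusEventDispatcher` becomes `PusEventTmCreatorWithMap`, the free error generic `E` is replaced by the associated `ReportingMap::Error`, and the reporter gains an `EventTmHook` parameter defaulting to `DummyEventHook`. A short usage sketch built only from the constructors and constants visible in this diff (import paths are assumptions):

```rust
use satrs::pus::event_man::{
    DefaultPusEventReportingMap, DefaultPusEventU32TmCreator, EventReporter,
    PusEventTmCreatorWithMap,
};
// Assumed module path for UniqueApidTargetId.
use satrs::request::UniqueApidTargetId;

const TEST_APID: u16 = 0x02;
const TEST_ID: UniqueApidTargetId = UniqueApidTargetId::new(TEST_APID, 0x05);

fn create_tm_creator() -> DefaultPusEventU32TmCreator {
    // Same arguments as the test code above. With no explicit hook, the
    // DummyEventHook default of the type alias applies.
    let reporter = EventReporter::new(TEST_ID.raw(), TEST_APID, 0, 128)
        .expect("creating event reporter failed");
    PusEventTmCreatorWithMap::new(reporter, DefaultPusEventReportingMap::default())
}
```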