this is really tricky
Some checks failed: Rust/sat-rs/pipeline/pr-main reported a failure building this commit.

Robin Müller 2024-02-05 18:25:39 +01:00
parent bec3b28527
commit d8d7c86f0c
Signed by: muellerr
GPG Key ID: A649FB78196E3849
3 changed files with 229 additions and 0 deletions


@@ -9,6 +9,7 @@ use satrs_core::spacepackets::time::TimeWriter;
use satrs_example::{tmtc_err, CustomPusServiceId};
use std::sync::mpsc::Sender;
pub mod stack;
pub mod action;
pub mod event;
pub mod hk;


@@ -0,0 +1,221 @@
// Prototyping software.
/*
use std::{collections::HashMap, sync::mpsc, time::Duration};

use satrs_core::{
    events::EventU32,
    params::Params,
    pool::{SharedStaticMemoryPool, StaticMemoryPool, StoreAddr},
    pus::{
        event_man::EventRequestWithToken, event_srv::PusService5EventHandler,
        scheduler::PusScheduler, scheduler_srv::PusService11SchedHandler,
        test::PusService17TestHandler, verification::VerificationReporterWithSender,
        EcssTcAndToken, EcssTcInSharedStoreConverter, MpscTcReceiver, MpscTmInStoreSender,
        PusServiceHelper,
    },
    tmtc::tm_helper::SharedTmStore,
    ChannelId,
};
use satrs_example::{TargetIdWithApid, TcReceiverId, TmSenderId, PUS_APID};

use crate::{requests::RequestWithToken, tmtc::PusTcSource};

use super::{
    action::{Pus8Wrapper, PusService8ActionHandler},
    event::Pus5Wrapper,
    hk::{Pus3Wrapper, PusService3HkHandler},
    scheduler::Pus11Wrapper,
    test::Service17CustomWrapper,
    PusTcMpscRouter,
};

pub struct PusStack {
    event_srv: Pus5Wrapper,
    hk_srv: Pus3Wrapper,
    action_srv: Pus8Wrapper,
    schedule_srv: Pus11Wrapper,
    test_srv: Service17CustomWrapper,
}

impl PusStack {
    pub fn test_service(
        shared_tm_store: SharedTmStore,
        tm_funnel_tx: mpsc::Sender<StoreAddr>,
        verif_reporter: VerificationReporterWithSender,
        tc_pool: SharedStaticMemoryPool,
        event_sender: mpsc::Sender<(EventU32, Option<Params>)>,
    ) -> (Service17CustomWrapper, mpsc::Sender<EcssTcAndToken>) {
        let (pus_test_tx, pus_test_rx) = mpsc::channel();
        let test_srv_tm_sender = MpscTmInStoreSender::new(
            TmSenderId::PusTest as ChannelId,
            "PUS_17_TM_SENDER",
            shared_tm_store.clone(),
            tm_funnel_tx.clone(),
        );
        let test_srv_receiver = MpscTcReceiver::new(
            TcReceiverId::PusTest as ChannelId,
            "PUS_17_TC_RECV",
            pus_test_rx,
        );
        let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new(
            Box::new(test_srv_receiver),
            Box::new(test_srv_tm_sender),
            PUS_APID,
            verif_reporter.clone(),
            EcssTcInSharedStoreConverter::new(tc_pool, 2048),
        ));
        (
            Service17CustomWrapper {
                pus17_handler,
                test_srv_event_sender: event_sender,
            },
            pus_test_tx,
        )
    }

    pub fn new(
        shared_tm_store: SharedTmStore,
        tm_funnel_tx: mpsc::Sender<StoreAddr>,
        sched_tc_pool: StaticMemoryPool,
        tc_source_wrapper: PusTcSource,
        verif_reporter: VerificationReporterWithSender,
        event_sender: mpsc::Sender<(EventU32, Option<Params>)>,
        event_request_tx: mpsc::Sender<EventRequestWithToken>,
        request_map: HashMap<TargetIdWithApid, mpsc::Sender<RequestWithToken>>,
    ) -> (Self, PusTcMpscRouter) {
        let (pus_event_tx, pus_event_rx) = mpsc::channel();
        let (pus_sched_tx, pus_sched_rx) = mpsc::channel();
        let (pus_hk_tx, pus_hk_rx) = mpsc::channel();
        let (pus_action_tx, pus_action_rx) = mpsc::channel();
        let tc_pool = tc_source_wrapper.clone_backing_pool();
        // Build the test service here as well so that its TC sender can be
        // registered in the router and the wrapper can be stored in the stack.
        let (pus_17_wrapper, pus_test_tx) = Self::test_service(
            shared_tm_store.clone(),
            tm_funnel_tx.clone(),
            verif_reporter.clone(),
            tc_pool.clone(),
            event_sender,
        );
        let pus_router = PusTcMpscRouter {
            test_service_receiver: pus_test_tx,
            event_service_receiver: pus_event_tx,
            sched_service_receiver: pus_sched_tx,
            hk_service_receiver: pus_hk_tx,
            action_service_receiver: pus_action_tx,
        };
        let sched_srv_tm_sender = MpscTmInStoreSender::new(
            TmSenderId::PusSched as ChannelId,
            "PUS_11_TM_SENDER",
            shared_tm_store.clone(),
            tm_funnel_tx.clone(),
        );
        let sched_srv_receiver = MpscTcReceiver::new(
            TcReceiverId::PusSched as ChannelId,
            "PUS_11_TC_RECV",
            pus_sched_rx,
        );
        let scheduler = PusScheduler::new_with_current_init_time(Duration::from_secs(5))
            .expect("Creating PUS Scheduler failed");
        let pus_11_handler = PusService11SchedHandler::new(
            PusServiceHelper::new(
                Box::new(sched_srv_receiver),
                Box::new(sched_srv_tm_sender),
                PUS_APID,
                verif_reporter.clone(),
                EcssTcInSharedStoreConverter::new(tc_source_wrapper.clone_backing_pool(), 2048),
            ),
            scheduler,
        );
        let pus_11_wrapper = Pus11Wrapper {
            pus_11_handler,
            sched_tc_pool,
            tc_source_wrapper,
        };
        let event_srv_tm_sender = MpscTmInStoreSender::new(
            TmSenderId::PusEvent as ChannelId,
            "PUS_5_TM_SENDER",
            shared_tm_store.clone(),
            tm_funnel_tx.clone(),
        );
        let event_srv_receiver = MpscTcReceiver::new(
            TcReceiverId::PusEvent as ChannelId,
            "PUS_5_TC_RECV",
            pus_event_rx,
        );
        let pus_5_handler = PusService5EventHandler::new(
            PusServiceHelper::new(
                Box::new(event_srv_receiver),
                Box::new(event_srv_tm_sender),
                PUS_APID,
                verif_reporter.clone(),
                EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048),
            ),
            event_request_tx,
        );
        let pus_5_wrapper = Pus5Wrapper { pus_5_handler };
        let action_srv_tm_sender = MpscTmInStoreSender::new(
            TmSenderId::PusAction as ChannelId,
            "PUS_8_TM_SENDER",
            shared_tm_store.clone(),
            tm_funnel_tx.clone(),
        );
        let action_srv_receiver = MpscTcReceiver::new(
            TcReceiverId::PusAction as ChannelId,
            "PUS_8_TC_RECV",
            pus_action_rx,
        );
        let pus_8_handler = PusService8ActionHandler::new(
            Box::new(action_srv_receiver),
            Box::new(action_srv_tm_sender),
            PUS_APID,
            verif_reporter.clone(),
            EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048),
            request_map.clone(),
        );
        let pus_8_wrapper = Pus8Wrapper { pus_8_handler };
        let hk_srv_tm_sender = MpscTmInStoreSender::new(
            TmSenderId::PusHk as ChannelId,
            "PUS_3_TM_SENDER",
            shared_tm_store.clone(),
            tm_funnel_tx.clone(),
        );
        let hk_srv_receiver =
            MpscTcReceiver::new(TcReceiverId::PusHk as ChannelId, "PUS_3_TC_RECV", pus_hk_rx);
        let pus_3_handler = PusService3HkHandler::new(
            Box::new(hk_srv_receiver),
            Box::new(hk_srv_tm_sender),
            PUS_APID,
            verif_reporter.clone(),
            EcssTcInSharedStoreConverter::new(tc_pool, 2048),
            request_map,
        );
        let pus_3_wrapper = Pus3Wrapper { pus_3_handler };
        (
            Self {
                event_srv: pus_5_wrapper,
                hk_srv: pus_3_wrapper,
                action_srv: pus_8_wrapper,
                schedule_srv: pus_11_wrapper,
                test_srv: pus_17_wrapper,
            },
            pus_router,
        )
    }

    pub fn periodic_operation(&mut self) {
        self.schedule_srv.release_tcs();
        loop {
            let mut all_queues_empty = true;
            let mut is_srv_finished = |srv_handler_finished: bool| {
                if !srv_handler_finished {
                    all_queues_empty = false;
                }
            };
            is_srv_finished(self.test_srv.handle_next_packet());
            is_srv_finished(self.schedule_srv.handle_next_packet());
            is_srv_finished(self.event_srv.handle_next_packet());
            is_srv_finished(self.action_srv.handle_next_packet());
            is_srv_finished(self.hk_srv.handle_next_packet());
            if all_queues_empty {
                break;
            }
        }
    }
}
*/
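
The polling loop at the end of the prototype is the part that generalizes: periodic_operation first releases any due scheduled TCs and then keeps polling every service wrapper until none of them had a packet left to process. Below is a minimal, self-contained sketch of that round-robin drain pattern; the HandlePacket trait and DummyService type are illustrative stand-ins for the concrete wrapper types above, not sat-rs API.

// Sketch of the round-robin polling pattern in PusStack::periodic_operation:
// poll every service until none of them had a packet left to process.
// `HandlePacket` and `DummyService` are illustrative stand-ins, not sat-rs types.
trait HandlePacket {
    /// Returns true when the service is done, i.e. it had no more packets to process.
    fn handle_next_packet(&mut self) -> bool;
}

struct DummyService {
    pending: u32,
}

impl HandlePacket for DummyService {
    fn handle_next_packet(&mut self) -> bool {
        if self.pending > 0 {
            self.pending -= 1;
            false
        } else {
            true
        }
    }
}

fn periodic_operation(services: &mut [&mut dyn HandlePacket]) {
    loop {
        let mut all_queues_empty = true;
        for srv in services.iter_mut() {
            if !srv.handle_next_packet() {
                all_queues_empty = false;
            }
        }
        if all_queues_empty {
            break;
        }
    }
}

fn main() {
    let mut test_srv = DummyService { pending: 2 };
    let mut hk_srv = DummyService { pending: 5 };
    let mut services: [&mut dyn HandlePacket; 2] = [&mut test_srv, &mut hk_srv];
    periodic_operation(&mut services);
}

In the real stack the five concrete service wrappers play the role of the slice elements.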


@@ -59,6 +59,13 @@ pub struct PusTcSource {
    pub tc_store: TcStore,
}

impl PusTcSource {
    #[allow(dead_code)]
    pub fn clone_backing_pool(&self) -> SharedStaticMemoryPool {
        self.tc_store.pool.clone()
    }
}

impl ReceivesEcssPusTc for PusTcSource {
    type Error = MpscStoreAndSendError;
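
The clone_backing_pool helper added above hands out another clone of the shared handle to the TC store's backing pool, which is what lets each PUS service in the stack build its own EcssTcInSharedStoreConverter over the same memory. A rough sketch of that shared-handle pattern with simplified stand-in types (SimplePool, SharedPool and the packets field are assumptions for illustration, not the sat-rs definitions):

// Sketch of the shared-pool pattern behind clone_backing_pool: the store owns
// a shared handle and hands out clones, so every service can work on the very
// same backing memory. The types here are simplified stand-ins.
use std::sync::{Arc, RwLock};

#[derive(Default)]
struct SimplePool {
    packets: Vec<Vec<u8>>,
}

type SharedPool = Arc<RwLock<SimplePool>>;

struct TcStore {
    pool: SharedPool,
}

impl TcStore {
    fn clone_backing_pool(&self) -> SharedPool {
        // Cloning an Arc only bumps a reference count: both handles point
        // to the same underlying pool.
        self.pool.clone()
    }
}

fn main() {
    let store = TcStore {
        pool: Arc::new(RwLock::new(SimplePool::default())),
    };
    let handle_for_service = store.clone_backing_pool();
    handle_for_service
        .write()
        .unwrap()
        .packets
        .push(vec![0x18, 0x00]);
    // A write through the cloned handle is visible through the original one.
    assert_eq!(store.pool.read().unwrap().packets.len(), 1);
}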