diff --git a/satrs-core/src/pus/event_man.rs b/satrs-core/src/pus/event_man.rs
index dbde7fd..0c701e6 100644
--- a/satrs-core/src/pus/event_man.rs
+++ b/satrs-core/src/pus/event_man.rs
@@ -10,7 +10,7 @@ use hashbrown::HashSet;
 
 #[cfg(feature = "alloc")]
 pub use crate::pus::event::EventReporter;
-use crate::pus::verification::{TcStateStarted, VerificationToken};
+use crate::pus::verification::TcStateToken;
 #[cfg(feature = "alloc")]
 use crate::pus::EcssTmSenderCore;
 use crate::pus::EcssTmtcErrorWithSend;
@@ -91,7 +91,7 @@ pub enum EventRequest {
 #[derive(Debug)]
 pub struct EventRequestWithToken {
     pub request: EventRequest,
-    pub token: VerificationToken<TcStateStarted>,
+    pub token: TcStateToken,
 }
 
 #[derive(Debug)]
diff --git a/satrs-core/src/pus/event_srv.rs b/satrs-core/src/pus/event_srv.rs
new file mode 100644
index 0000000..ad098fa
--- /dev/null
+++ b/satrs-core/src/pus/event_srv.rs
@@ -0,0 +1,144 @@
+use crate::events::EventU32;
+use crate::pool::{SharedPool, StoreAddr};
+use crate::pus::event_man::{EventRequest, EventRequestWithToken};
+use crate::pus::verification::{
+    StdVerifReporterWithSender, TcStateAccepted, TcStateToken, VerificationToken,
+};
+use crate::pus::{
+    AcceptedTc, PartialPusHandlingError, PusPacketHandlerResult, PusPacketHandlingError,
+    PusServiceBase, PusServiceHandler,
+};
+use crate::tmtc::tm_helper::SharedTmStore;
+use spacepackets::ecss::event::Subservice;
+use spacepackets::ecss::PusPacket;
+use spacepackets::tc::PusTc;
+use std::format;
+use std::sync::mpsc::{Receiver, Sender};
+
+pub struct PusService5EventHandler {
+    psb: PusServiceBase,
+    event_request_tx: Sender<EventRequestWithToken>,
+}
+
+impl PusService5EventHandler {
+    pub fn new(
+        receiver: Receiver<AcceptedTc>,
+        tc_pool: SharedPool,
+        tm_tx: Sender<StoreAddr>,
+        tm_store: SharedTmStore,
+        tm_apid: u16,
+        verification_handler: StdVerifReporterWithSender,
+        event_request_tx: Sender<EventRequestWithToken>,
+    ) -> Self {
+        Self {
+            psb: PusServiceBase::new(
+                receiver,
+                tc_pool,
+                tm_tx,
+                tm_store,
+                tm_apid,
+                verification_handler,
+            ),
+            event_request_tx,
+        }
+    }
+}
+
+impl PusServiceHandler for PusService5EventHandler {
+    fn psb_mut(&mut self) -> &mut PusServiceBase {
+        &mut self.psb
+    }
+    fn psb(&self) -> &PusServiceBase {
+        &self.psb
+    }
+
+    fn handle_one_tc(
+        &mut self,
+        addr: StoreAddr,
+        token: VerificationToken<TcStateAccepted>,
+    ) -> Result<PusPacketHandlerResult, PusPacketHandlingError> {
+        {
+            // Keep locked section as short as possible.
+            let mut tc_pool = self
+                .psb
+                .tc_store
+                .write()
+                .map_err(|e| PusPacketHandlingError::RwGuardError(format!("{e}")))?;
+            let tc_guard = tc_pool.read_with_guard(addr);
+            let tc_raw = tc_guard.read().unwrap();
+            self.psb.pus_buf[0..tc_raw.len()].copy_from_slice(tc_raw);
+        }
+        let (tc, _) = PusTc::from_bytes(&self.psb.pus_buf).unwrap();
+        let srv = Subservice::try_from(tc.subservice());
+        if srv.is_err() {
+            return Ok(PusPacketHandlerResult::CustomSubservice(
+                tc.subservice(),
+                token,
+            ));
+        }
+        let mut handle_enable_disable_request = |enable: bool| {
+            if tc.user_data().is_none() || tc.user_data().unwrap().len() < 4 {
+                return Err(PusPacketHandlingError::NotEnoughAppData(
+                    "At least 4 bytes event ID expected".into(),
+                ));
+            }
+            let user_data = tc.user_data().unwrap();
+            let event_u32 = EventU32::from(u32::from_be_bytes(user_data[0..4].try_into().unwrap()));
+
+            let start_token = self
+                .psb
+                .verification_handler
+                .start_success(token, Some(&self.psb.stamp_buf))
+                .map_err(|_| PartialPusHandlingError::VerificationError);
+            let partial_error = start_token.clone().err();
+            let mut token: TcStateToken = token.into();
+            if let Ok(start_token) = start_token {
+                token = start_token.into();
+            }
+            let event_req_with_token = if enable {
+                EventRequestWithToken {
+                    request: EventRequest::Enable(event_u32),
+                    token,
+                }
+            } else {
+                EventRequestWithToken {
+                    request: EventRequest::Disable(event_u32),
+                    token,
+                }
+            };
+            self.event_request_tx
+                .send(event_req_with_token)
+                .map_err(|_| {
+                    PusPacketHandlingError::SendError("Forwarding event request failed".into())
+                })?;
+            if let Some(partial_error) = partial_error {
+                return Ok(PusPacketHandlerResult::RequestHandledPartialSuccess(
+                    partial_error,
+                ));
+            }
+            Ok(PusPacketHandlerResult::RequestHandled)
+        };
+        match srv.unwrap() {
+            Subservice::TmInfoReport
+            | Subservice::TmLowSeverityReport
+            | Subservice::TmMediumSeverityReport
+            | Subservice::TmHighSeverityReport => {
+                return Err(PusPacketHandlingError::InvalidSubservice(tc.subservice()))
+            }
+            Subservice::TcEnableEventGeneration => {
+                handle_enable_disable_request(true)?;
+            }
+            Subservice::TcDisableEventGeneration => {
+                handle_enable_disable_request(false)?;
+            }
+            Subservice::TcReportDisabledList | Subservice::TmDisabledEventsReport => {
+                return Ok(PusPacketHandlerResult::SubserviceNotImplemented(
+                    tc.subservice(),
+                    token,
+                ));
+            }
+        }
+
+        Ok(PusPacketHandlerResult::RequestHandled)
+    }
+}
diff --git a/satrs-core/src/pus/mod.rs b/satrs-core/src/pus/mod.rs
index 127b84e..4c7cd18 100644
--- a/satrs-core/src/pus/mod.rs
+++ b/satrs-core/src/pus/mod.rs
@@ -11,6 +11,7 @@ use spacepackets::{ByteConversionError, SizeMissmatch};
 
 pub mod event;
 pub mod event_man;
+pub mod event_srv;
 pub mod hk;
 pub mod mode;
 pub mod scheduler;
@@ -289,13 +290,17 @@ pub mod std_mod {
         PusError(#[from] PusError),
         #[error("Wrong service number {0} for packet handler")]
         WrongService(u8),
+        #[error("Invalid subservice {0}")]
+        InvalidSubservice(u8),
         #[error("Not enough application data available: {0}")]
         NotEnoughAppData(String),
         #[error("Generic store error: {0}")]
         StoreError(#[from] StoreError),
-        #[error("Error with the pool RwGuard")]
+        #[error("Error with the pool RwGuard: {0}")]
         RwGuardError(String),
-        #[error("MQ backend disconnect error")]
+        #[error("MQ send error: {0}")]
+        SendError(String),
+        #[error("TX message queue side has disconnected")]
         QueueDisconnected,
         #[error("Other error {0}")]
         OtherError(String),
@@ -315,6 +320,7 @@ pub mod std_mod {
     pub enum PusPacketHandlerResult {
        RequestHandled,
         RequestHandledPartialSuccess(PartialPusHandlingError),
+        SubserviceNotImplemented(u8, VerificationToken<TcStateAccepted>),
         CustomSubservice(u8, VerificationToken<TcStateAccepted>),
         Empty,
     }
diff --git a/satrs-core/src/pus/verification.rs b/satrs-core/src/pus/verification.rs
index 6597302..95a54eb 100644
--- a/satrs-core/src/pus/verification.rs
+++ b/satrs-core/src/pus/verification.rs
@@ -227,6 +227,17 @@ impl From<VerificationToken<TcStateNone>> for TcStateToken {
     }
 }
 
+impl TryFrom<TcStateToken> for VerificationToken<TcStateAccepted> {
+    type Error = ();
+
+    fn try_from(value: TcStateToken) -> Result<Self, Self::Error> {
+        if let TcStateToken::Accepted(token) = value {
+            Ok(token)
+        } else {
+            return Err(());
+        }
+    }
+}
 impl From<VerificationToken<TcStateAccepted>> for TcStateToken {
     fn from(t: VerificationToken<TcStateAccepted>) -> Self {
         TcStateToken::Accepted(t)
diff --git a/satrs-example/src/main.rs b/satrs-example/src/main.rs
index dff9a5b..66f805e 100644
--- a/satrs-example/src/main.rs
+++ b/satrs-example/src/main.rs
@@ -9,6 +9,7 @@ use log::{info, warn};
 
 use crate::hk::AcsHkIds;
 use crate::logging::setup_logger;
+use crate::pus::event::Pus5Wrapper;
 use crate::pus::scheduler::Pus11Wrapper;
 use crate::pus::test::Service17CustomWrapper;
 use crate::pus::PusTcMpscRouter;
@@ -26,6 +27,7 @@ use satrs_core::pus::event_man::{
     DefaultPusMgmtBackendProvider, EventReporter, EventRequest, EventRequestWithToken,
     PusEventDispatcher,
 };
+use satrs_core::pus::event_srv::PusService5EventHandler;
 use satrs_core::pus::hk::Subservice as HkSubservice;
 use satrs_core::pus::scheduler::PusScheduler;
 use satrs_core::pus::scheduler_srv::PusService11SchedHandler;
@@ -141,7 +143,7 @@ fn main() {
         sock_addr,
         verif_reporter: verif_reporter.clone(),
         event_sender,
-        event_request_tx,
+        // event_request_tx,
         request_map,
         seq_count_provider: seq_count_provider_tmtc,
     };
@@ -184,16 +186,26 @@ fn main() {
     };
     let scheduler = PusScheduler::new_with_current_init_time(Duration::from_secs(5))
         .expect("Creating PUS Scheduler failed");
-    let pus11_handler = PusService11SchedHandler::new(
+    let pus_11_handler = PusService11SchedHandler::new(
         pus_sched_rx,
         tc_store.pool.clone(),
         tm_funnel_tx.clone(),
         tm_store.clone(),
         PUS_APID,
-        verif_reporter,
+        verif_reporter.clone(),
         scheduler,
     );
-    let mut pus_11_wrapper = Pus11Wrapper { pus11_handler };
+    let mut pus_11_wrapper = Pus11Wrapper { pus_11_handler };
+    let pus_5_handler = PusService5EventHandler::new(
+        pus_event_rx,
+        tc_store.pool.clone(),
+        tm_funnel_tx.clone(),
+        tm_store.clone(),
+        PUS_APID,
+        verif_reporter,
+        event_request_tx,
+    );
+    let mut pus_5_wrapper = Pus5Wrapper { pus_5_handler };
 
     info!("Starting TMTC task");
     let jh0 = thread::Builder::new()
@@ -236,7 +248,7 @@ fn main() {
     let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
     let mut report_completion = |event_req: EventRequestWithToken, timestamp: &[u8]| {
         reporter_event_handler
-            .completion_success(event_req.token, Some(timestamp))
+            .completion_success(event_req.token.try_into().unwrap(), Some(timestamp))
             .expect("Sending completion success failed");
     };
     loop {
@@ -366,6 +378,7 @@ fn main() {
         };
         is_srv_finished(pus_17_wrapper.perform_operation());
         is_srv_finished(pus_11_wrapper.perform_operation());
+        is_srv_finished(pus_5_wrapper.perform_operation());
         if all_queues_empty {
             thread::sleep(Duration::from_millis(200));
         }
diff --git a/satrs-example/src/pus/event.rs b/satrs-example/src/pus/event.rs
new file mode 100644
index 0000000..ef47c87
--- /dev/null
+++ b/satrs-example/src/pus/event.rs
@@ -0,0 +1,33 @@
+use log::{error, warn};
+use satrs_core::pus::event_srv::PusService5EventHandler;
+use satrs_core::pus::{PusPacketHandlerResult, PusServiceHandler};
+
+pub struct Pus5Wrapper {
+    pub pus_5_handler: PusService5EventHandler,
+}
+
+impl Pus5Wrapper {
+    pub fn perform_operation(&mut self) -> bool {
+        match self.pus_5_handler.handle_next_packet() {
+            Ok(result) => match result {
+                PusPacketHandlerResult::RequestHandled => {}
+                PusPacketHandlerResult::RequestHandledPartialSuccess(e) => {
+                    warn!("PUS 5 partial packet handling success: {e:?}")
+                }
+                PusPacketHandlerResult::CustomSubservice(invalid, _) => {
+                    warn!("PUS 5 invalid subservice {invalid}");
+                }
+                PusPacketHandlerResult::SubserviceNotImplemented(subservice, _) => {
+                    warn!("PUS 5 subservice {subservice} not implemented");
+                }
+                PusPacketHandlerResult::Empty => {
+                    return true;
+                }
+            },
+            Err(error) => {
+                error!("PUS packet handling error: {error:?}")
+            }
+        }
+        false
+    }
+}
diff --git a/satrs-example/src/pus/events.rs b/satrs-example/src/pus/events.rs
deleted file mode 100644
index 554bbee..0000000
--- a/satrs-example/src/pus/events.rs
+++ /dev/null
@@ -1,59 +0,0 @@
-use satrs_core::event_man::{EventManager, EventManagerWithMpscQueue};
-use satrs_core::events::EventU32;
-use satrs_core::params::Params;
-use satrs_core::pool::{SharedPool, StoreAddr};
-use satrs_core::pus::event_man::EventReporter;
-use satrs_core::pus::verification::{
-    StdVerifReporterWithSender, TcStateAccepted, VerificationToken,
-};
-use satrs_core::pus::{
-    AcceptedTc, PusPacketHandlerResult, PusPacketHandlingError, PusServiceBase, PusServiceHandler,
-};
-use satrs_core::tmtc::tm_helper::SharedTmStore;
-use std::sync::mpsc::{Receiver, Sender};
-
-pub struct PusService5EventHandler {
-    psb: PusServiceBase,
-    event_manager: EventManagerWithMpscQueue<EventU32, Params>,
-}
-
-impl PusService5EventHandler {
-    pub fn new(
-        receiver: Receiver<AcceptedTc>,
-        tc_pool: SharedPool,
-        tm_tx: Sender<StoreAddr>,
-        tm_store: SharedTmStore,
-        tm_apid: u16,
-        verification_handler: StdVerifReporterWithSender,
-        event_manager: EventManagerWithMpscQueue<EventU32, Params>,
-    ) -> Self {
-        Self {
-            psb: PusServiceBase::new(
-                receiver,
-                tc_pool,
-                tm_tx,
-                tm_store,
-                tm_apid,
-                verification_handler,
-            ),
-            event_manager,
-        }
-    }
-}
-
-impl PusServiceHandler for PusService5EventHandler {
-    fn psb_mut(&mut self) -> &mut PusServiceBase {
-        &mut self.psb
-    }
-    fn psb(&self) -> &PusServiceBase {
-        &self.psb
-    }
-
-    fn handle_one_tc(
-        &mut self,
-        addr: StoreAddr,
-        token: VerificationToken<TcStateAccepted>,
-    ) -> Result<PusPacketHandlerResult, PusPacketHandlingError> {
-        Ok(PusPacketHandlerResult::RequestHandled)
-    }
-}
diff --git a/satrs-example/src/pus/mod.rs b/satrs-example/src/pus/mod.rs
index 9fb1e9c..a3a2292 100644
--- a/satrs-example/src/pus/mod.rs
+++ b/satrs-example/src/pus/mod.rs
@@ -9,11 +9,11 @@ use satrs_core::spacepackets::ecss::PusServiceId;
 use satrs_core::spacepackets::tc::PusTc;
 use satrs_core::spacepackets::time::cds::TimeProvider;
 use satrs_core::spacepackets::time::TimeWriter;
-use satrs_core::tmtc::tm_helper::{PusTmWithCdsShortHelper, SharedTmStore};
+use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper;
 use satrs_example::{tmtc_err, CustomPusServiceId};
 use std::sync::mpsc::Sender;
 
-pub mod events;
+pub mod event;
 pub mod scheduler;
 pub mod test;
 
@@ -25,31 +25,6 @@ pub struct PusTcMpscRouter {
     pub action_service_receiver: Sender<AcceptedTc>,
 }
 
-// impl PusTcRouter for PusTcMpscRouter {
-//     type Error = ();
-//
-//     fn route_pus_tc(&mut self, apid: u16, service: u8, subservice: u8, tc: &PusTc) {
-//         if apid == PUS_APID {
-//             if service == PusServiceId::Event as u8 {
-//                 self.event_service_receiver.send_tc(*tc).unwrap();
-//             }
-//             if service == PusServiceId::Action as u8 {
-//                 // TODO: Look up object ID and then route the action request to that object.
-//                 self.action_service_receiver.send_tc(*tc).unwrap();
-//             }
-//             if service == PusServiceId::Housekeeping as u8 {
-//                 // TODO: Look up object ID and then route the HK request to that object.
-//             }
-//             if service == PusServiceId::Scheduling as u8 {
-//                 self.sched_service_receiver.send_tc(*tc).unwrap();
-//             }
-//             if service == PusServiceId::Test as u8 {
-//                 self.test_service_receiver.send_tc(*tc).unwrap();
-//             }
-//         }
-//         todo!()
-//     }
-// }
 pub struct PusReceiver {
     pub tm_helper: PusTmWithCdsShortHelper,
     pub tm_args: PusTmArgs,
@@ -58,10 +33,6 @@ pub struct PusReceiver {
 }
 
 pub struct PusTmArgs {
-    /// All telemetry is sent with this sender handle.
-    pub tm_tx: Sender<StoreAddr>,
-    /// All TM to be sent is stored here
-    pub tm_store: SharedTmStore,
     /// All verification reporting is done with this reporter.
     pub verif_reporter: StdVerifReporterWithSender,
     /// Sequence count provider for TMs sent from within pus demultiplexer
@@ -74,55 +45,9 @@ impl PusTmArgs {
     }
 }
 
-// #[allow(dead_code)]
-// pub struct PusTcHandlerBase {
-//     pub tc_store: Box<dyn PoolProvider>,
-//     pub receiver: Receiver<(StoreAddr, VerificationToken<TcStateAccepted>)>,
-//     pub verif_reporter: StdVerifReporterWithSender,
-//     pub time_provider: Box<dyn CcsdsTimeProvider>,
-// }
-//
-// pub trait TestHandlerNoPing {
-//     fn handle_no_ping_tc(&mut self, tc: PusTc);
-// }
-//
-// #[allow(dead_code)]
-// pub struct PusTestTcHandler {
-//     pub base: PusTcHandlerBase,
-//     handler: Option<Box<dyn TestHandlerNoPing>>,
-// }
-//
-// #[allow(dead_code)]
-// pub struct PusScheduleTcHandler {
-//     pub base: PusTestTcHandler,
-// }
-//
-// impl PusTestTcHandler {
-//     #[allow(dead_code)]
-//     pub fn operation(&mut self) {
-//         let (addr, token) = self.base.receiver.recv().unwrap();
-//         let data = self.base.tc_store.read(&addr).unwrap();
-//         let (pus_tc, _len) = PusTc::from_bytes(data).unwrap();
-//         let stamp: [u8; 7] = [0; 7];
-//         if pus_tc.subservice() == 1 {
-//             self.base
-//                 .verif_reporter
-//                 .completion_success(token, Some(&stamp))
-//                 .unwrap();
-//         } else if let Some(handler) = &mut self.handler {
-//             handler.handle_no_ping_tc(pus_tc);
-//         }
-//     }
-// }
-
 pub struct PusTcArgs {
-    //pub event_request_tx: Sender<EventRequestWithToken>,
     /// This routes all telecommands to their respective recipients
     pub pus_router: PusTcMpscRouter,
-    /// Request routing helper. Maps targeted requests to their recipient.
-    //pub request_map: HashMap<u32, Sender<RequestWithToken>>,
-    /// Required for scheduling of telecommands.
-    //pub tc_source: PusTcSource,
     /// Used to send events from within the TC router
     pub event_sender: Sender<(EventU32, Option<Params>)>,
 }
@@ -330,64 +255,6 @@ impl PusReceiver {
 // }
 // impl PusReceiver {
-//     fn handle_test_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
-//         match PusPacket::subservice(pus_tc) {
-//             1 => {
-//                 info!("Received PUS ping command TC[17,1]");
-//                 info!("Sending ping reply PUS TM[17,2]");
-//                 let start_token = self
-//                     .tm_args
-//                     .verif_reporter
-//                     .start_success(token, Some(self.stamp_helper.stamp()))
-//                     .expect("Error sending start success");
-//                 let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(
-//                     17,
-//                     2,
-//                     None,
-//                     self.tm_args.seq_count_provider.get(),
-//                 );
-//                 let addr = self.tm_args.tm_store.add_pus_tm(&ping_reply);
-//                 self.tm_args
-//                     .tm_tx
-//                     .send(addr)
-//                     .expect("Sending TM to TM funnel failed");
-//                 self.tm_args.seq_count_provider.increment();
-//                 self.tm_args
-//                     .verif_reporter
-//                     .completion_success(start_token, Some(self.stamp_helper.stamp()))
-//                     .expect("Error sending completion success");
-//             }
-//             128 => {
-//                 info!("Generating test event");
-//                 self.tc_args
-//                     .event_sender
-//                     .send((TEST_EVENT.into(), None))
-//                     .expect("Sending test event failed");
-//                 let start_token = self
-//                     .tm_args
-//                     .verif_reporter
-//                     .start_success(token, Some(self.stamp_helper.stamp()))
-//                     .expect("Error sending start success");
-//                 self.tm_args
-//                     .verif_reporter
-//                     .completion_success(start_token, Some(self.stamp_helper.stamp()))
-//                     .expect("Error sending completion success");
-//             }
-//             _ => {
-//                 self.tm_args
-//                     .verif_reporter
-//                     .start_failure(
-//                         token,
-//                         FailParams::new(
-//                             Some(self.stamp_helper.stamp()),
-//                             &tmtc_err::INVALID_PUS_SUBSERVICE,
-//                             None,
-//                         ),
-//                     )
-//                     .expect("Sending start failure TM failed");
-//             }
-//         }
-//     }
 //
 //     fn handle_hk_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
 //         if pus_tc.user_data().is_none() {
@@ -495,204 +362,6 @@ impl PusReceiver {
 //         }
 //     }
 //
-//     fn handle_event_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
-//         let send_start_failure = |vr: &mut StdVerifReporterWithSender,
-//                                   timestamp: &[u8],
-//                                   failure_code: &ResultU16,
-//                                   failure_data: Option<&[u8]>| {
-//             vr.start_failure(
-//                 token,
-//                 FailParams::new(Some(timestamp), failure_code, failure_data),
-//             )
-//             .expect("Sending start failure TM failed");
-//         };
-//         let send_start_acceptance = |vr: &mut StdVerifReporterWithSender, timestamp: &[u8]| {
-//             vr.start_success(token, Some(timestamp))
-//                 .expect("Sending start success TM failed")
-//         };
-//         if pus_tc.user_data().is_none() {
-//             send_start_failure(
-//                 &mut self.tm_args.verif_reporter,
-//                 self.stamp_helper.stamp(),
-//                 &tmtc_err::NOT_ENOUGH_APP_DATA,
-//                 None,
-//             );
-//             return;
-//         }
-//         let app_data = pus_tc.user_data().unwrap();
-//         if app_data.len() < 4 {
-//             send_start_failure(
-//                 &mut self.tm_args.verif_reporter,
-//                 self.stamp_helper.stamp(),
-//                 &tmtc_err::NOT_ENOUGH_APP_DATA,
-//                 None,
-//             );
-//             return;
-//         }
-//         let event_id = EventU32::from(u32::from_be_bytes(app_data.try_into().unwrap()));
-//         match PusPacket::subservice(pus_tc).try_into() {
-//             Ok(event::Subservice::TcEnableEventGeneration) => {
-//                 let start_token = send_start_acceptance(
-//                     &mut self.tm_args.verif_reporter,
-//                     self.stamp_helper.stamp(),
-//                 );
-//                 self.tc_args
-//                     .event_request_tx
-//                     .send(EventRequestWithToken {
-//                         request: EventRequest::Enable(event_id),
-//                         token: start_token,
-//                     })
-//                     .expect("Sending event request failed");
failed"); -// } -// Ok(event::Subservice::TcDisableEventGeneration) => { -// let start_token = send_start_acceptance( -// &mut self.tm_args.verif_reporter, -// self.stamp_helper.stamp(), -// ); -// self.tc_args -// .event_request_tx -// .send(EventRequestWithToken { -// request: EventRequest::Disable(event_id), -// token: start_token, -// }) -// .expect("Sending event request failed"); -// } -// _ => { -// send_start_failure( -// &mut self.tm_args.verif_reporter, -// self.stamp_helper.stamp(), -// &tmtc_err::INVALID_PUS_SUBSERVICE, -// None, -// ); -// } -// } -// } -// -// fn handle_scheduled_tc(&mut self, pus_tc: &PusTc, token: VerificationToken) { -// let subservice = match pus_11_generic_tc_check(pus_tc) { -// Ok(subservice) => subservice, -// Err(e) => match e { -// GenericTcCheckError::NotEnoughAppData => { -// self.tm_args -// .verif_reporter -// .start_failure( -// token, -// FailParams::new( -// Some(self.stamp_helper.stamp()), -// &tmtc_err::NOT_ENOUGH_APP_DATA, -// None, -// ), -// ) -// .expect("could not sent verification error"); -// return; -// } -// GenericTcCheckError::InvalidSubservice => { -// self.tm_args -// .verif_reporter -// .start_failure( -// token, -// FailParams::new( -// Some(self.stamp_helper.stamp()), -// &tmtc_err::INVALID_PUS_SUBSERVICE, -// None, -// ), -// ) -// .expect("could not sent verification error"); -// return; -// } -// }, -// }; -// match subservice { -// scheduling::Subservice::TcEnableScheduling => { -// let start_token = self -// .tm_args -// .verif_reporter -// .start_success(token, Some(self.stamp_helper.stamp())) -// .expect("Error sending start success"); -// -// let mut scheduler = self.tc_args.scheduler.borrow_mut(); -// scheduler.enable(); -// if scheduler.is_enabled() { -// self.tm_args -// .verif_reporter -// .completion_success(start_token, Some(self.stamp_helper.stamp())) -// .expect("Error sending completion success"); -// } else { -// panic!("Failed to enable scheduler"); -// } -// } -// scheduling::Subservice::TcDisableScheduling => { -// let start_token = self -// .tm_args -// .verif_reporter -// .start_success(token, Some(self.stamp_helper.stamp())) -// .expect("Error sending start success"); -// -// let mut scheduler = self.tc_args.scheduler.borrow_mut(); -// scheduler.disable(); -// if !scheduler.is_enabled() { -// self.tm_args -// .verif_reporter -// .completion_success(start_token, Some(self.stamp_helper.stamp())) -// .expect("Error sending completion success"); -// } else { -// panic!("Failed to disable scheduler"); -// } -// } -// scheduling::Subservice::TcResetScheduling => { -// let start_token = self -// .tm_args -// .verif_reporter -// .start_success(token, Some(self.stamp_helper.stamp())) -// .expect("Error sending start success"); -// -// let mut pool = self -// .tc_args -// .tc_source -// .tc_store -// .pool -// .write() -// .expect("Locking pool failed"); -// -// let mut scheduler = self.tc_args.scheduler.borrow_mut(); -// scheduler -// .reset(pool.as_mut()) -// .expect("Error resetting TC Pool"); -// drop(scheduler); -// -// self.tm_args -// .verif_reporter -// .completion_success(start_token, Some(self.stamp_helper.stamp())) -// .expect("Error sending completion success"); -// } -// scheduling::Subservice::TcInsertActivity => { -// let start_token = self -// .tm_args -// .verif_reporter -// .start_success(token, Some(self.stamp_helper.stamp())) -// .expect("error sending start success"); -// -// let mut pool = self -// .tc_args -// .tc_source -// .tc_store -// .pool -// .write() -// .expect("locking pool 
failed"); -// let mut scheduler = self.tc_args.scheduler.borrow_mut(); -// scheduler -// .insert_wrapped_tc::(pus_tc, pool.as_mut()) -// .expect("insertion of activity into pool failed"); -// drop(scheduler); -// -// self.tm_args -// .verif_reporter -// .completion_success(start_token, Some(self.stamp_helper.stamp())) -// .expect("sending completion success failed"); -// } -// _ => {} -// } -// } // // fn handle_mode_service(&mut self, pus_tc: &PusTc, token: VerificationToken) { // let mut app_data_len = 0; diff --git a/satrs-example/src/pus/scheduler.rs b/satrs-example/src/pus/scheduler.rs index 1b8d194..2f10636 100644 --- a/satrs-example/src/pus/scheduler.rs +++ b/satrs-example/src/pus/scheduler.rs @@ -3,12 +3,12 @@ use satrs_core::pus::scheduler_srv::PusService11SchedHandler; use satrs_core::pus::{PusPacketHandlerResult, PusServiceHandler}; pub struct Pus11Wrapper { - pub pus11_handler: PusService11SchedHandler, + pub pus_11_handler: PusService11SchedHandler, } impl Pus11Wrapper { pub fn perform_operation(&mut self) -> bool { - match self.pus11_handler.handle_next_packet() { + match self.pus_11_handler.handle_next_packet() { Ok(result) => match result { PusPacketHandlerResult::RequestHandled => {} PusPacketHandlerResult::RequestHandledPartialSuccess(e) => { @@ -17,6 +17,9 @@ impl Pus11Wrapper { PusPacketHandlerResult::CustomSubservice(invalid, _) => { warn!("PUS11 invalid subservice {invalid}"); } + PusPacketHandlerResult::SubserviceNotImplemented(subservice, _) => { + warn!("PUS11: Subservice {subservice} not implemented"); + } PusPacketHandlerResult::Empty => { return true; } diff --git a/satrs-example/src/pus/test.rs b/satrs-example/src/pus/test.rs index 7aa1229..daf8727 100644 --- a/satrs-example/src/pus/test.rs +++ b/satrs-example/src/pus/test.rs @@ -33,6 +33,9 @@ impl Service17CustomWrapper { partial_err ); } + PusPacketHandlerResult::SubserviceNotImplemented(subservice, _) => { + warn!("PUS17: Subservice {subservice} not implemented") + } PusPacketHandlerResult::CustomSubservice(subservice, token) => { let (buf, _) = self.pus17_handler.pus_tc_buf(); let (tc, _) = PusTc::from_bytes(buf).unwrap(); diff --git a/satrs-example/src/tmtc.rs b/satrs-example/src/tmtc.rs index accef97..86b5c1c 100644 --- a/satrs-example/src/tmtc.rs +++ b/satrs-example/src/tmtc.rs @@ -16,7 +16,6 @@ use crate::ccsds::CcsdsReceiver; use crate::pus::{PusReceiver, PusTcArgs, PusTcMpscRouter, PusTmArgs}; use crate::requests::RequestWithToken; use satrs_core::pool::{SharedPool, StoreAddr, StoreError}; -use satrs_core::pus::event_man::EventRequestWithToken; use satrs_core::pus::scheduler::{PusScheduler, TcInfo}; use satrs_core::pus::verification::StdVerifReporterWithSender; use satrs_core::seq_count::SeqCountProviderSyncClonable; @@ -32,7 +31,6 @@ pub struct OtherArgs { pub sock_addr: SocketAddr, pub verif_reporter: StdVerifReporterWithSender, pub event_sender: Sender<(EventU32, Option)>, - pub event_request_tx: Sender, pub request_map: HashMap>, pub seq_count_provider: SeqCountProviderSyncClonable, } @@ -161,8 +159,6 @@ pub fn core_tmtc_task( )); let pus_tm_args = PusTmArgs { - tm_tx: tm_args.tm_sink_sender, - tm_store: tm_args.tm_store.clone(), verif_reporter: args.verif_reporter, seq_count_provider: args.seq_count_provider.clone(), };