Merge pull request 'Add basic mode request handling' (#34) from add_mode_request into main

Reviewed-on: rust/satrs-launchpad#34
commit cea5b3eeb1
Author: Robin Müller
Date: 2023-02-15 17:16:42 +01:00
13 changed files with 449 additions and 179 deletions

View File

@@ -17,6 +17,10 @@ delegate = ">=0.8, <0.10"
 paste = "1"
 embed-doc-image = "0.1"
 
+[dependencies.num_enum]
+version = "0.5"
+default-features = false
+
 [dependencies.dyn-clone]
 version = "1"
 optional = true
@@ -78,7 +82,8 @@ std = [
     "postcard/use-std",
     "crossbeam-channel/std",
     "serde/std",
-    "spacepackets/std"
+    "spacepackets/std",
+    "num_enum/std"
 ]
 alloc = [
     "serde/alloc",

View File

@@ -26,6 +26,7 @@ pub mod events;
 pub mod executable;
 pub mod hal;
 pub mod hk;
+pub mod mode;
 pub mod objects;
 pub mod params;
 #[cfg(feature = "alloc")]

View File

@@ -0,0 +1,62 @@
+use crate::tmtc::TargetId;
+use core::mem::size_of;
+#[cfg(feature = "serde")]
+use serde::{Deserialize, Serialize};
+use spacepackets::{ByteConversionError, SizeMissmatch};
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+pub struct ModeAndSubmode {
+    mode: u32,
+    submode: u16,
+}
+
+impl ModeAndSubmode {
+    pub const fn new_mode_only(mode: u32) -> Self {
+        Self { mode, submode: 0 }
+    }
+
+    pub const fn new(mode: u32, submode: u16) -> Self {
+        Self { mode, submode }
+    }
+
+    pub fn raw_len() -> usize {
+        size_of::<u32>() + size_of::<u16>()
+    }
+
+    pub fn from_be_bytes(buf: &[u8]) -> Result<Self, ByteConversionError> {
+        if buf.len() < 6 {
+            return Err(ByteConversionError::FromSliceTooSmall(SizeMissmatch {
+                expected: 6,
+                found: buf.len(),
+            }));
+        }
+        Ok(Self {
+            mode: u32::from_be_bytes(buf[0..4].try_into().unwrap()),
+            submode: u16::from_be_bytes(buf[4..6].try_into().unwrap()),
+        })
+    }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+pub struct ModeCommand {
+    address: TargetId,
+    mode_submode: ModeAndSubmode,
+}
+
+impl ModeCommand {
+    pub const fn new(address: TargetId, mode_submode: ModeAndSubmode) -> Self {
+        Self {
+            address,
+            mode_submode,
+        }
+    }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+pub enum ModeRequest {
+    SetMode(ModeCommand),
+    ReadMode(TargetId),
+    AnnounceMode(TargetId),
+    AnnounceModeRecursive(TargetId),
+}

View File

@@ -32,19 +32,6 @@ pub trait PowerSwitcherCommandSender {
     fn send_switch_on_cmd(&mut self, switch_id: SwitchId) -> Result<(), Self::Error>;
     fn send_switch_off_cmd(&mut self, switch_id: SwitchId) -> Result<(), Self::Error>;
-
-    fn switch_on<T: PowerSwitch>(
-        &mut self,
-        switch: &mut T,
-    ) -> Result<(), <T as PowerSwitch>::Error> {
-        switch.switch_on()
-    }
-
-    fn switch_off<T: PowerSwitch>(
-        &mut self,
-        switch: &mut T,
-    ) -> Result<(), <T as PowerSwitch>::Error> {
-        switch.switch_off()
-    }
 }

 pub trait PowerSwitchInfo {

View File

@@ -11,6 +11,7 @@ use spacepackets::{ByteConversionError, SizeMissmatch};
 pub mod event;
 pub mod event_man;
 pub mod hk;
+pub mod mode;
 #[cfg(feature = "std")]
 pub mod scheduling;
 pub mod verification;
@@ -88,6 +89,12 @@ mod alloc_mod {
     impl_downcast!(EcssTmSender assoc Error);
 }

+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum GenericTcCheckError {
+    NotEnoughAppData,
+    InvalidSubservice,
+}
+
 pub(crate) fn source_buffer_large_enough(cap: usize, len: usize) -> Result<(), EcssTmError> {
     if len > cap {
         return Err(EcssTmError::ByteConversionError(

View File

@@ -0,0 +1,16 @@
+use num_enum::{IntoPrimitive, TryFromPrimitive};
+#[cfg(feature = "serde")]
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Eq, PartialEq, Copy, Clone, IntoPrimitive, TryFromPrimitive)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[repr(u8)]
+pub enum Subservice {
+    TcSetMode = 1,
+    TcReadMode = 3,
+    TcAnnounceMode = 4,
+    TcAnnounceModeRecursive = 5,
+    TmModeReply = 6,
+    TmCantReachMode = 7,
+    TmWrongModeReply = 8,
+}
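
A short sketch of the conversions the num_enum derives provide for this enum (illustrative only, not part of the commit):

use std::convert::TryFrom;

use satrs_core::pus::mode::Subservice;

fn subservice_conversion_example() {
    // TryFromPrimitive: fallible u8 -> Subservice conversion.
    assert_eq!(Subservice::try_from(1).unwrap(), Subservice::TcSetMode);
    // Value 2 is deliberately unassigned and is rejected instead of panicking.
    assert!(Subservice::try_from(2).is_err());
    // IntoPrimitive: infallible Subservice -> u8 conversion.
    let raw: u8 = Subservice::TmModeReply.into();
    assert_eq!(raw, 6);
}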

View File

@@ -72,8 +72,11 @@
 //! The [integration test](https://egit.irs.uni-stuttgart.de/rust/fsrc-launchpad/src/branch/main/fsrc-core/tests/verification_test.rs)
 //! for the verification module contains examples how this module could be used in a more complex
 //! context involving multiple threads
-use crate::pus::{source_buffer_large_enough, EcssTmError, EcssTmErrorWithSend, EcssTmSenderCore};
-use core::fmt::{Display, Formatter};
+use crate::pus::{
+    source_buffer_large_enough, EcssTmError, EcssTmErrorWithSend, EcssTmSenderCore,
+    GenericTcCheckError,
+};
+use core::fmt::{Debug, Display, Formatter};
 use core::hash::{Hash, Hasher};
 use core::marker::PhantomData;
 use core::mem::size_of;
@@ -81,7 +84,7 @@ use core::mem::size_of;
 use delegate::delegate;
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
-use spacepackets::ecss::EcssEnumeration;
+use spacepackets::ecss::{scheduling, EcssEnumeration, PusPacket};
 use spacepackets::tc::PusTc;
 use spacepackets::tm::{PusTm, PusTmSecondaryHeader};
 use spacepackets::{CcsdsPacket, PacketId, PacketSequenceCtrl};
@@ -1366,6 +1369,21 @@ mod stdmod {
     }
 }

+pub fn pus_11_generic_tc_check(
+    pus_tc: &PusTc,
+) -> Result<scheduling::Subservice, GenericTcCheckError> {
+    if pus_tc.user_data().is_none() {
+        return Err(GenericTcCheckError::NotEnoughAppData);
+    }
+    let subservice: scheduling::Subservice = match pus_tc.subservice().try_into() {
+        Ok(subservice) => subservice,
+        Err(_) => {
+            return Err(GenericTcCheckError::InvalidSubservice);
+        }
+    };
+    Ok(subservice)
+}
+
 #[cfg(test)]
 mod tests {
     use crate::pool::{LocalPool, PoolCfg, SharedPool};
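
A sketch of how a service handler can map the new pus_11_generic_tc_check helper onto its own error reporting, mirroring what the example application further below does (illustrative only; the function and error names are taken from this diff, the classify function is hypothetical):

use satrs_core::pus::verification::pus_11_generic_tc_check;
use satrs_core::pus::GenericTcCheckError;
use satrs_core::spacepackets::tc::PusTc;

// Classify a PUS TC[11] packet before dispatching on its subservice.
fn classify_scheduling_tc(pus_tc: &PusTc) -> &'static str {
    match pus_11_generic_tc_check(pus_tc) {
        Ok(_subservice) => "valid scheduling subservice",
        Err(GenericTcCheckError::NotEnoughAppData) => "application data missing",
        Err(GenericTcCheckError::InvalidSubservice) => "unknown subservice",
    }
}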

View File

@@ -23,10 +23,12 @@ pub use ccsds_distrib::{CcsdsDistributor, CcsdsError, CcsdsPacketHandler};
 #[cfg(feature = "alloc")]
 pub use pus_distrib::{PusDistributor, PusServiceProvider};

+pub type TargetId = u32;
+
 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 pub struct AddressableId {
-    pub target_id: u32,
+    pub target_id: TargetId,
     pub unique_id: u32,
 }

View File

@@ -10,6 +10,7 @@ crossbeam-channel = "0.5"
 delegate = "0.9"
 zerocopy = "0.6"
 csv = "1"
+num_enum = "0.5"

 [dependencies.satrs-core]
 path = "../satrs-core"

View File

@@ -1,9 +1,17 @@
+use num_enum::{IntoPrimitive, TryFromPrimitive};
 use satrs_core::events::{EventU32TypedSev, SeverityInfo};
 use std::net::Ipv4Addr;

 use satrs_mib::res_code::{ResultU16, ResultU16Info};
 use satrs_mib::resultcode;

+#[derive(Copy, Clone, PartialEq, Eq, Debug, TryFromPrimitive, IntoPrimitive)]
+#[repr(u8)]
+pub enum CustomPusServiceId {
+    Mode = 200,
+    Health = 201,
+}
+
 #[derive(Copy, Clone, Eq, PartialEq, Debug)]
 pub enum RequestTargetId {
     AcsSubsystem = 1,
@@ -28,8 +36,14 @@ pub mod tmtc_err {
     pub const INVALID_PUS_SERVICE: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 0);
     #[resultcode]
     pub const INVALID_PUS_SUBSERVICE: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 1);
+    #[resultcode]
+    pub const PUS_SERVICE_NOT_IMPLEMENTED: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 2);
-    #[resultcode(info = "Not enough data inside the TC application data field")]
+    #[resultcode(
+        info = "Not enough data inside the TC application data field. Optionally includes: \
+        8 bytes of failure data containing 2 failure parameters, \
+        P1 (u32 big endian): Expected data length, P2: Found data length"
+    )]
     pub const NOT_ENOUGH_APP_DATA: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 2);

     pub const TMTC_RESULTS: &[ResultU16Info] = &[
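
A sketch showing how the new CustomPusServiceId complements the standard ECSS service set (illustrative only, not part of the commit):

use std::convert::TryFrom;

use satrs_example::CustomPusServiceId;

fn custom_service_example() {
    // Service IDs 200 and 201 are outside the standard ECSS range and resolve
    // to the custom enum; any other unknown ID stays an error.
    assert_eq!(
        CustomPusServiceId::try_from(200).unwrap(),
        CustomPusServiceId::Mode
    );
    assert!(CustomPusServiceId::try_from(42).is_err());
}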

View File

@@ -245,6 +245,9 @@ fn main() {
                 HkRequest::Disable(_) => {}
                 HkRequest::ModifyCollectionInterval(_, _) => {}
             },
+            Request::ModeRequest(_mode_req) => {
+                println!("mode request handling not implemented yet")
+            }
         }
         let started_token = reporter_aocs
             .start_success(request.1, Some(&timestamp))
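
A possible shape for the recipient-side handling that this placeholder leaves open (hypothetical, not part of the commit; handle_mode_request is an invented name):

use satrs_core::mode::ModeRequest;

// Hypothetical follow-up to the placeholder above: acting on the routed request.
fn handle_mode_request(mode_req: ModeRequest) {
    match mode_req {
        ModeRequest::SetMode(cmd) => println!("would commit mode command {cmd:?}"),
        ModeRequest::ReadMode(id) => println!("would report mode of target {id}"),
        ModeRequest::AnnounceMode(id) => println!("would announce mode of target {id}"),
        ModeRequest::AnnounceModeRecursive(id) => {
            println!("would announce modes recursively for target {id}")
        }
    }
}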

View File

@@ -2,42 +2,39 @@ use crate::requests::{Request, RequestWithToken};
 use crate::tmtc::{PusTcSource, TmStore};
 use satrs_core::events::EventU32;
 use satrs_core::hk::{CollectionIntervalFactor, HkRequest};
+use satrs_core::mode::{ModeAndSubmode, ModeCommand, ModeRequest};
 use satrs_core::params::Params;
 use satrs_core::pool::StoreAddr;
-use satrs_core::pus::event;
 use satrs_core::pus::event_man::{EventRequest, EventRequestWithToken};
 use satrs_core::pus::hk;
+use satrs_core::pus::mode;
+use satrs_core::pus::mode::Subservice;
 use satrs_core::pus::scheduling::PusScheduler;
 use satrs_core::pus::verification::{
-    FailParams, StdVerifReporterWithSender, TcStateAccepted, VerificationToken,
+    pus_11_generic_tc_check, FailParams, StdVerifReporterWithSender, TcStateAccepted,
+    VerificationToken,
 };
+use satrs_core::pus::{event, GenericTcCheckError};
 use satrs_core::res_code::ResultU16;
-use satrs_core::spacepackets::ecss::scheduling;
+use satrs_core::spacepackets::ecss::{scheduling, PusServiceId};
 use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper;
-use satrs_core::tmtc::{AddressableId, PusServiceProvider};
+use satrs_core::tmtc::{AddressableId, PusServiceProvider, TargetId};
 use satrs_core::{
     spacepackets::ecss::PusPacket, spacepackets::tc::PusTc, spacepackets::time::cds::TimeProvider,
     spacepackets::time::TimeWriter, spacepackets::SpHeader,
 };
-use satrs_example::{hk_err, tmtc_err, TEST_EVENT};
+use satrs_example::{hk_err, tmtc_err, CustomPusServiceId, TEST_EVENT};
 use std::cell::RefCell;
 use std::collections::HashMap;
+use std::convert::TryFrom;
 use std::rc::Rc;
 use std::sync::mpsc::Sender;

 pub struct PusReceiver {
     pub tm_helper: PusTmWithCdsShortHelper,
-    pub tm_tx: Sender<StoreAddr>,
-    pub tm_store: TmStore,
-    pub verif_reporter: StdVerifReporterWithSender,
-    #[allow(dead_code)]
-    tc_source: PusTcSource,
-    event_request_tx: Sender<EventRequestWithToken>,
-    event_sender: Sender<(EventU32, Option<Params>)>,
-    request_map: HashMap<u32, Sender<RequestWithToken>>,
-    stamper: TimeProvider,
-    time_stamp: [u8; 7],
-    scheduler: Rc<RefCell<PusScheduler>>,
+    pub tm_args: PusTmArgs,
+    pub tc_args: PusTcArgs,
+    stamp_helper: TimeStampHelper,
 }

 pub struct PusTmArgs {
@@ -49,30 +46,56 @@ pub struct PusTmArgs {
     pub verif_reporter: StdVerifReporterWithSender,
 }

+impl PusTmArgs {
+    fn vr(&mut self) -> &mut StdVerifReporterWithSender {
+        &mut self.verif_reporter
+    }
+}
+
 pub struct PusTcArgs {
     pub event_request_tx: Sender<EventRequestWithToken>,
     /// Request routing helper. Maps targeted requests to their recipients.
-    pub request_map: HashMap<u32, Sender<RequestWithToken>>,
+    pub request_map: HashMap<TargetId, Sender<RequestWithToken>>,
     /// Required for scheduling of telecommands.
     pub tc_source: PusTcSource,
     pub event_sender: Sender<(EventU32, Option<Params>)>,
     pub scheduler: Rc<RefCell<PusScheduler>>,
 }

+struct TimeStampHelper {
+    stamper: TimeProvider,
+    time_stamp: [u8; 7],
+}
+
+impl TimeStampHelper {
+    pub fn new() -> Self {
+        Self {
+            stamper: TimeProvider::new_with_u16_days(0, 0),
+            time_stamp: [0; 7],
+        }
+    }
+
+    pub fn stamp(&self) -> &[u8] {
+        &self.time_stamp
+    }
+
+    pub fn update_from_now(&mut self) {
+        self.stamper
+            .update_from_now()
+            .expect("Updating timestamp failed");
+        self.stamper
+            .write_to_bytes(&mut self.time_stamp)
+            .expect("Writing timestamp failed");
+    }
+}
+
 impl PusReceiver {
     pub fn new(apid: u16, tm_arguments: PusTmArgs, tc_arguments: PusTcArgs) -> Self {
         Self {
             tm_helper: PusTmWithCdsShortHelper::new(apid),
-            tm_tx: tm_arguments.tm_tx,
-            tm_store: tm_arguments.tm_store,
-            verif_reporter: tm_arguments.verif_reporter,
-            tc_source: tc_arguments.tc_source,
-            event_request_tx: tc_arguments.event_request_tx,
-            event_sender: tc_arguments.event_sender,
-            request_map: tc_arguments.request_map,
-            stamper: TimeProvider::new_with_u16_days(0, 0),
-            time_stamp: [0; 7],
-            scheduler: tc_arguments.scheduler,
+            tm_args: tm_arguments,
+            tc_args: tc_arguments,
+            stamp_helper: TimeStampHelper::new(),
         }
     }
 }
@@ -86,29 +109,56 @@ impl PusServiceProvider for PusReceiver {
         _header: &SpHeader,
         pus_tc: &PusTc,
     ) -> Result<(), Self::Error> {
-        let init_token = self.verif_reporter.add_tc(pus_tc);
-        self.update_time_stamp();
+        let init_token = self.tm_args.verif_reporter.add_tc(pus_tc);
+        self.stamp_helper.update_from_now();
         let accepted_token = self
-            .verif_reporter
-            .acceptance_success(init_token, Some(&self.time_stamp))
+            .tm_args
+            .vr()
+            .acceptance_success(init_token, Some(self.stamp_helper.stamp()))
             .expect("Acceptance success failure");
-        if service == 17 {
-            self.handle_test_service(pus_tc, accepted_token);
-        } else if service == 5 {
-            self.handle_event_request(pus_tc, accepted_token);
-        } else if service == 3 {
-            self.handle_hk_request(pus_tc, accepted_token);
-        } else if service == 11 {
-            self.handle_scheduled_tc(pus_tc, accepted_token);
-        } else {
-            self.update_time_stamp();
-            self.verif_reporter
-                .start_failure(
-                    accepted_token,
-                    FailParams::new(Some(&self.time_stamp), &tmtc_err::INVALID_PUS_SERVICE, None),
-                )
-                .expect("Start failure verification failed")
-        }
+        let service = PusServiceId::try_from(service);
+        match service {
+            Ok(standard_service) => match standard_service {
+                PusServiceId::Test => self.handle_test_service(pus_tc, accepted_token),
+                PusServiceId::Housekeeping => self.handle_hk_request(pus_tc, accepted_token),
+                PusServiceId::Event => self.handle_event_request(pus_tc, accepted_token),
+                PusServiceId::Scheduling => self.handle_scheduled_tc(pus_tc, accepted_token),
+                _ => self
+                    .tm_args
+                    .verif_reporter
+                    .start_failure(
+                        accepted_token,
+                        FailParams::new(
+                            Some(self.stamp_helper.stamp()),
+                            &tmtc_err::PUS_SERVICE_NOT_IMPLEMENTED,
+                            Some(&[standard_service as u8]),
+                        ),
+                    )
+                    .expect("Start failure verification failed"),
+            },
+            Err(e) => {
+                if let Ok(custom_service) = CustomPusServiceId::try_from(e.number) {
+                    match custom_service {
+                        CustomPusServiceId::Mode => {
+                            self.handle_mode_service(pus_tc, accepted_token)
+                        }
+                        CustomPusServiceId::Health => {}
+                    }
+                } else {
+                    self.tm_args
+                        .verif_reporter
+                        .start_failure(
+                            accepted_token,
+                            FailParams::new(
+                                Some(self.stamp_helper.stamp()),
+                                &tmtc_err::INVALID_PUS_SERVICE,
+                                Some(&[e.number]),
+                            ),
+                        )
+                        .expect("Start failure verification failed")
+                }
+            }
+        }
         Ok(())
     }
 }
@@ -120,38 +170,42 @@
                 println!("Received PUS ping command TC[17,1]");
                 println!("Sending ping reply PUS TM[17,2]");
                 let start_token = self
+                    .tm_args
                     .verif_reporter
-                    .start_success(token, Some(&self.time_stamp))
+                    .start_success(token, Some(self.stamp_helper.stamp()))
                     .expect("Error sending start success");
                 let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(17, 2, None);
-                let addr = self.tm_store.add_pus_tm(&ping_reply);
-                self.tm_tx
+                let addr = self.tm_args.tm_store.add_pus_tm(&ping_reply);
+                self.tm_args
+                    .tm_tx
                     .send(addr)
                     .expect("Sending TM to TM funnel failed");
-                self.verif_reporter
-                    .completion_success(start_token, Some(&self.time_stamp))
+                self.tm_args
+                    .verif_reporter
+                    .completion_success(start_token, Some(self.stamp_helper.stamp()))
                     .expect("Error sending completion success");
             }
             128 => {
-                self.update_time_stamp();
-                self.event_sender
+                self.tc_args.event_sender
                     .send((TEST_EVENT.into(), None))
                     .expect("Sending test event failed");
                 let start_token = self
+                    .tm_args
                     .verif_reporter
-                    .start_success(token, Some(&self.time_stamp))
+                    .start_success(token, Some(self.stamp_helper.stamp()))
                     .expect("Error sending start success");
-                self.verif_reporter
-                    .completion_success(start_token, Some(&self.time_stamp))
+                self.tm_args
+                    .verif_reporter
+                    .completion_success(start_token, Some(self.stamp_helper.stamp()))
                     .expect("Error sending completion success");
             }
             _ => {
-                self.update_time_stamp();
-                self.verif_reporter
+                self.tm_args
+                    .verif_reporter
                     .start_failure(
                         token,
                         FailParams::new(
-                            Some(&self.time_stamp),
+                            Some(self.stamp_helper.stamp()),
                             &tmtc_err::INVALID_PUS_SUBSERVICE,
                             None,
                         ),
@@ -161,22 +215,17 @@
         }
     }

-    fn update_time_stamp(&mut self) {
-        self.stamper
-            .update_from_now()
-            .expect("Updating timestamp failed");
-        self.stamper
-            .write_to_bytes(&mut self.time_stamp)
-            .expect("Writing timestamp failed");
-    }
-
     fn handle_hk_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
         if pus_tc.user_data().is_none() {
-            self.update_time_stamp();
-            self.verif_reporter
+            self.tm_args
+                .verif_reporter
                 .start_failure(
                     token,
-                    FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
+                    FailParams::new(
+                        Some(self.stamp_helper.stamp()),
+                        &tmtc_err::NOT_ENOUGH_APP_DATA,
+                        None,
+                    ),
                 )
                 .expect("Sending start failure TM failed");
             return;
@@ -188,25 +237,32 @@
             } else {
                 &hk_err::UNIQUE_ID_MISSING
             };
-            self.update_time_stamp();
-            self.verif_reporter
-                .start_failure(token, FailParams::new(Some(&self.time_stamp), err, None))
+            self.tm_args
+                .verif_reporter
+                .start_failure(
+                    token,
+                    FailParams::new(Some(self.stamp_helper.stamp()), err, None),
+                )
                 .expect("Sending start failure TM failed");
             return;
         }
         let addressable_id = AddressableId::from_raw_be(user_data).unwrap();
-        if !self.request_map.contains_key(&addressable_id.target_id) {
-            self.update_time_stamp();
-            self.verif_reporter
+        if !self.tc_args.request_map.contains_key(&addressable_id.target_id) {
+            self.tm_args
+                .verif_reporter
                 .start_failure(
                     token,
-                    FailParams::new(Some(&self.time_stamp), &hk_err::UNKNOWN_TARGET_ID, None),
+                    FailParams::new(
+                        Some(self.stamp_helper.stamp()),
+                        &hk_err::UNKNOWN_TARGET_ID,
+                        None,
+                    ),
                 )
                 .expect("Sending start failure TM failed");
             return;
         }
         let send_request = |request: HkRequest| {
-            let sender = self.request_map.get(&addressable_id.target_id).unwrap();
+            let sender = self.tc_args.request_map.get(&addressable_id.target_id).unwrap();
             sender
                 .send(RequestWithToken(Request::HkRequest(request), token))
                 .unwrap_or_else(|_| panic!("Sending HK request {request:?} failed"));
@@ -221,12 +277,12 @@
                 == hk::Subservice::TcModifyHkCollectionInterval as u8
             {
                 if user_data.len() < 12 {
-                    self.update_time_stamp();
-                    self.verif_reporter
+                    self.tm_args
+                        .verif_reporter
                         .start_failure(
                             token,
                             FailParams::new(
-                                Some(&self.time_stamp),
+                                Some(self.stamp_helper.stamp()),
                                 &hk_err::COLLECTION_INTERVAL_MISSING,
                                 None,
                             ),
@@ -242,28 +298,24 @@
     }

     fn handle_event_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
-        let send_start_failure = |verif_reporter: &mut StdVerifReporterWithSender,
-                                  timestamp: &[u8; 7],
+        let send_start_failure = |vr: &mut StdVerifReporterWithSender,
+                                  timestamp: &[u8],
                                   failure_code: &ResultU16,
                                   failure_data: Option<&[u8]>| {
-            verif_reporter
-                .start_failure(
+            vr.start_failure(
                 token,
                 FailParams::new(Some(timestamp), failure_code, failure_data),
             )
             .expect("Sending start failure TM failed");
         };
-        let send_start_acceptance = |verif_reporter: &mut StdVerifReporterWithSender,
-                                     timestamp: &[u8; 7]| {
-            verif_reporter
-                .start_success(token, Some(timestamp))
+        let send_start_acceptance = |vr: &mut StdVerifReporterWithSender, timestamp: &[u8]| {
+            vr.start_success(token, Some(timestamp))
                 .expect("Sending start success TM failed")
         };
         if pus_tc.user_data().is_none() {
-            self.update_time_stamp();
             send_start_failure(
-                &mut self.verif_reporter,
-                &self.time_stamp,
+                &mut self.tm_args.verif_reporter,
+                self.stamp_helper.stamp(),
                 &tmtc_err::NOT_ENOUGH_APP_DATA,
                 None,
             );
@@ -271,10 +323,9 @@
         }
         let app_data = pus_tc.user_data().unwrap();
         if app_data.len() < 4 {
-            self.update_time_stamp();
             send_start_failure(
-                &mut self.verif_reporter,
-                &self.time_stamp,
+                &mut self.tm_args.verif_reporter,
+                self.stamp_helper.stamp(),
                 &tmtc_err::NOT_ENOUGH_APP_DATA,
                 None,
             );
@@ -283,9 +334,11 @@
         let event_id = EventU32::from(u32::from_be_bytes(app_data.try_into().unwrap()));
         match PusPacket::subservice(pus_tc).try_into() {
             Ok(event::Subservice::TcEnableEventGeneration) => {
-                self.update_time_stamp();
-                let start_token = send_start_acceptance(&mut self.verif_reporter, &self.time_stamp);
-                self.event_request_tx
+                let start_token = send_start_acceptance(
+                    &mut self.tm_args.verif_reporter,
+                    self.stamp_helper.stamp(),
+                );
+                self.tc_args.event_request_tx
                     .send(EventRequestWithToken {
                         request: EventRequest::Enable(event_id),
                         token: start_token,
@@ -293,9 +346,11 @@
                     .expect("Sending event request failed");
             }
             Ok(event::Subservice::TcDisableEventGeneration) => {
-                self.update_time_stamp();
-                let start_token = send_start_acceptance(&mut self.verif_reporter, &self.time_stamp);
-                self.event_request_tx
+                let start_token = send_start_acceptance(
+                    &mut self.tm_args.verif_reporter,
+                    self.stamp_helper.stamp(),
+                );
+                self.tc_args.event_request_tx
                     .send(EventRequestWithToken {
                         request: EventRequest::Disable(event_id),
                         token: start_token,
@@ -303,10 +358,9 @@
                     .expect("Sending event request failed");
             }
             _ => {
-                self.update_time_stamp();
                 send_start_failure(
-                    &mut self.verif_reporter,
-                    &self.time_stamp,
+                    &mut self.tm_args.verif_reporter,
+                    self.stamp_helper.stamp(),
                     &tmtc_err::INVALID_PUS_SUBSERVICE,
                     None,
                 );
@@ -315,117 +369,214 @@
     }

     fn handle_scheduled_tc(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
-        if pus_tc.user_data().is_none() {
-            self.update_time_stamp();
-            self.verif_reporter
-                .start_failure(
-                    token,
-                    FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
-                )
-                .expect("Sending start failure TM failed");
-            return;
-        }
-        self.update_time_stamp();
-        let subservice: scheduling::Subservice = match pus_tc.subservice().try_into() {
+        let subservice = match pus_11_generic_tc_check(pus_tc) {
             Ok(subservice) => subservice,
-            Err(_) => {
-                self.verif_reporter
-                    .start_failure(
-                        token,
-                        FailParams::new(
-                            Some(&self.time_stamp),
-                            &tmtc_err::NOT_ENOUGH_APP_DATA,
-                            None,
-                        ),
-                    )
-                    .expect("Sending start failure TM failed");
-                return;
-            }
+            Err(e) => match e {
+                GenericTcCheckError::NotEnoughAppData => {
+                    self.tm_args
+                        .verif_reporter
+                        .start_failure(
+                            token,
+                            FailParams::new(
+                                Some(self.stamp_helper.stamp()),
+                                &tmtc_err::NOT_ENOUGH_APP_DATA,
+                                None,
+                            ),
+                        )
+                        .expect("could not send verification error");
+                    return;
+                }
+                GenericTcCheckError::InvalidSubservice => {
+                    self.tm_args
+                        .verif_reporter
+                        .start_failure(
+                            token,
+                            FailParams::new(
+                                Some(self.stamp_helper.stamp()),
+                                &tmtc_err::INVALID_PUS_SUBSERVICE,
+                                None,
+                            ),
+                        )
+                        .expect("could not send verification error");
+                    return;
+                }
+            },
         };
         match subservice {
             scheduling::Subservice::TcEnableScheduling => {
                 let start_token = self
+                    .tm_args
                     .verif_reporter
-                    .start_success(token, Some(&self.time_stamp))
+                    .start_success(token, Some(self.stamp_helper.stamp()))
                     .expect("Error sending start success");
-                let mut scheduler = self.scheduler.borrow_mut();
+                let mut scheduler = self.tc_args.scheduler.borrow_mut();
                 scheduler.enable();
                 if scheduler.is_enabled() {
-                    self.verif_reporter
-                        .completion_success(start_token, Some(&self.time_stamp))
+                    self.tm_args
+                        .verif_reporter
+                        .completion_success(start_token, Some(self.stamp_helper.stamp()))
                         .expect("Error sending completion success");
                 } else {
                     panic!("Failed to enable scheduler");
                 }
+                drop(scheduler);
             }
             scheduling::Subservice::TcDisableScheduling => {
                 let start_token = self
+                    .tm_args
                     .verif_reporter
-                    .start_success(token, Some(&self.time_stamp))
+                    .start_success(token, Some(self.stamp_helper.stamp()))
                     .expect("Error sending start success");
-                let mut scheduler = self.scheduler.borrow_mut();
+                let mut scheduler = self.tc_args.scheduler.borrow_mut();
                 scheduler.disable();
                 if !scheduler.is_enabled() {
-                    self.verif_reporter
-                        .completion_success(start_token, Some(&self.time_stamp))
+                    self.tm_args
+                        .verif_reporter
+                        .completion_success(start_token, Some(self.stamp_helper.stamp()))
                         .expect("Error sending completion success");
                 } else {
                     panic!("Failed to disable scheduler");
                 }
+                drop(scheduler);
             }
             scheduling::Subservice::TcResetScheduling => {
                 let start_token = self
+                    .tm_args
                     .verif_reporter
-                    .start_success(token, Some(&self.time_stamp))
+                    .start_success(token, Some(self.stamp_helper.stamp()))
                     .expect("Error sending start success");
                 let mut pool = self
+                    .tc_args
                     .tc_source
                     .tc_store
                     .pool
                     .write()
                     .expect("Locking pool failed");
-                let mut scheduler = self.scheduler.borrow_mut();
+                let mut scheduler = self.tc_args.scheduler.borrow_mut();
                 scheduler
                     .reset(pool.as_mut())
                     .expect("Error resetting TC Pool");
                 drop(scheduler);
-                self.verif_reporter
-                    .completion_success(start_token, Some(&self.time_stamp))
+                self.tm_args
+                    .verif_reporter
+                    .completion_success(start_token, Some(self.stamp_helper.stamp()))
                     .expect("Error sending completion success");
             }
             scheduling::Subservice::TcInsertActivity => {
                 let start_token = self
+                    .tm_args
                     .verif_reporter
-                    .start_success(token, Some(&self.time_stamp))
-                    .expect("Error sending start success");
+                    .start_success(token, Some(self.stamp_helper.stamp()))
+                    .expect("error sending start success");
                 let mut pool = self
+                    .tc_args
                     .tc_source
                     .tc_store
                     .pool
                     .write()
-                    .expect("Locking pool failed");
-                let mut scheduler = self.scheduler.borrow_mut();
+                    .expect("locking pool failed");
+                let mut scheduler = self.tc_args.scheduler.borrow_mut();
                 scheduler
                     .insert_wrapped_tc::<TimeProvider>(pus_tc, pool.as_mut())
-                    .expect("TODO: panic message");
+                    .expect("insertion of activity into pool failed");
                 drop(scheduler);
-                self.verif_reporter
-                    .completion_success(start_token, Some(&self.time_stamp))
-                    .expect("Error sending completion success");
+                self.tm_args
+                    .verif_reporter
+                    .completion_success(start_token, Some(self.stamp_helper.stamp()))
+                    .expect("sending completion success failed");
             }
             _ => {}
         }
     }
+
+    fn handle_mode_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
+        let mut app_data_len = 0;
+        let app_data = pus_tc.user_data();
+        if app_data.is_some() {
+            app_data_len = pus_tc.user_data().unwrap().len();
+        }
+        if app_data_len < 4 {
+            self.tm_args
+                .verif_reporter
+                .start_failure(
+                    token,
+                    FailParams::new(
+                        Some(self.stamp_helper.stamp()),
+                        &tmtc_err::NOT_ENOUGH_APP_DATA,
+                        Some(format!("expected {} bytes, found {}", 4, app_data_len).as_bytes()),
+                    ),
+                )
+                .expect("Sending start failure TM failed");
+            return;
+        }
+        let app_data = app_data.unwrap();
+        let subservice = mode::Subservice::try_from(PusPacket::subservice(pus_tc));
+        if let Ok(subservice) = subservice {
+            match subservice {
+                Subservice::TcSetMode => {
+                    let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
+                    let min_len = ModeAndSubmode::raw_len() + 4;
+                    if app_data_len < min_len {
+                        self.tm_args
+                            .verif_reporter
+                            .start_failure(
+                                token,
+                                FailParams::new(
+                                    Some(self.stamp_helper.stamp()),
+                                    &tmtc_err::NOT_ENOUGH_APP_DATA,
+                                    Some(
+                                        format!(
+                                            "expected {} bytes, found {}",
+                                            min_len, app_data_len
+                                        )
+                                        .as_bytes(),
+                                    ),
+                                ),
+                            )
+                            .expect("Sending start failure TM failed");
+                        return;
+                    }
+                    // Should never fail after the size check above
+                    let mode_submode = ModeAndSubmode::from_be_bytes(
+                        app_data[4..4 + ModeAndSubmode::raw_len()]
+                            .try_into()
+                            .unwrap(),
+                    )
+                    .unwrap();
+                    let mode_request = Request::ModeRequest(ModeRequest::SetMode(
+                        ModeCommand::new(target_id, mode_submode),
+                    ));
+                    match self.tc_args.request_map.get(&target_id) {
+                        None => {}
+                        Some(sender_to_recipient) => {
+                            sender_to_recipient
+                                .send(RequestWithToken(mode_request, token))
+                                .expect("sending mode request failed");
+                        }
+                    }
+                }
+                Subservice::TcReadMode => {}
+                Subservice::TcAnnounceMode => {}
+                Subservice::TcAnnounceModeRecursive => {}
+                Subservice::TmModeReply => {}
+                Subservice::TmCantReachMode => {}
+                Subservice::TmWrongModeReply => {}
+            }
+        } else {
+            self.tm_args
+                .verif_reporter
+                .start_failure(
+                    token,
+                    FailParams::new(
+                        Some(self.stamp_helper.stamp()),
+                        &tmtc_err::INVALID_PUS_SUBSERVICE,
+                        Some(&[PusPacket::subservice(pus_tc)]),
+                    ),
+                )
+                .expect("Sending start failure TM failed");
+        }
+    }
 }
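
A sketch of how the routing map consumed by PusTcArgs::request_map can be wired up (hypothetical wiring; build_request_map is an invented helper, while TargetId, RequestTargetId, and RequestWithToken come from this diff):

use std::collections::HashMap;
use std::sync::mpsc;

use satrs_core::tmtc::TargetId;
use satrs_example::RequestTargetId;

use crate::requests::RequestWithToken;

// One channel per request recipient: the sender side goes into the map keyed
// by target ID, the receiver side is handed to the subsystem task.
fn build_request_map() -> (
    HashMap<TargetId, mpsc::Sender<RequestWithToken>>,
    mpsc::Receiver<RequestWithToken>,
) {
    let (tx, rx) = mpsc::channel();
    let mut request_map = HashMap::new();
    request_map.insert(RequestTargetId::AcsSubsystem as TargetId, tx);
    (request_map, rx)
}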

View File

@@ -1,9 +1,12 @@
 use satrs_core::hk::HkRequest;
+use satrs_core::mode::ModeRequest;
 use satrs_core::pus::verification::{TcStateAccepted, VerificationToken};

 #[derive(Copy, Clone, Eq, PartialEq, Debug)]
+#[non_exhaustive]
 pub enum Request {
     HkRequest(HkRequest),
+    ModeRequest(ModeRequest),
 }

 #[derive(Copy, Clone, Eq, PartialEq, Debug)]
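
A final sketch tying the pieces together: constructing the request variant that the PUS receiver above routes to its recipients (illustrative only, not part of the commit):

use satrs_core::mode::{ModeAndSubmode, ModeCommand, ModeRequest};

fn build_set_mode_request() -> ModeRequest {
    // Target 1 (AcsSubsystem in the example), mode 2, submode 0.
    ModeRequest::SetMode(ModeCommand::new(1, ModeAndSubmode::new_mode_only(2)))
}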