started adding a mode request handler to the PUS handler

Robin Müller 2023-02-15 02:12:00 +01:00
parent d960c089fd
commit 802333cf3e
No known key found for this signature in database
GPG Key ID: BE6480244DFE612C
5 changed files with 187 additions and 103 deletions

View File

@ -88,6 +88,12 @@ mod alloc_mod {
impl_downcast!(EcssTmSender assoc Error);
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum GenericTcCheckError {
    NotEnoughAppData,
    InvalidSubservice,
}
pub(crate) fn source_buffer_large_enough(cap: usize, len: usize) -> Result<(), EcssTmError> {
    if len > cap {
        return Err(EcssTmError::ByteConversionError(

View File

@ -72,8 +72,11 @@
//! The [integration test](https://egit.irs.uni-stuttgart.de/rust/fsrc-launchpad/src/branch/main/fsrc-core/tests/verification_test.rs)
//! for the verification module contains examples of how this module could be used in a more complex
//! context involving multiple threads
use crate::pus::{source_buffer_large_enough, EcssTmError, EcssTmErrorWithSend, EcssTmSenderCore};
use core::fmt::{Display, Formatter};
use crate::pus::{
source_buffer_large_enough, EcssTmError, EcssTmErrorWithSend, EcssTmSenderCore,
GenericTcCheckError,
};
use core::fmt::{Debug, Display, Formatter};
use core::hash::{Hash, Hasher};
use core::marker::PhantomData;
use core::mem::size_of;
@ -81,7 +84,7 @@ use core::mem::size_of;
use delegate::delegate;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use spacepackets::ecss::EcssEnumeration;
use spacepackets::ecss::{scheduling, EcssEnumeration, PusPacket};
use spacepackets::tc::PusTc;
use spacepackets::tm::{PusTm, PusTmSecondaryHeader};
use spacepackets::{CcsdsPacket, PacketId, PacketSequenceCtrl};
@ -1366,6 +1369,21 @@ mod stdmod {
}
}
pub fn pus_11_generic_tc_check(
    pus_tc: &PusTc,
) -> Result<scheduling::Subservice, GenericTcCheckError> {
    if pus_tc.user_data().is_none() {
        return Err(GenericTcCheckError::NotEnoughAppData);
    }
    let subservice: scheduling::Subservice = match pus_tc.subservice().try_into() {
        Ok(subservice) => subservice,
        Err(_) => {
            return Err(GenericTcCheckError::InvalidSubservice);
        }
    };
    Ok(subservice)
}
#[cfg(test)]
mod tests {
use crate::pool::{LocalPool, PoolCfg, SharedPool};
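The pus_11_generic_tc_check helper added above bundles the two checks every PUS service 11 handler needs: it rejects telecommands without application data and maps unrecognized subservice numbers to GenericTcCheckError::InvalidSubservice. A minimal usage sketch as seen from a downstream crate such as satrs-example; the dispatch_scheduling_tc function and its println! reporting are made up for illustration and are not part of this commit:

use satrs_core::pus::verification::pus_11_generic_tc_check;
use satrs_core::pus::GenericTcCheckError;
use satrs_core::spacepackets::ecss::scheduling;
use satrs_core::spacepackets::tc::PusTc;

// Hypothetical dispatcher: maps the check result to log output instead of verification TM.
fn dispatch_scheduling_tc(pus_tc: &PusTc) {
    match pus_11_generic_tc_check(pus_tc) {
        Ok(scheduling::Subservice::TcEnableScheduling) => println!("enable scheduling"),
        Ok(scheduling::Subservice::TcDisableScheduling) => println!("disable scheduling"),
        Ok(_) => println!("subservice not handled in this sketch"),
        Err(GenericTcCheckError::NotEnoughAppData) => println!("TC carries no application data"),
        Err(GenericTcCheckError::InvalidSubservice) => println!("unknown scheduling subservice"),
    }
}

The real handler in satrs-example (shown further down in this diff) reports the same two error cases through the verification reporter instead of printing them.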

View File

@ -10,6 +10,7 @@ crossbeam-channel = "0.5"
delegate = "0.9"
zerocopy = "0.6"
csv = "1"
num_enum = "0.5"
[dependencies.satrs-core]
path = "../satrs-core"

View File

@ -1,9 +1,17 @@
use num_enum::{IntoPrimitive, TryFromPrimitive};
use satrs_core::events::{EventU32TypedSev, SeverityInfo};
use std::net::Ipv4Addr;
use satrs_mib::res_code::{ResultU16, ResultU16Info};
use satrs_mib::resultcode;
#[derive(Copy, Clone, PartialEq, Eq, Debug, TryFromPrimitive, IntoPrimitive)]
#[repr(u8)]
pub enum CustomPusServiceId {
    Mode = 200,
    Health = 201,
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum RequestTargetId {
    AcsSubsystem = 1,
@ -28,6 +36,8 @@ pub mod tmtc_err {
pub const INVALID_PUS_SERVICE: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 0);
#[resultcode]
pub const INVALID_PUS_SUBSERVICE: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 1);
#[resultcode]
pub const PUS_SERVICE_NOT_IMPLEMENTED: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 2);
#[resultcode(info = "Not enough data inside the TC application data field")]
pub const NOT_ENOUGH_APP_DATA: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 2);

View File

@ -4,24 +4,26 @@ use satrs_core::events::EventU32;
use satrs_core::hk::{CollectionIntervalFactor, HkRequest};
use satrs_core::params::Params;
use satrs_core::pool::StoreAddr;
use satrs_core::pus::event;
use satrs_core::pus::event_man::{EventRequest, EventRequestWithToken};
use satrs_core::pus::hk;
use satrs_core::pus::scheduling::PusScheduler;
use satrs_core::pus::verification::{
FailParams, StdVerifReporterWithSender, TcStateAccepted, VerificationToken,
pus_11_generic_tc_check, FailParams, StdVerifReporterWithSender, TcStateAccepted,
VerificationToken,
};
use satrs_core::pus::{event, GenericTcCheckError};
use satrs_core::res_code::ResultU16;
use satrs_core::spacepackets::ecss::scheduling;
use satrs_core::spacepackets::ecss::{scheduling, PusServiceId};
use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper;
use satrs_core::tmtc::{AddressableId, PusServiceProvider};
use satrs_core::{
spacepackets::ecss::PusPacket, spacepackets::tc::PusTc, spacepackets::time::cds::TimeProvider,
spacepackets::time::TimeWriter, spacepackets::SpHeader,
};
use satrs_example::{hk_err, tmtc_err, TEST_EVENT};
use satrs_example::{hk_err, tmtc_err, CustomPusServiceId, TEST_EVENT};
use std::cell::RefCell;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::rc::Rc;
use std::sync::mpsc::Sender;
@ -32,11 +34,10 @@ pub struct PusReceiver {
pub verif_reporter: StdVerifReporterWithSender,
#[allow(dead_code)]
tc_source: PusTcSource,
stamp_helper: TimeStampHelper,
event_request_tx: Sender<EventRequestWithToken>,
event_sender: Sender<(EventU32, Option<Params>)>,
request_map: HashMap<u32, Sender<RequestWithToken>>,
stamper: TimeProvider,
time_stamp: [u8; 7],
scheduler: Rc<RefCell<PusScheduler>>,
}
@ -59,6 +60,33 @@ pub struct PusTcArgs {
pub scheduler: Rc<RefCell<PusScheduler>>,
}
struct TimeStampHelper {
    stamper: TimeProvider,
    time_stamp: [u8; 7],
}
impl TimeStampHelper {
    pub fn new() -> Self {
        Self {
            stamper: TimeProvider::new_with_u16_days(0, 0),
            time_stamp: [0; 7],
        }
    }
    pub fn stamp(&self) -> &[u8] {
        &self.time_stamp
    }
    pub fn update_from_now(&mut self) {
        self.stamper
            .update_from_now()
            .expect("Updating timestamp failed");
        self.stamper
            .write_to_bytes(&mut self.time_stamp)
            .expect("Writing timestamp failed");
    }
}
impl PusReceiver {
pub fn new(apid: u16, tm_arguments: PusTmArgs, tc_arguments: PusTcArgs) -> Self {
Self {
@ -70,8 +98,7 @@ impl PusReceiver {
event_request_tx: tc_arguments.event_request_tx,
event_sender: tc_arguments.event_sender,
request_map: tc_arguments.request_map,
stamper: TimeProvider::new_with_u16_days(0, 0),
time_stamp: [0; 7],
stamp_helper: TimeStampHelper::new(),
scheduler: tc_arguments.scheduler,
}
}
@ -87,27 +114,51 @@ impl PusServiceProvider for PusReceiver {
pus_tc: &PusTc,
) -> Result<(), Self::Error> {
let init_token = self.verif_reporter.add_tc(pus_tc);
self.update_time_stamp();
self.stamp_helper.update_from_now();
let accepted_token = self
.verif_reporter
.acceptance_success(init_token, Some(&self.time_stamp))
.acceptance_success(init_token, Some(self.stamp_helper.stamp()))
.expect("Acceptance success failure");
if service == 17 {
self.handle_test_service(pus_tc, accepted_token);
} else if service == 5 {
self.handle_event_request(pus_tc, accepted_token);
} else if service == 3 {
self.handle_hk_request(pus_tc, accepted_token);
} else if service == 11 {
self.handle_scheduled_tc(pus_tc, accepted_token);
} else {
self.update_time_stamp();
self.verif_reporter
.start_failure(
accepted_token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::INVALID_PUS_SERVICE, None),
)
.expect("Start failure verification failed")
let service = PusServiceId::try_from(service);
match service {
Ok(standard_service) => match standard_service {
PusServiceId::Test => self.handle_test_service(pus_tc, accepted_token),
PusServiceId::Housekeeping => self.handle_hk_request(pus_tc, accepted_token),
PusServiceId::Event => self.handle_event_request(pus_tc, accepted_token),
PusServiceId::Scheduling => self.handle_scheduled_tc(pus_tc, accepted_token),
_ => self
.verif_reporter
.start_failure(
accepted_token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::PUS_SERVICE_NOT_IMPLEMENTED,
Some(&[standard_service as u8]),
),
)
.expect("Start failure verification failed"),
},
Err(e) => {
if let Ok(custom_service) = CustomPusServiceId::try_from(e.number) {
match custom_service {
CustomPusServiceId::Mode => {
self.handle_mode_service(pus_tc, accepted_token)
}
CustomPusServiceId::Health => {}
}
} else {
self.verif_reporter
.start_failure(
accepted_token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::INVALID_PUS_SERVICE,
Some(&[e.number]),
),
)
.expect("Start failure verification failed")
}
}
}
Ok(())
}
@ -121,7 +172,7 @@ impl PusReceiver {
println!("Sending ping reply PUS TM[17,2]");
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(17, 2, None);
let addr = self.tm_store.add_pus_tm(&ping_reply);
@ -129,29 +180,27 @@ impl PusReceiver {
.send(addr)
.expect("Sending TM to TM funnel failed");
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("Error sending completion success");
}
128 => {
self.update_time_stamp();
self.event_sender
.send((TEST_EVENT.into(), None))
.expect("Sending test event failed");
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("Error sending completion success");
}
_ => {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(
Some(&self.time_stamp),
Some(self.stamp_helper.stamp()),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
),
@ -161,22 +210,16 @@ impl PusReceiver {
}
}
fn update_time_stamp(&mut self) {
self.stamper
.update_from_now()
.expect("Updating timestamp failed");
self.stamper
.write_to_bytes(&mut self.time_stamp)
.expect("Writing timestamp failed");
}
fn handle_hk_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
if pus_tc.user_data().is_none() {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
),
)
.expect("Sending start failure TM failed");
return;
@ -188,19 +231,24 @@ impl PusReceiver {
} else {
&hk_err::UNIQUE_ID_MISSING
};
self.update_time_stamp();
self.verif_reporter
.start_failure(token, FailParams::new(Some(&self.time_stamp), err, None))
.start_failure(
token,
FailParams::new(Some(self.stamp_helper.stamp()), err, None),
)
.expect("Sending start failure TM failed");
return;
}
let addressable_id = AddressableId::from_raw_be(user_data).unwrap();
if !self.request_map.contains_key(&addressable_id.target_id) {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &hk_err::UNKNOWN_TARGET_ID, None),
FailParams::new(
Some(self.stamp_helper.stamp()),
&hk_err::UNKNOWN_TARGET_ID,
None,
),
)
.expect("Sending start failure TM failed");
return;
@ -221,12 +269,11 @@ impl PusReceiver {
== hk::Subservice::TcModifyHkCollectionInterval as u8
{
if user_data.len() < 12 {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(
Some(&self.time_stamp),
Some(self.stamp_helper.stamp()),
&hk_err::COLLECTION_INTERVAL_MISSING,
None,
),
@ -243,7 +290,7 @@ impl PusReceiver {
fn handle_event_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
let send_start_failure = |verif_reporter: &mut StdVerifReporterWithSender,
timestamp: &[u8; 7],
timestamp: &[u8],
failure_code: &ResultU16,
failure_data: Option<&[u8]>| {
verif_reporter
@ -254,16 +301,15 @@ impl PusReceiver {
.expect("Sending start failure TM failed");
};
let send_start_acceptance = |verif_reporter: &mut StdVerifReporterWithSender,
timestamp: &[u8; 7]| {
timestamp: &[u8]| {
verif_reporter
.start_success(token, Some(timestamp))
.expect("Sending start success TM failed")
};
if pus_tc.user_data().is_none() {
self.update_time_stamp();
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
self.stamp_helper.stamp(),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
);
@ -271,10 +317,9 @@ impl PusReceiver {
}
let app_data = pus_tc.user_data().unwrap();
if app_data.len() < 4 {
self.update_time_stamp();
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
self.stamp_helper.stamp(),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
);
@ -283,8 +328,8 @@ impl PusReceiver {
let event_id = EventU32::from(u32::from_be_bytes(app_data.try_into().unwrap()));
match PusPacket::subservice(pus_tc).try_into() {
Ok(event::Subservice::TcEnableEventGeneration) => {
self.update_time_stamp();
let start_token = send_start_acceptance(&mut self.verif_reporter, &self.time_stamp);
let start_token =
send_start_acceptance(&mut self.verif_reporter, self.stamp_helper.stamp());
self.event_request_tx
.send(EventRequestWithToken {
request: EventRequest::Enable(event_id),
@ -293,8 +338,8 @@ impl PusReceiver {
.expect("Sending event request failed");
}
Ok(event::Subservice::TcDisableEventGeneration) => {
self.update_time_stamp();
let start_token = send_start_acceptance(&mut self.verif_reporter, &self.time_stamp);
let start_token =
send_start_acceptance(&mut self.verif_reporter, self.stamp_helper.stamp());
self.event_request_tx
.send(EventRequestWithToken {
request: EventRequest::Disable(event_id),
@ -303,10 +348,9 @@ impl PusReceiver {
.expect("Sending event request failed");
}
_ => {
self.update_time_stamp();
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
self.stamp_helper.stamp(),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
);
@ -315,75 +359,74 @@ impl PusReceiver {
}
fn handle_scheduled_tc(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
if pus_tc.user_data().is_none() {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
)
.expect("Sending start failure TM failed");
return;
}
self.update_time_stamp();
let subservice: scheduling::Subservice = match pus_tc.subservice().try_into() {
let subservice = match pus_11_generic_tc_check(pus_tc) {
Ok(subservice) => subservice,
Err(_) => {
self.verif_reporter
.start_failure(
token,
FailParams::new(
Some(&self.time_stamp),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
),
)
.expect("Sending start failure TM failed");
return;
}
Err(e) => match e {
GenericTcCheckError::NotEnoughAppData => {
self.verif_reporter
.start_failure(
token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
),
)
.expect("could not sent verification error");
return;
}
GenericTcCheckError::InvalidSubservice => {
self.verif_reporter
.start_failure(
token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
),
)
.expect("could not sent verification error");
return;
}
},
};
match subservice {
scheduling::Subservice::TcEnableScheduling => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
let mut scheduler = self.scheduler.borrow_mut();
scheduler.enable();
if scheduler.is_enabled() {
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("Error sending completion success");
} else {
panic!("Failed to enable scheduler");
}
drop(scheduler);
}
scheduling::Subservice::TcDisableScheduling => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
let mut scheduler = self.scheduler.borrow_mut();
scheduler.disable();
if !scheduler.is_enabled() {
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("Error sending completion success");
} else {
panic!("Failed to disable scheduler");
}
drop(scheduler);
}
scheduling::Subservice::TcResetScheduling => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
let mut pool = self
@ -400,13 +443,13 @@ impl PusReceiver {
drop(scheduler);
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("Error sending completion success");
}
scheduling::Subservice::TcInsertActivity => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
let mut pool = self
@ -422,10 +465,16 @@ impl PusReceiver {
drop(scheduler);
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("Error sending completion success");
}
_ => {}
}
}
fn handle_mode_service(&mut self, _pus_tc: &PusTc, _token: VerificationToken<TcStateAccepted>) {
    //match pus_tc.subservice() {
    //}
}
}
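The handle_mode_service method added at the bottom is still an empty stub. A purely speculative sketch of one way it could evolve, following the pattern of pus_11_generic_tc_check above; the ModeSubservice enum, its subservice numbers, and pus_200_mode_tc_check are all invented here and do not exist in the code base:

use num_enum::TryFromPrimitive;
use satrs_core::pus::GenericTcCheckError;
use satrs_core::spacepackets::ecss::PusPacket;
use satrs_core::spacepackets::tc::PusTc;
use std::convert::TryFrom;

// Hypothetical subservice IDs for the custom mode service (200); not defined anywhere yet.
#[derive(Copy, Clone, Debug, PartialEq, Eq, TryFromPrimitive)]
#[repr(u8)]
pub enum ModeSubservice {
    TcSetMode = 1,
    TcReadMode = 3,
}

// Sketch of a generic check in the spirit of pus_11_generic_tc_check: reject TCs without
// application data and map unknown subservice numbers to InvalidSubservice.
pub fn pus_200_mode_tc_check(pus_tc: &PusTc) -> Result<ModeSubservice, GenericTcCheckError> {
    if pus_tc.user_data().is_none() {
        return Err(GenericTcCheckError::NotEnoughAppData);
    }
    ModeSubservice::try_from(pus_tc.subservice())
        .map_err(|_| GenericTcCheckError::InvalidSubservice)
}

handle_mode_service could then match on the returned subservice and report failures through the verification reporter, just like the scheduling handler does.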