Major refactoring and update of PUS module

2024-04-04 15:18:53 +02:00
parent 344fe6a4c0
commit df2733a176
75 changed files with 9295 additions and 4764 deletions

View File

@ -17,11 +17,15 @@ zerocopy = "0.6"
csv = "1"
num_enum = "0.7"
thiserror = "1"
lazy_static = "1"
strum = { version = "0.26", features = ["derive"] }
derive-new = "0.5"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
[dependencies.satrs]
# version = "0.2.0-rc.0"
path = "../satrs"
features = ["test_util"]
[dependencies.satrs-mib]
version = "0.1.1"
@ -30,3 +34,6 @@ path = "../satrs-mib"
[features]
dyn_tmtc = []
default = ["dyn_tmtc"]
[dev-dependencies]
env_logger = "0.11"

View File

@ -4,11 +4,12 @@ import dataclasses
import enum
import struct
from spacepackets.ecss.tc import PacketId, PacketType
EXAMPLE_PUS_APID = 0x02
EXAMPLE_PUS_PACKET_ID_TM = PacketId(PacketType.TM, True, EXAMPLE_PUS_APID)
TM_PACKET_IDS = [EXAMPLE_PUS_PACKET_ID_TM]
class Apid(enum.IntEnum):
SCHED = 1
GENERIC_PUS = 2
ACS = 3
CFDP = 4
class EventSeverity(enum.IntEnum):
@ -36,8 +37,8 @@ class EventU32:
)
class RequestTargetId(enum.IntEnum):
ACS = 1
class AcsId(enum.IntEnum):
MGM_0 = 0
class AcsHkIds(enum.IntEnum):

View File

@ -3,10 +3,11 @@
import logging
import sys
import time
from typing import Optional
from typing import Any, Optional
from prompt_toolkit.history import History
from prompt_toolkit.history import FileHistory
from spacepackets.ccsds import PacketId, PacketType
import tmtccmd
from spacepackets.ecss import PusTelemetry, PusVerificator
from spacepackets.ecss.pus_17_test import Service17Tm
@ -16,7 +17,7 @@ from spacepackets.ccsds.time import CdsShortTimestamp
from tmtccmd import TcHandlerBase, ProcedureParamsWrapper
from tmtccmd.core.base import BackendRequest
from tmtccmd.pus import VerificationWrapper
from tmtccmd.tmtc import CcsdsTmHandler, SpecificApidHandlerBase
from tmtccmd.tmtc import CcsdsTmHandler, GenericApidHandlerBase
from tmtccmd.com import ComInterface
from tmtccmd.config import (
CmdTreeNode,
@ -46,7 +47,7 @@ from tmtccmd.util.obj_id import ObjectIdDictT
import pus_tc
from common import EXAMPLE_PUS_APID, TM_PACKET_IDS, EventU32
from common import Apid, EventU32
_LOGGER = logging.getLogger()
@ -62,10 +63,13 @@ class SatRsConfigHook(HookBase):
)
assert self.cfg_path is not None
packet_id_list = []
for apid in Apid:
packet_id_list.append(PacketId(PacketType.TM, True, apid))
cfg = create_com_interface_cfg_default(
com_if_key=com_if_key,
json_cfg_path=self.cfg_path,
space_packet_ids=TM_PACKET_IDS,
space_packet_ids=packet_id_list,
)
assert cfg is not None
return create_com_interface_default(cfg)
@ -85,19 +89,19 @@ class SatRsConfigHook(HookBase):
return get_core_object_ids()
class PusHandler(SpecificApidHandlerBase):
class PusHandler(GenericApidHandlerBase):
def __init__(
self,
file_logger: logging.Logger,
verif_wrapper: VerificationWrapper,
raw_logger: RawTmtcTimedLogWrapper,
):
super().__init__(EXAMPLE_PUS_APID, None)
super().__init__(None)
self.file_logger = file_logger
self.raw_logger = raw_logger
self.verif_wrapper = verif_wrapper
def handle_tm(self, packet: bytes, _user_args: any):
def handle_tm(self, apid: int, packet: bytes, _user_args: Any):
try:
pus_tm = PusTelemetry.unpack(packet, time_reader=CdsShortTimestamp.empty())
except ValueError as e:
@ -177,7 +181,7 @@ class TcHandler(TcHandlerBase):
tc_sched_timestamp_len=CdsShortTimestamp.TIMESTAMP_SIZE,
seq_cnt_provider=seq_count_provider,
pus_verificator=self.verif_wrapper.pus_verificator,
default_pus_apid=EXAMPLE_PUS_APID,
default_pus_apid=None,
)
def send_cb(self, send_params: SendCbParams):
@ -221,7 +225,6 @@ def main():
post_args_wrapper.set_params_without_prompts(proc_wrapper)
else:
post_args_wrapper.set_params_with_prompts(proc_wrapper)
params.apid = EXAMPLE_PUS_APID
setup_args = SetupWrapper(
hook_obj=hook_obj, setup_params=params, proc_param_wrapper=proc_wrapper
)
@ -233,8 +236,9 @@ def main():
verification_wrapper = VerificationWrapper(verificator, _LOGGER, file_logger)
# Create primary TM handler and add it to the CCSDS Packet Handler
tm_handler = PusHandler(file_logger, verification_wrapper, raw_logger)
ccsds_handler = CcsdsTmHandler(generic_handler=None)
ccsds_handler.add_apid_handler(tm_handler)
ccsds_handler = CcsdsTmHandler(generic_handler=tm_handler)
# TODO: We could add the CFDP handler for the CFDP APID at a later stage.
# ccsds_handler.add_apid_handler(tm_handler)
# Create TC handler
seq_count_provider = PusFileSeqCountProvider()

View File

@ -1,27 +1,58 @@
import datetime
import struct
import logging
from spacepackets.ccsds import CdsShortTimestamp
from spacepackets.ecss import PusTelecommand
from tmtccmd.config import CmdTreeNode
from tmtccmd.pus.tc.s200_fsfw_mode import Mode
from tmtccmd.tmtc import DefaultPusQueueHelper
from tmtccmd.pus.s11_tc_sched import create_time_tagged_cmd
from tmtccmd.pus.tc.s3_fsfw_hk import create_request_one_hk_command
from tmtccmd.pus.s200_fsfw_mode import Subservice as ModeSubservice
from common import (
EXAMPLE_PUS_APID,
make_addressable_id,
RequestTargetId,
AcsHkIds,
)
from common import AcsId, Apid
_LOGGER = logging.getLogger(__name__)
def create_set_mode_cmd(
apid: int, unique_id: int, mode: int, submode: int
) -> PusTelecommand:
app_data = bytearray()
app_data.extend(struct.pack("!I", unique_id))
app_data.extend(struct.pack("!I", mode))
app_data.extend(struct.pack("!H", submode))
return PusTelecommand(
service=200,
subservice=ModeSubservice.TC_MODE_COMMAND,
apid=apid,
app_data=app_data,
)
def create_cmd_definition_tree() -> CmdTreeNode:
root_node = CmdTreeNode.root_node()
hk_node = CmdTreeNode("hk", "Housekeeping Node", hide_children_for_print=True)
hk_node.add_child(CmdTreeNode("one_shot_hk", "Request One Shot HK set"))
hk_node.add_child(
CmdTreeNode("enable", "Enable periodic housekeeping data generation")
)
hk_node.add_child(
CmdTreeNode("disable", "Disable periodic housekeeping data generation")
)
mode_node = CmdTreeNode("mode", "Mode Node", hide_children_for_print=True)
set_mode_node = CmdTreeNode(
"set_mode", "Set Node", hide_children_which_are_leaves=True
)
set_mode_node.add_child(CmdTreeNode("off", "Set OFF Mode"))
set_mode_node.add_child(CmdTreeNode("on", "Set ON Mode"))
set_mode_node.add_child(CmdTreeNode("normal", "Set NORMAL Mode"))
mode_node.add_child(set_mode_node)
mode_node.add_child(CmdTreeNode("read_mode", "Read Mode"))
test_node = CmdTreeNode("test", "Test Node")
test_node.add_child(CmdTreeNode("ping", "Send PUS ping TC"))
test_node.add_child(CmdTreeNode("trigger_event", "Send PUS test to trigger event"))
@ -37,7 +68,9 @@ def create_cmd_definition_tree() -> CmdTreeNode:
acs_node = CmdTreeNode("acs", "ACS Subsystem Node")
mgm_node = CmdTreeNode("mgms", "MGM devices node")
mgm_node.add_child(CmdTreeNode("one_shot_hk", "Request one shot HK"))
mgm_node.add_child(mode_node)
mgm_node.add_child(hk_node)
acs_node.add_child(mgm_node)
root_node.add_child(acs_node)
@ -54,10 +87,14 @@ def pack_pus_telecommands(q: DefaultPusQueueHelper, cmd_path: str):
assert len(cmd_path_list) >= 2
if cmd_path_list[1] == "ping":
q.add_log_cmd("Sending PUS ping telecommand")
return q.add_pus_tc(PusTelecommand(service=17, subservice=1))
return q.add_pus_tc(
PusTelecommand(apid=Apid.GENERIC_PUS, service=17, subservice=1)
)
elif cmd_path_list[1] == "trigger_event":
q.add_log_cmd("Triggering test event")
return q.add_pus_tc(PusTelecommand(service=17, subservice=128))
return q.add_pus_tc(
PusTelecommand(apid=Apid.GENERIC_PUS, service=17, subservice=128)
)
if cmd_path_list[0] == "scheduler":
assert len(cmd_path_list) >= 2
if cmd_path_list[1] == "schedule_ping_10_secs_ahead":
@ -69,17 +106,38 @@ def pack_pus_telecommands(q: DefaultPusQueueHelper, cmd_path: str):
create_time_tagged_cmd(
time_stamp,
PusTelecommand(service=17, subservice=1),
apid=EXAMPLE_PUS_APID,
apid=Apid.SCHED,
)
)
if cmd_path_list[0] == "acs":
assert len(cmd_path_list) >= 2
if cmd_path_list[1] == "mgm":
if cmd_path_list[1] == "mgms":
assert len(cmd_path_list) >= 3
if cmd_path_list[2] == "one_shot_hk":
q.add_log_cmd("Sending HK one shot request")
q.add_pus_tc(
create_request_one_hk_command(
make_addressable_id(RequestTargetId.ACS, AcsHkIds.MGM_SET)
if cmd_path_list[2] == "hk":
if cmd_path_list[3] == "one_shot_hk":
q.add_log_cmd("Sending HK one shot request")
# TODO: Fix
# q.add_pus_tc(
# create_request_one_hk_command(
# make_addressable_id(Apid.ACS, AcsId.MGM_SET)
# )
# )
if cmd_path_list[2] == "mode":
if cmd_path_list[3] == "set_mode":
handle_set_mode_cmd(
q, "MGM 0", cmd_path_list[4], Apid.ACS, AcsId.MGM_0
)
)
def handle_set_mode_cmd(
q: DefaultPusQueueHelper, target_str: str, mode_str: str, apid: int, unique_id: int
):
if mode_str == "off":
q.add_log_cmd(f"Sending Mode OFF to {target_str}")
q.add_pus_tc(create_set_mode_cmd(apid, unique_id, Mode.OFF, 0))
elif mode_str == "on":
q.add_log_cmd(f"Sending Mode ON to {target_str}")
q.add_pus_tc(create_set_mode_cmd(apid, unique_id, Mode.ON, 0))
elif mode_str == "normal":
q.add_log_cmd(f"Sending Mode NORMAL to {target_str}")
q.add_pus_tc(create_set_mode_cmd(apid, unique_id, Mode.NORMAL, 0))
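create_set_mode_cmd above packs a 10-byte big-endian application data field: a 4-byte unique component ID, a 4-byte mode and a 2-byte submode. A minimal sketch of the matching unpacking logic on the Rust on-board software side (the function name and error string are illustrative, not part of the commit):

fn unpack_mode_app_data(app_data: &[u8]) -> Result<(u32, u32, u16), &'static str> {
    // Mirrors struct.pack("!I", unique_id) + ("!I", mode) + ("!H", submode).
    if app_data.len() < 10 {
        return Err("app data too short for mode command");
    }
    let unique_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
    let mode = u32::from_be_bytes(app_data[4..8].try_into().unwrap());
    let submode = u16::from_be_bytes(app_data[8..10].try_into().unwrap());
    Ok((unique_id, mode, submode))
}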

View File

@ -1,118 +0,0 @@
use std::sync::mpsc::{self, TryRecvError};
use log::{info, warn};
use satrs::pus::verification::VerificationReportingProvider;
use satrs::pus::{EcssTmSender, PusTmWrapper};
use satrs::request::TargetAndApidId;
use satrs::spacepackets::ecss::hk::Subservice as HkSubservice;
use satrs::{
hk::HkRequest,
spacepackets::{
ecss::tm::{PusTmCreator, PusTmSecondaryHeader},
time::cds::{CdsTime, DaysLen16Bits},
SequenceFlags, SpHeader,
},
};
use satrs_example::config::{RequestTargetId, PUS_APID};
use crate::{
hk::{AcsHkIds, HkUniqueId},
requests::{Request, RequestWithToken},
update_time,
};
pub struct AcsTask<VerificationReporter: VerificationReportingProvider> {
timestamp: [u8; 7],
time_provider: CdsTime<DaysLen16Bits>,
verif_reporter: VerificationReporter,
tm_sender: Box<dyn EcssTmSender>,
request_rx: mpsc::Receiver<RequestWithToken>,
}
impl<VerificationReporter: VerificationReportingProvider> AcsTask<VerificationReporter> {
pub fn new(
tm_sender: impl EcssTmSender,
request_rx: mpsc::Receiver<RequestWithToken>,
verif_reporter: VerificationReporter,
) -> Self {
Self {
timestamp: [0; 7],
time_provider: CdsTime::new_with_u16_days(0, 0),
verif_reporter,
tm_sender: Box::new(tm_sender),
request_rx,
}
}
fn handle_hk_request(&mut self, target_id: u32, unique_id: u32) {
assert_eq!(target_id, RequestTargetId::AcsSubsystem as u32);
if unique_id == AcsHkIds::TestMgmSet as u32 {
let mut sp_header = SpHeader::tm(PUS_APID, SequenceFlags::Unsegmented, 0, 0).unwrap();
let sec_header = PusTmSecondaryHeader::new_simple(
3,
HkSubservice::TmHkPacket as u8,
&self.timestamp,
);
let mut buf: [u8; 8] = [0; 8];
let hk_id = HkUniqueId::new(target_id, unique_id);
hk_id.write_to_be_bytes(&mut buf).unwrap();
let pus_tm = PusTmCreator::new(&mut sp_header, sec_header, &buf, true);
self.tm_sender
.send_tm(PusTmWrapper::Direct(pus_tm))
.expect("Sending HK TM failed");
}
// TODO: Verification failure for invalid unique IDs.
}
pub fn try_reading_one_request(&mut self) -> bool {
match self.request_rx.try_recv() {
Ok(request) => {
info!(
"ACS thread: Received HK request {:?}",
request.targeted_request
);
let target_and_apid_id = TargetAndApidId::from(request.targeted_request.target_id);
match request.targeted_request.request {
Request::Hk(hk_req) => match hk_req {
HkRequest::OneShot(unique_id) => {
self.handle_hk_request(target_and_apid_id.target(), unique_id)
}
HkRequest::Enable(_) => {}
HkRequest::Disable(_) => {}
HkRequest::ModifyCollectionInterval(_, _) => {}
},
Request::Mode(_mode_req) => {
warn!("mode request handling not implemented yet")
}
Request::Action(_action_req) => {
warn!("action request handling not implemented yet")
}
}
let started_token = self
.verif_reporter
.start_success(request.token, &self.timestamp)
.expect("Sending start success failed");
self.verif_reporter
.completion_success(started_token, &self.timestamp)
.expect("Sending completion success failed");
true
}
Err(e) => match e {
TryRecvError::Empty => false,
TryRecvError::Disconnected => {
warn!("ACS thread: Message Queue TX disconnected!");
false
}
},
}
}
pub fn periodic_operation(&mut self) {
update_time(&mut self.time_provider, &mut self.timestamp);
loop {
if !self.try_reading_one_request() {
break;
}
}
}
}

View File

@ -0,0 +1,284 @@
use derive_new::new;
use satrs::hk::{HkRequest, HkRequestVariant};
use satrs::queue::{GenericSendError, GenericTargetedMessagingError};
use satrs::spacepackets::ecss::hk;
use satrs::spacepackets::ecss::tm::{PusTmCreator, PusTmSecondaryHeader};
use satrs::spacepackets::SpHeader;
use satrs_example::{DeviceMode, TimeStampHelper};
use std::sync::mpsc::{self};
use std::sync::{Arc, Mutex};
use satrs::mode::{
ModeAndSubmode, ModeError, ModeProvider, ModeReply, ModeRequest, ModeRequestHandler,
};
use satrs::pus::{EcssTmSenderCore, PusTmVariant};
use satrs::request::{GenericMessage, MessageMetadata, UniqueApidTargetId};
use satrs_example::config::components::PUS_MODE_SERVICE;
use crate::pus::hk::{HkReply, HkReplyVariant};
use crate::requests::CompositeRequest;
use serde::{Deserialize, Serialize};
const GAUSS_TO_MICROTESLA_FACTOR: f32 = 100.0;
// This is the selected resolution for the STM LIS3MDL device for the 4 Gauss sensitivity setting.
const FIELD_LSB_PER_GAUSS_4_SENS: f32 = 1.0 / 6842.0;
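// Editorial worked example, not part of the commit: chaining the two
// constants converts a raw sensor LSB reading to microtesla. A raw value
// of 6842 LSB at the 4 Gauss setting is exactly 1 Gauss, i.e. 100 uT:
// 6842.0 * GAUSS_TO_MICROTESLA_FACTOR * FIELD_LSB_PER_GAUSS_4_SENS ≈ 100.0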
pub trait SpiInterface {
type Error;
fn transfer(&mut self, tx: &[u8], rx: &mut [u8]) -> Result<(), Self::Error>;
}
#[derive(Default)]
pub struct SpiDummyInterface {
pub dummy_val_0: i16,
pub dummy_val_1: i16,
pub dummy_val_2: i16,
}
impl SpiInterface for SpiDummyInterface {
type Error = ();
fn transfer(&mut self, _tx: &[u8], rx: &mut [u8]) -> Result<(), Self::Error> {
rx[0..2].copy_from_slice(&self.dummy_val_0.to_be_bytes());
rx[2..4].copy_from_slice(&self.dummy_val_1.to_be_bytes());
rx[4..6].copy_from_slice(&self.dummy_val_2.to_be_bytes());
Ok(())
}
}
#[derive(Default, Debug, Copy, Clone, Serialize, Deserialize)]
pub struct MgmData {
pub valid: bool,
pub x: f32,
pub y: f32,
pub z: f32,
}
pub struct MpscModeLeafInterface {
pub request_rx: mpsc::Receiver<GenericMessage<ModeRequest>>,
pub reply_tx_to_pus: mpsc::Sender<GenericMessage<ModeReply>>,
pub reply_tx_to_parent: mpsc::Sender<GenericMessage<ModeReply>>,
}
/// Example MGM device handler strongly based on the LIS3MDL MEMS device.
#[derive(new)]
#[allow(clippy::too_many_arguments)]
pub struct MgmHandlerLis3Mdl<ComInterface: SpiInterface, TmSender: EcssTmSenderCore> {
id: UniqueApidTargetId,
dev_str: &'static str,
mode_interface: MpscModeLeafInterface,
composite_request_receiver: mpsc::Receiver<GenericMessage<CompositeRequest>>,
hk_reply_sender: mpsc::Sender<GenericMessage<HkReply>>,
tm_sender: TmSender,
com_interface: ComInterface,
shared_mgm_set: Arc<Mutex<MgmData>>,
#[new(value = "ModeAndSubmode::new(satrs_example::DeviceMode::Off as u32, 0)")]
mode_and_submode: ModeAndSubmode,
#[new(default)]
tx_buf: [u8; 12],
#[new(default)]
rx_buf: [u8; 12],
#[new(default)]
tm_buf: [u8; 16],
#[new(default)]
stamp_helper: TimeStampHelper,
}
impl<ComInterface: SpiInterface, TmSender: EcssTmSenderCore>
MgmHandlerLis3Mdl<ComInterface, TmSender>
{
pub fn periodic_operation(&mut self) {
self.stamp_helper.update_from_now();
// Handle requests.
self.handle_composite_requests();
self.handle_mode_requests();
if self.mode() == DeviceMode::Normal as u32 {
log::trace!("polling LIS3MDL sensor {}", self.dev_str);
// Communicate with the device.
let result = self.com_interface.transfer(&self.tx_buf, &mut self.rx_buf);
assert!(result.is_ok());
// Actual data begins on the second byte, similar to how many SPI devices behave.
let x_raw = i16::from_be_bytes(self.rx_buf[1..3].try_into().unwrap());
let y_raw = i16::from_be_bytes(self.rx_buf[3..5].try_into().unwrap());
let z_raw = i16::from_be_bytes(self.rx_buf[5..7].try_into().unwrap());
// Simple scaling to retrieve the float value, assuming the 4 Gauss sensor resolution configured above.
let mut mgm_guard = self.shared_mgm_set.lock().unwrap();
mgm_guard.x = x_raw as f32 * GAUSS_TO_MICROTESLA_FACTOR * FIELD_LSB_PER_GAUSS_4_SENS;
mgm_guard.y = y_raw as f32 * GAUSS_TO_MICROTESLA_FACTOR * FIELD_LSB_PER_GAUSS_4_SENS;
mgm_guard.z = z_raw as f32 * GAUSS_TO_MICROTESLA_FACTOR * FIELD_LSB_PER_GAUSS_4_SENS;
drop(mgm_guard);
}
}
pub fn handle_composite_requests(&mut self) {
loop {
match self.composite_request_receiver.try_recv() {
Ok(ref msg) => match &msg.message {
CompositeRequest::Hk(hk_request) => {
self.handle_hk_request(&msg.requestor_info, hk_request)
}
// TODO: This object does not have actions (yet). Still send back a completion failure
// reply.
CompositeRequest::Action(_action_req) => {}
},
Err(e) => {
if e != mpsc::TryRecvError::Empty {
log::warn!(
"{}: failed to receive composite request: {:?}",
self.dev_str,
e
);
} else {
break;
}
}
}
}
}
pub fn handle_hk_request(&mut self, requestor_info: &MessageMetadata, hk_request: &HkRequest) {
match hk_request.variant {
HkRequestVariant::OneShot => {
self.hk_reply_sender
.send(GenericMessage::new(
*requestor_info,
HkReply::new(hk_request.unique_id, HkReplyVariant::Ack),
))
.expect("failed to send HK reply");
let sec_header = PusTmSecondaryHeader::new(
3,
hk::Subservice::TmHkPacket as u8,
0,
0,
self.stamp_helper.stamp(),
);
let mgm_snapshot = *self.shared_mgm_set.lock().unwrap();
// Use binary serialization here. We want the data to be tightly packed.
self.tm_buf[0] = mgm_snapshot.valid as u8;
self.tm_buf[1..5].copy_from_slice(&mgm_snapshot.x.to_be_bytes());
self.tm_buf[5..9].copy_from_slice(&mgm_snapshot.y.to_be_bytes());
self.tm_buf[9..13].copy_from_slice(&mgm_snapshot.z.to_be_bytes());
let hk_tm = PusTmCreator::new(
SpHeader::new_from_apid(self.id.apid),
sec_header,
&self.tm_buf[0..13],
true,
);
self.tm_sender
.send_tm(self.id.id(), PusTmVariant::Direct(hk_tm))
.expect("failed to send HK TM");
}
HkRequestVariant::EnablePeriodic => todo!(),
HkRequestVariant::DisablePeriodic => todo!(),
HkRequestVariant::ModifyCollectionInterval(_) => todo!(),
}
}
pub fn handle_mode_requests(&mut self) {
loop {
// TODO: Only allow one set mode request per cycle?
match self.mode_interface.request_rx.try_recv() {
Ok(msg) => {
let result = self.handle_mode_request(msg);
// TODO: Trigger event?
if result.is_err() {
log::warn!(
"{}: mode request failed with error {:?}",
self.dev_str,
result.err().unwrap()
);
}
}
Err(e) => {
if e != mpsc::TryRecvError::Empty {
log::warn!("{}: failed to receive mode request: {:?}", self.dev_str, e);
} else {
break;
}
}
}
}
}
}
impl<ComInterface: SpiInterface, TmSender: EcssTmSenderCore> ModeProvider
for MgmHandlerLis3Mdl<ComInterface, TmSender>
{
fn mode_and_submode(&self) -> ModeAndSubmode {
self.mode_and_submode
}
}
impl<ComInterface: SpiInterface, TmSender: EcssTmSenderCore> ModeRequestHandler
for MgmHandlerLis3Mdl<ComInterface, TmSender>
{
type Error = ModeError;
fn start_transition(
&mut self,
requestor: MessageMetadata,
mode_and_submode: ModeAndSubmode,
) -> Result<(), satrs::mode::ModeError> {
log::info!(
"{}: transitioning to mode {:?}",
self.dev_str,
mode_and_submode
);
self.mode_and_submode = mode_and_submode;
self.handle_mode_reached(Some(requestor))?;
Ok(())
}
fn announce_mode(&self, _requestor_info: Option<MessageMetadata>, _recursive: bool) {
log::info!(
"{} announcing mode: {:?}",
self.dev_str,
self.mode_and_submode
);
}
fn handle_mode_reached(
&mut self,
requestor: Option<MessageMetadata>,
) -> Result<(), Self::Error> {
self.announce_mode(requestor, false);
if let Some(requestor) = requestor {
if requestor.sender_id() != PUS_MODE_SERVICE.id() {
log::warn!(
"can not send back mode reply to sender {}",
requestor.sender_id()
);
} else {
self.send_mode_reply(requestor, ModeReply::ModeReply(self.mode_and_submode()))?;
}
}
Ok(())
}
fn send_mode_reply(
&self,
requestor: MessageMetadata,
reply: ModeReply,
) -> Result<(), Self::Error> {
if requestor.sender_id() != PUS_MODE_SERVICE.id() {
log::warn!(
"can not send back mode reply to sender {}",
requestor.sender_id()
);
}
self.mode_interface
.reply_tx_to_pus
.send(GenericMessage::new(requestor, reply))
.map_err(|_| GenericTargetedMessagingError::Send(GenericSendError::RxDisconnected))?;
Ok(())
}
fn handle_mode_info(
&mut self,
_requestor_info: MessageMetadata,
_info: ModeAndSubmode,
) -> Result<(), Self::Error> {
Ok(())
}
}
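The one-shot HK reply generated above serializes the shared MGM set into 13 bytes: one validity byte followed by the three axis values as big-endian f32. A minimal ground-side sketch for unpacking that layout (the function name is illustrative, not part of the commit):

fn unpack_mgm_hk(buf: &[u8]) -> Option<(bool, f32, f32, f32)> {
    // One validity byte plus three big endian f32 axis values in microtesla.
    if buf.len() < 13 {
        return None;
    }
    let valid = buf[0] != 0;
    let x = f32::from_be_bytes(buf[1..5].try_into().unwrap());
    let y = f32::from_be_bytes(buf[5..9].try_into().unwrap());
    let z = f32::from_be_bytes(buf[9..13].try_into().unwrap());
    Some((valid, x, y, z))
}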

View File

@ -0,0 +1 @@
pub mod mgm;

View File

@ -12,8 +12,7 @@ use std::time::Duration;
fn main() {
let mut buf = [0; 32];
let addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
let mut sph = SpHeader::tc_unseg(0x02, 0, 0).unwrap();
let pus_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true);
let pus_tc = PusTcCreator::new_simple(SpHeader::new_from_apid(0x02), 17, 1, &[], true);
let client = UdpSocket::bind("127.0.0.1:7302").expect("Connecting to UDP server failed");
let tc_req_id = RequestId::new(&pus_tc);
println!("Packing and sending PUS ping command TC[17,1] with request ID {tc_req_id}");

View File

@ -1,7 +1,9 @@
use satrs::pus::ReceivesEcssPusTc;
use satrs::spacepackets::{CcsdsPacket, SpHeader};
use satrs::tmtc::{CcsdsPacketHandler, ReceivesCcsdsTc};
use satrs_example::config::PUS_APID;
use satrs::ValidatorU16Id;
use satrs_example::config::components::Apid;
use satrs_example::config::APID_VALIDATOR;
#[derive(Clone)]
pub struct CcsdsReceiver<
@ -11,6 +13,16 @@ pub struct CcsdsReceiver<
pub tc_source: TcSource,
}
impl<
TcSource: ReceivesCcsdsTc<Error = E> + ReceivesEcssPusTc<Error = E> + Clone + 'static,
E: 'static,
> ValidatorU16Id for CcsdsReceiver<TcSource, E>
{
fn validate(&self, apid: u16) -> bool {
APID_VALIDATOR.contains(&apid)
}
}
impl<
TcSource: ReceivesCcsdsTc<Error = E> + ReceivesEcssPusTc<Error = E> + Clone + 'static,
E: 'static,
@ -18,27 +30,24 @@ impl<
{
type Error = E;
fn valid_apids(&self) -> &'static [u16] {
&[PUS_APID]
}
fn handle_known_apid(
fn handle_packet_with_valid_apid(
&mut self,
sp_header: &SpHeader,
tc_raw: &[u8],
) -> Result<(), Self::Error> {
if sp_header.apid() == PUS_APID {
if sp_header.apid() == Apid::Cfdp as u16 {
} else {
return self.tc_source.pass_ccsds(sp_header, tc_raw);
}
Ok(())
}
fn handle_unknown_apid(
fn handle_packet_with_unknown_apid(
&mut self,
sp_header: &SpHeader,
_tc_raw: &[u8],
) -> Result<(), Self::Error> {
println!("Unknown APID 0x{:x?} detected", sp_header.apid());
log::warn!("unknown APID 0x{:x?} detected", sp_header.apid());
Ok(())
}
}

View File

@ -1,7 +1,12 @@
use satrs::res_code::ResultU16;
use lazy_static::lazy_static;
use satrs::{
res_code::ResultU16,
spacepackets::{PacketId, PacketType},
};
use satrs_mib::res_code::ResultU16Info;
use satrs_mib::resultcode;
use std::net::Ipv4Addr;
use std::{collections::HashSet, net::Ipv4Addr};
use strum::IntoEnumIterator;
use num_enum::{IntoPrimitive, TryFromPrimitive};
use satrs::{
@ -9,8 +14,6 @@ use satrs::{
pool::{StaticMemoryPool, StaticPoolConfig},
};
pub const PUS_APID: u16 = 0x02;
#[derive(Copy, Clone, PartialEq, Eq, Debug, TryFromPrimitive, IntoPrimitive)]
#[repr(u8)]
pub enum CustomPusServiceId {
@ -29,6 +32,7 @@ pub const AOCS_APID: u16 = 1;
pub enum GroupId {
Tmtc = 0,
Hk = 1,
Mode = 2,
}
pub const OBSW_SERVER_ADDR: Ipv4Addr = Ipv4Addr::UNSPECIFIED;
@ -37,6 +41,23 @@ pub const SERVER_PORT: u16 = 7301;
pub const TEST_EVENT: EventU32TypedSev<SeverityInfo> =
EventU32TypedSev::<SeverityInfo>::const_new(0, 0);
lazy_static! {
pub static ref PACKET_ID_VALIDATOR: HashSet<PacketId> = {
let mut set = HashSet::new();
for id in components::Apid::iter() {
set.insert(PacketId::new(PacketType::Tc, true, id as u16));
}
set
};
pub static ref APID_VALIDATOR: HashSet<u16> = {
let mut set = HashSet::new();
for id in components::Apid::iter() {
set.insert(id as u16);
}
set
};
}
pub mod tmtc_err {
use super::*;
@ -53,6 +74,8 @@ pub mod tmtc_err {
pub const UNKNOWN_TARGET_ID: ResultU16 = ResultU16::new(GroupId::Tmtc as u8, 4);
#[resultcode]
pub const ROUTING_ERROR: ResultU16 = ResultU16::new(GroupId::Tmtc as u8, 5);
#[resultcode(info = "Request timeout for targeted PUS request. P1: Request ID. P2: Target ID")]
pub const REQUEST_TIMEOUT: ResultU16 = ResultU16::new(GroupId::Tmtc as u8, 6);
#[resultcode(
info = "Not enough data inside the TC application data field. Optionally includes: \
@ -92,27 +115,59 @@ pub mod hk_err {
];
}
#[allow(clippy::enum_variant_names)]
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum TmSenderId {
PusVerification = 0,
PusTest = 1,
PusEvent = 2,
PusHk = 3,
PusAction = 4,
PusSched = 5,
AllEvents = 6,
AcsSubsystem = 7,
pub mod mode_err {
use super::*;
#[resultcode]
pub const WRONG_MODE: ResultU16 = ResultU16::new(GroupId::Mode as u8, 0);
}
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum TcReceiverId {
PusTest = 1,
PusEvent = 2,
PusHk = 3,
PusAction = 4,
PusSched = 5,
pub mod components {
use satrs::request::UniqueApidTargetId;
use strum::EnumIter;
#[derive(Copy, Clone, PartialEq, Eq, EnumIter)]
pub enum Apid {
Sched = 1,
GenericPus = 2,
Acs = 3,
Cfdp = 4,
}
// Component IDs for components with the PUS APID.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum PusId {
PusEventManagement = 0,
PusRouting = 1,
PusTest = 2,
PusAction = 3,
PusMode = 4,
PusHk = 5,
}
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum AcsId {
Mgm0 = 0,
}
pub const PUS_ACTION_SERVICE: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::GenericPus as u16, PusId::PusAction as u32);
pub const PUS_EVENT_MANAGEMENT: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::GenericPus as u16, 0);
pub const PUS_ROUTING_SERVICE: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::GenericPus as u16, PusId::PusRouting as u32);
pub const PUS_TEST_SERVICE: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::GenericPus as u16, PusId::PusTest as u32);
pub const PUS_MODE_SERVICE: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::GenericPus as u16, PusId::PusMode as u32);
pub const PUS_HK_SERVICE: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::GenericPus as u16, PusId::PusHk as u32);
pub const PUS_SCHED_SERVICE: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::Sched as u16, 0);
pub const MGM_HANDLER_0: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::Acs as u16, AcsId::Mgm0 as u32);
}
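// Editorial usage sketch, not part of the commit: a UniqueApidTargetId
// combines a 16 bit APID with a 32 bit unique component ID into one
// system-wide component ID while keeping both parts individually
// accessible for routing and TM generation:
//
//     let mgm_id = UniqueApidTargetId::new(Apid::Acs as u16, AcsId::Mgm0 as u32);
//     assert_eq!(mgm_id.apid, Apid::Acs as u16);
//     assert_eq!(mgm_id.unique_id, AcsId::Mgm0 as u32);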
pub mod pool {
use super::*;
pub fn create_static_pools() -> (StaticMemoryPool, StaticMemoryPool) {

View File

@ -1,66 +1,87 @@
use std::sync::mpsc::{self};
use crate::pus::create_verification_reporter;
use satrs::event_man::{EventMessageU32, EventRoutingError};
use satrs::params::WritableToBeBytes;
use satrs::pus::event::EventTmHookProvider;
use satrs::pus::verification::VerificationReporter;
use satrs::pus::EcssTmSenderCore;
use satrs::request::UniqueApidTargetId;
use satrs::{
event_man::{
EventManagerWithBoundedMpsc, EventSendProvider, EventU32SenderMpscBounded,
MpscEventReceiver,
},
events::EventU32,
params::Params,
pus::{
event_man::{
DefaultPusEventU32Dispatcher, EventReporter, EventRequest, EventRequestWithToken,
},
verification::{TcStateStarted, VerificationReportingProvider, VerificationToken},
EcssTmSender,
},
spacepackets::time::cds::{self, CdsTime},
spacepackets::time::cds::CdsTime,
};
use satrs_example::config::PUS_APID;
use satrs_example::config::components::PUS_EVENT_MANAGEMENT;
use crate::update_time;
pub struct PusEventHandler<VerificationReporter: VerificationReportingProvider> {
// This helper sets the APID of the event sender for the PUS telemetry.
#[derive(Default)]
pub struct EventApidSetter {
pub next_apid: u16,
}
impl EventTmHookProvider for EventApidSetter {
fn modify_tm(&self, tm: &mut satrs::spacepackets::ecss::tm::PusTmCreator) {
tm.set_apid(self.next_apid);
}
}
/// The PUS event handler subscribes for all events and converts them into ECSS PUS 5 event
/// packets. It also handles the verification completion of PUS event service requests.
pub struct PusEventHandler<TmSender: EcssTmSenderCore> {
event_request_rx: mpsc::Receiver<EventRequestWithToken>,
pus_event_dispatcher: DefaultPusEventU32Dispatcher<()>,
pus_event_man_rx: mpsc::Receiver<(EventU32, Option<Params>)>,
tm_sender: Box<dyn EcssTmSender>,
pus_event_man_rx: mpsc::Receiver<EventMessageU32>,
tm_sender: TmSender,
time_provider: CdsTime,
timestamp: [u8; 7],
verif_handler: VerificationReporter,
event_apid_setter: EventApidSetter,
}
impl<VerificationReporter: VerificationReportingProvider> PusEventHandler<VerificationReporter> {
impl<TmSender: EcssTmSenderCore> PusEventHandler<TmSender> {
pub fn new(
tm_sender: TmSender,
verif_handler: VerificationReporter,
event_manager: &mut EventManagerWithBoundedMpsc,
event_request_rx: mpsc::Receiver<EventRequestWithToken>,
tm_sender: impl EcssTmSender,
) -> Self {
let event_queue_cap = 30;
let (pus_event_man_tx, pus_event_man_rx) = mpsc::sync_channel(event_queue_cap);
// All events sent to the manager are routed to the PUS event manager, which generates PUS event
// telemetry for each event.
let event_reporter = EventReporter::new(PUS_APID, 128).unwrap();
let event_reporter = EventReporter::new(PUS_EVENT_MANAGEMENT.raw(), 0, 0, 128).unwrap();
let pus_event_dispatcher =
DefaultPusEventU32Dispatcher::new_with_default_backend(event_reporter);
let pus_event_man_send_provider =
EventU32SenderMpscBounded::new(1, pus_event_man_tx, event_queue_cap);
let pus_event_man_send_provider = EventU32SenderMpscBounded::new(
PUS_EVENT_MANAGEMENT.raw(),
pus_event_man_tx,
event_queue_cap,
);
event_manager.subscribe_all(pus_event_man_send_provider.channel_id());
event_manager.subscribe_all(pus_event_man_send_provider.target_id());
event_manager.add_sender(pus_event_man_send_provider);
Self {
event_request_rx,
pus_event_dispatcher,
pus_event_man_rx,
time_provider: cds::CdsTime::new_with_u16_days(0, 0),
time_provider: CdsTime::new_with_u16_days(0, 0),
timestamp: [0; 7],
verif_handler,
tm_sender: Box::new(tm_sender),
tm_sender,
event_apid_setter: EventApidSetter::default(),
}
}
@ -71,7 +92,7 @@ impl<VerificationReporter: VerificationReportingProvider> PusEventHandler<Verifi
.try_into()
.expect("expected start verification token");
self.verif_handler
.completion_success(started_token, timestamp)
.completion_success(&self.tm_sender, started_token, timestamp)
.expect("Sending completion success failed");
};
// handle event requests
@ -97,23 +118,29 @@ impl<VerificationReporter: VerificationReportingProvider> PusEventHandler<Verifi
pub fn generate_pus_event_tm(&mut self) {
// Perform the generation of PUS event packets
if let Ok((event, _param)) = self.pus_event_man_rx.try_recv() {
if let Ok(event_msg) = self.pus_event_man_rx.try_recv() {
update_time(&mut self.time_provider, &mut self.timestamp);
let param_vec = event_msg.params().map_or(Vec::new(), |param| {
param.to_vec().expect("failed to convert params to vec")
});
self.event_apid_setter.next_apid = UniqueApidTargetId::from(event_msg.sender_id()).apid;
self.pus_event_dispatcher
.generate_pus_event_tm_generic(
self.tm_sender.upcast_mut(),
&self.tm_sender,
&self.timestamp,
event,
None,
event_msg.event(),
Some(&param_vec),
)
.expect("Sending TM as event failed");
}
}
}
/// This is a thin wrapper around the event manager which also caches the sender component
/// used to send events to the event manager.
pub struct EventManagerWrapper {
event_manager: EventManagerWithBoundedMpsc,
event_sender: mpsc::Sender<(EventU32, Option<Params>)>,
event_sender: mpsc::Sender<EventMessageU32>,
}
impl EventManagerWrapper {
@ -121,14 +148,15 @@ impl EventManagerWrapper {
// The sender handle is the primary handle for all components which want to create events.
// The event manager will receive the RX handle to receive all the events.
let (event_sender, event_man_rx) = mpsc::channel();
let event_recv = MpscEventReceiver::<EventU32>::new(event_man_rx);
let event_recv = MpscEventReceiver::new(event_man_rx);
Self {
event_manager: EventManagerWithBoundedMpsc::new(event_recv),
event_sender,
}
}
pub fn clone_event_sender(&self) -> mpsc::Sender<(EventU32, Option<Params>)> {
// Returns a cached event sender to send events to the event manager for routing.
pub fn clone_event_sender(&self) -> mpsc::Sender<EventMessageU32> {
self.event_sender.clone()
}
@ -137,30 +165,34 @@ impl EventManagerWrapper {
}
pub fn try_event_routing(&mut self) {
let error_handler = |event_msg: &EventMessageU32, error: EventRoutingError| {
self.routing_error_handler(event_msg, error)
};
// Perform the event routing.
self.event_manager
.try_event_handling()
.expect("event handling failed");
self.event_manager.try_event_handling(error_handler);
}
pub fn routing_error_handler(&self, event_msg: &EventMessageU32, error: EventRoutingError) {
log::warn!("event routing error for event {event_msg:?}: {error:?}");
}
}
pub struct EventHandler<VerificationReporter: VerificationReportingProvider> {
pub struct EventHandler<TmSender: EcssTmSenderCore> {
pub event_man_wrapper: EventManagerWrapper,
pub pus_event_handler: PusEventHandler<VerificationReporter>,
pub pus_event_handler: PusEventHandler<TmSender>,
}
impl<VerificationReporter: VerificationReportingProvider> EventHandler<VerificationReporter> {
impl<TmSender: EcssTmSenderCore> EventHandler<TmSender> {
pub fn new(
tm_sender: impl EcssTmSender,
verif_handler: VerificationReporter,
tm_sender: TmSender,
event_request_rx: mpsc::Receiver<EventRequestWithToken>,
) -> Self {
let mut event_man_wrapper = EventManagerWrapper::new();
let pus_event_handler = PusEventHandler::new(
verif_handler,
tm_sender,
create_verification_reporter(PUS_EVENT_MANAGEMENT.id(), PUS_EVENT_MANAGEMENT.apid),
event_man_wrapper.event_manager(),
event_request_rx,
tm_sender,
);
Self {
event_man_wrapper,
@ -168,7 +200,7 @@ impl<VerificationReporter: VerificationReportingProvider> EventHandler<Verificat
}
}
pub fn clone_event_sender(&self) -> mpsc::Sender<(EventU32, Option<Params>)> {
pub fn clone_event_sender(&self) -> mpsc::Sender<EventMessageU32> {
self.event_man_wrapper.clone_event_sender()
}
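The wrapper pattern above keeps the event plumbing in one place: components clone the cached sender once, and the event task performs the routing periodically. A minimal usage sketch relying only on the methods defined in this file:

let mut event_man_wrapper = EventManagerWrapper::new();
// Every event producer receives its own cloned sender handle.
let _event_tx = event_man_wrapper.clone_event_sender();
// Called periodically by the event task; routing errors are forwarded to
// routing_error_handler, which logs them.
event_man_wrapper.try_event_routing();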

View File

@ -1,27 +1,25 @@
use derive_new::new;
use satrs::hk::UniqueId;
use satrs::request::UniqueApidTargetId;
use satrs::spacepackets::ByteConversionError;
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum AcsHkIds {
TestMgmSet = 1,
}
#[derive(Debug, new, Copy, Clone)]
pub struct HkUniqueId {
target_id: u32,
set_id: u32,
target_id: UniqueApidTargetId,
set_id: UniqueId,
}
impl HkUniqueId {
#[allow(dead_code)]
pub fn target_id(&self) -> u32 {
pub fn target_id(&self) -> UniqueApidTargetId {
self.target_id
}
#[allow(dead_code)]
pub fn set_id(&self) -> u32 {
pub fn set_id(&self) -> UniqueId {
self.set_id
}
#[allow(dead_code)]
pub fn write_to_be_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
if buf.len() < 8 {
return Err(ByteConversionError::ToSliceTooSmall {
@ -29,7 +27,7 @@ impl HkUniqueId {
expected: 8,
});
}
buf[0..4].copy_from_slice(&self.target_id.to_be_bytes());
buf[0..4].copy_from_slice(&self.target_id.unique_id.to_be_bytes());
buf[4..8].copy_from_slice(&self.set_id.to_be_bytes());
Ok(8)

View File

@ -1 +1,39 @@
use satrs::spacepackets::time::{cds::CdsTime, TimeWriter};
pub mod config;
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum DeviceMode {
Off = 0,
On = 1,
Normal = 2,
}
pub struct TimeStampHelper {
stamper: CdsTime,
time_stamp: [u8; 7],
}
impl TimeStampHelper {
pub fn stamp(&self) -> &[u8] {
&self.time_stamp
}
pub fn update_from_now(&mut self) {
self.stamper
.update_from_now()
.expect("Updating timestamp failed");
self.stamper
.write_to_bytes(&mut self.time_stamp)
.expect("Writing timestamp failed");
}
}
impl Default for TimeStampHelper {
fn default() -> Self {
Self {
stamper: CdsTime::now_with_u16_days().expect("creating time stamper failed"),
time_stamp: Default::default(),
}
}
}
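TimeStampHelper pairs a CDS time provider with its serialized 7-byte form so that all telemetry generated within one cycle shares a single timestamp. A short usage sketch using only the API defined above:

let mut stamp_helper = TimeStampHelper::default();
// Refresh once per cycle, then borrow the raw stamp for all TM of that cycle.
stamp_helper.update_from_now();
let time_stamp: &[u8] = stamp_helper.stamp();
assert_eq!(time_stamp.len(), 7);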

View File

@ -17,52 +17,44 @@ use log::info;
use pus::test::create_test_service_dynamic;
use satrs::hal::std::tcp_server::ServerConfig;
use satrs::hal::std::udp_server::UdpTcServer;
use satrs::request::TargetAndApidId;
use satrs::request::GenericMessage;
use satrs::tmtc::tm_helper::SharedTmPool;
use satrs_example::config::pool::{create_sched_tc_pool, create_static_pools};
use satrs_example::config::tasks::{
FREQ_MS_AOCS, FREQ_MS_EVENT_HANDLING, FREQ_MS_PUS_STACK, FREQ_MS_UDP_TMTC,
};
use satrs_example::config::{RequestTargetId, TmSenderId, OBSW_SERVER_ADDR, PUS_APID, SERVER_PORT};
use satrs_example::config::{OBSW_SERVER_ADDR, PACKET_ID_VALIDATOR, SERVER_PORT};
use tmtc::PusTcSourceProviderDynamic;
use udp::DynamicUdpTmHandler;
use crate::acs::AcsTask;
use crate::acs::mgm::{MgmHandlerLis3Mdl, MpscModeLeafInterface, SpiDummyInterface};
use crate::ccsds::CcsdsReceiver;
use crate::logger::setup_logger;
use crate::pus::action::{create_action_service_dynamic, create_action_service_static};
use crate::pus::event::{create_event_service_dynamic, create_event_service_static};
use crate::pus::hk::{create_hk_service_dynamic, create_hk_service_static};
use crate::pus::mode::{create_mode_service_dynamic, create_mode_service_static};
use crate::pus::scheduler::{create_scheduler_service_dynamic, create_scheduler_service_static};
use crate::pus::test::create_test_service_static;
use crate::pus::{PusReceiver, PusTcMpscRouter};
use crate::requests::{GenericRequestRouter, RequestWithToken};
use crate::requests::{CompositeRequest, GenericRequestRouter};
use crate::tcp::{SyncTcpTmSource, TcpTask};
use crate::tmtc::{
PusTcSourceProviderSharedPool, SharedTcPool, TcSourceTaskDynamic, TcSourceTaskStatic,
};
use crate::udp::{StaticUdpTmHandler, UdpTmtcServer};
use satrs::mode::ModeRequest;
use satrs::pus::event_man::EventRequestWithToken;
use satrs::pus::verification::{VerificationReporterCfg, VerificationReporterWithSender};
use satrs::pus::{EcssTmSender, TmAsVecSenderWithId, TmInSharedPoolSenderWithId};
use satrs::pus::TmInSharedPoolSender;
use satrs::spacepackets::{time::cds::CdsTime, time::TimeWriter};
use satrs::tmtc::CcsdsDistributor;
use satrs::ChannelId;
use satrs_example::config::components::MGM_HANDLER_0;
use std::net::{IpAddr, SocketAddr};
use std::sync::mpsc::{self, channel};
use std::sync::mpsc;
use std::sync::{Arc, RwLock};
use std::thread;
use std::time::Duration;
fn create_verification_reporter<Sender: EcssTmSender + Clone>(
verif_sender: Sender,
) -> VerificationReporterWithSender<Sender> {
let verif_cfg = VerificationReporterCfg::new(PUS_APID, 1, 2, 8).unwrap();
// Every software component which needs to generate verification telemetry gets a cloned
// verification reporter.
VerificationReporterWithSender::new(&verif_cfg, verif_sender)
}
#[allow(dead_code)]
fn static_tmtc_pool_main() {
let (tm_pool, tc_pool) = create_static_pools();
@ -74,20 +66,21 @@ fn static_tmtc_pool_main() {
let (tm_funnel_tx, tm_funnel_rx) = mpsc::sync_channel(50);
let (tm_server_tx, tm_server_rx) = mpsc::sync_channel(50);
// Every software component which needs to generate verification telemetry receives a cloned
// verification reporter.
let verif_reporter = create_verification_reporter(TmInSharedPoolSenderWithId::new(
TmSenderId::PusVerification as ChannelId,
"verif_sender",
shared_tm_pool.clone(),
tm_funnel_tx.clone(),
));
let tm_funnel_tx_sender =
TmInSharedPoolSender::new(shared_tm_pool.clone(), tm_funnel_tx.clone());
let (mgm_handler_composite_tx, mgm_handler_composite_rx) =
mpsc::channel::<GenericMessage<CompositeRequest>>();
let (mgm_handler_mode_tx, mgm_handler_mode_rx) = mpsc::channel::<GenericMessage<ModeRequest>>();
let acs_target_id = TargetAndApidId::new(PUS_APID, RequestTargetId::AcsSubsystem as u32);
let (acs_thread_tx, acs_thread_rx) = channel::<RequestWithToken>();
// Some requests are targetable. This map is used to retrieve sender handles based on a target ID.
let mut request_map = GenericRequestRouter::default();
request_map.0.insert(acs_target_id.into(), acs_thread_tx);
request_map
.composite_router_map
.insert(MGM_HANDLER_0.id(), mgm_handler_composite_tx);
request_map
.mode_router_map
.insert(MGM_HANDLER_0.id(), mgm_handler_mode_tx);
// This helper structure is used by all telecommand providers which need to send telecommands
// to the TC source.
@ -103,82 +96,80 @@ fn static_tmtc_pool_main() {
// The event task is the core handler to perform the event routing and TM handling as specified
// in the sat-rs documentation.
let mut event_handler = EventHandler::new(
TmInSharedPoolSenderWithId::new(
TmSenderId::AllEvents as ChannelId,
"ALL_EVENTS_TX",
shared_tm_pool.clone(),
tm_funnel_tx.clone(),
),
verif_reporter.clone(),
event_request_rx,
);
let mut event_handler = EventHandler::new(tm_funnel_tx.clone(), event_request_rx);
let (pus_test_tx, pus_test_rx) = mpsc::channel();
let (pus_event_tx, pus_event_rx) = mpsc::channel();
let (pus_sched_tx, pus_sched_rx) = mpsc::channel();
let (pus_hk_tx, pus_hk_rx) = mpsc::channel();
let (pus_action_tx, pus_action_rx) = mpsc::channel();
let (pus_mode_tx, pus_mode_rx) = mpsc::channel();
let (_pus_action_reply_tx, pus_action_reply_rx) = mpsc::channel();
let (pus_hk_reply_tx, pus_hk_reply_rx) = mpsc::channel();
let (pus_mode_reply_tx, pus_mode_reply_rx) = mpsc::channel();
let (pus_test_tx, pus_test_rx) = channel();
let (pus_event_tx, pus_event_rx) = channel();
let (pus_sched_tx, pus_sched_rx) = channel();
let (pus_hk_tx, pus_hk_rx) = channel();
let (pus_action_tx, pus_action_rx) = channel();
let pus_router = PusTcMpscRouter {
test_service_receiver: pus_test_tx,
event_service_receiver: pus_event_tx,
sched_service_receiver: pus_sched_tx,
hk_service_receiver: pus_hk_tx,
action_service_receiver: pus_action_tx,
test_tc_sender: pus_test_tx,
event_tc_sender: pus_event_tx,
sched_tc_sender: pus_sched_tx,
hk_tc_sender: pus_hk_tx,
action_tc_sender: pus_action_tx,
mode_tc_sender: pus_mode_tx,
};
let pus_test_service = create_test_service_static(
shared_tm_pool.clone(),
tm_funnel_tx.clone(),
verif_reporter.clone(),
tm_funnel_tx_sender.clone(),
shared_tc_pool.pool.clone(),
event_handler.clone_event_sender(),
pus_test_rx,
);
let pus_scheduler_service = create_scheduler_service_static(
shared_tm_pool.clone(),
tm_funnel_tx.clone(),
verif_reporter.clone(),
tm_funnel_tx_sender.clone(),
tc_source.clone(),
pus_sched_rx,
create_sched_tc_pool(),
);
let pus_event_service = create_event_service_static(
shared_tm_pool.clone(),
tm_funnel_tx.clone(),
verif_reporter.clone(),
tm_funnel_tx_sender.clone(),
shared_tc_pool.pool.clone(),
pus_event_rx,
event_request_tx,
);
let pus_action_service = create_action_service_static(
shared_tm_pool.clone(),
tm_funnel_tx.clone(),
verif_reporter.clone(),
tm_funnel_tx_sender.clone(),
shared_tc_pool.pool.clone(),
pus_action_rx,
request_map.clone(),
pus_action_reply_rx,
);
let pus_hk_service = create_hk_service_static(
shared_tm_pool.clone(),
tm_funnel_tx.clone(),
verif_reporter.clone(),
tm_funnel_tx_sender.clone(),
shared_tc_pool.pool.clone(),
pus_hk_rx,
request_map.clone(),
pus_hk_reply_rx,
);
let pus_mode_service = create_mode_service_static(
tm_funnel_tx_sender.clone(),
shared_tc_pool.pool.clone(),
pus_mode_rx,
request_map,
pus_mode_reply_rx,
);
let mut pus_stack = PusStack::new(
pus_test_service,
pus_hk_service,
pus_event_service,
pus_action_service,
pus_scheduler_service,
pus_test_service,
pus_mode_service,
);
let ccsds_receiver = CcsdsReceiver { tc_source };
let mut tmtc_task = TcSourceTaskStatic::new(
shared_tc_pool.clone(),
tc_source_rx,
PusReceiver::new(verif_reporter.clone(), pus_router),
PusReceiver::new(tm_funnel_tx_sender, pus_router),
);
let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
@ -200,20 +191,10 @@ fn static_tmtc_pool_main() {
tcp_server_cfg,
sync_tm_tcp_source.clone(),
tcp_ccsds_distributor,
PACKET_ID_VALIDATOR.clone(),
)
.expect("tcp server creation failed");
let mut acs_task = AcsTask::new(
TmInSharedPoolSenderWithId::new(
TmSenderId::AcsSubsystem as ChannelId,
"ACS_TASK_SENDER",
shared_tm_pool.clone(),
tm_funnel_tx.clone(),
),
acs_thread_rx,
verif_reporter,
);
let mut tm_funnel = TmFunnelStatic::new(
shared_tm_pool,
sync_tm_tcp_source,
@ -221,6 +202,27 @@ fn static_tmtc_pool_main() {
tm_server_tx,
);
let (mgm_handler_mode_reply_to_parent_tx, _mgm_handler_mode_reply_to_parent_rx) =
mpsc::channel();
let dummy_spi_interface = SpiDummyInterface::default();
let shared_mgm_set = Arc::default();
let mode_leaf_interface = MpscModeLeafInterface {
request_rx: mgm_handler_mode_rx,
reply_tx_to_pus: pus_mode_reply_tx,
reply_tx_to_parent: mgm_handler_mode_reply_to_parent_tx,
};
let mut mgm_handler = MgmHandlerLis3Mdl::new(
MGM_HANDLER_0,
"MGM_0",
mode_leaf_interface,
mgm_handler_composite_rx,
pus_hk_reply_tx,
tm_funnel_tx,
dummy_spi_interface,
shared_mgm_set,
);
info!("Starting TMTC and UDP task");
let jh_udp_tmtc = thread::Builder::new()
.name("TMTC and UDP".to_string())
@ -266,7 +268,7 @@ fn static_tmtc_pool_main() {
let jh_aocs = thread::Builder::new()
.name("AOCS".to_string())
.spawn(move || loop {
acs_task.periodic_operation();
mgm_handler.periodic_operation();
thread::sleep(Duration::from_millis(FREQ_MS_AOCS));
})
.unwrap();
@ -300,22 +302,23 @@ fn static_tmtc_pool_main() {
#[allow(dead_code)]
fn dyn_tmtc_pool_main() {
let (tc_source_tx, tc_source_rx) = channel();
let (tm_funnel_tx, tm_funnel_rx) = channel();
let (tm_server_tx, tm_server_rx) = channel();
// Every software component which needs to generate verification telemetry gets a cloned
// verification reporter.
let verif_reporter = create_verification_reporter(TmAsVecSenderWithId::new(
TmSenderId::PusVerification as ChannelId,
"verif_sender",
tm_funnel_tx.clone(),
));
let (tc_source_tx, tc_source_rx) = mpsc::channel();
let (tm_funnel_tx, tm_funnel_rx) = mpsc::channel();
let (tm_server_tx, tm_server_rx) = mpsc::channel();
// Some requests are targetable. This map is used to retrieve sender handles based on a target ID.
let (mgm_handler_composite_tx, mgm_handler_composite_rx) =
mpsc::channel::<GenericMessage<CompositeRequest>>();
let (mgm_handler_mode_tx, mgm_handler_mode_rx) = mpsc::channel::<GenericMessage<ModeRequest>>();
let acs_target_id = TargetAndApidId::new(PUS_APID, RequestTargetId::AcsSubsystem as u32);
let (acs_thread_tx, acs_thread_rx) = channel::<RequestWithToken>();
// Some requests are targetable. This map is used to retrieve sender handles based on a target ID.
let mut request_map = GenericRequestRouter::default();
request_map.0.insert(acs_target_id.into(), acs_thread_tx);
request_map
.composite_router_map
.insert(MGM_HANDLER_0.raw(), mgm_handler_composite_tx);
request_map
.mode_router_map
.insert(MGM_HANDLER_0.raw(), mgm_handler_mode_tx);
let tc_source = PusTcSourceProviderDynamic(tc_source_tx);
@ -325,74 +328,74 @@ fn dyn_tmtc_pool_main() {
let (event_request_tx, event_request_rx) = mpsc::channel::<EventRequestWithToken>();
// The event task is the core handler to perform the event routing and TM handling as specified
// in the sat-rs documentation.
let mut event_handler = EventHandler::new(
TmAsVecSenderWithId::new(
TmSenderId::AllEvents as ChannelId,
"ALL_EVENTS_TX",
tm_funnel_tx.clone(),
),
verif_reporter.clone(),
event_request_rx,
);
let mut event_handler = EventHandler::new(tm_funnel_tx.clone(), event_request_rx);
let (pus_test_tx, pus_test_rx) = mpsc::channel();
let (pus_event_tx, pus_event_rx) = mpsc::channel();
let (pus_sched_tx, pus_sched_rx) = mpsc::channel();
let (pus_hk_tx, pus_hk_rx) = mpsc::channel();
let (pus_action_tx, pus_action_rx) = mpsc::channel();
let (pus_mode_tx, pus_mode_rx) = mpsc::channel();
let (_pus_action_reply_tx, pus_action_reply_rx) = mpsc::channel();
let (pus_hk_reply_tx, pus_hk_reply_rx) = mpsc::channel();
let (pus_mode_reply_tx, pus_mode_reply_rx) = mpsc::channel();
let (pus_test_tx, pus_test_rx) = channel();
let (pus_event_tx, pus_event_rx) = channel();
let (pus_sched_tx, pus_sched_rx) = channel();
let (pus_hk_tx, pus_hk_rx) = channel();
let (pus_action_tx, pus_action_rx) = channel();
let pus_router = PusTcMpscRouter {
test_service_receiver: pus_test_tx,
event_service_receiver: pus_event_tx,
sched_service_receiver: pus_sched_tx,
hk_service_receiver: pus_hk_tx,
action_service_receiver: pus_action_tx,
test_tc_sender: pus_test_tx,
event_tc_sender: pus_event_tx,
sched_tc_sender: pus_sched_tx,
hk_tc_sender: pus_hk_tx,
action_tc_sender: pus_action_tx,
mode_tc_sender: pus_mode_tx,
};
let pus_test_service = create_test_service_dynamic(
tm_funnel_tx.clone(),
verif_reporter.clone(),
event_handler.clone_event_sender(),
pus_test_rx,
);
let pus_scheduler_service = create_scheduler_service_dynamic(
tm_funnel_tx.clone(),
verif_reporter.clone(),
tc_source.0.clone(),
pus_sched_rx,
create_sched_tc_pool(),
);
let pus_event_service = create_event_service_dynamic(
tm_funnel_tx.clone(),
verif_reporter.clone(),
pus_event_rx,
event_request_tx,
);
let pus_event_service =
create_event_service_dynamic(tm_funnel_tx.clone(), pus_event_rx, event_request_tx);
let pus_action_service = create_action_service_dynamic(
tm_funnel_tx.clone(),
verif_reporter.clone(),
pus_action_rx,
request_map.clone(),
pus_action_reply_rx,
);
let pus_hk_service = create_hk_service_dynamic(
tm_funnel_tx.clone(),
verif_reporter.clone(),
pus_hk_rx,
request_map.clone(),
pus_hk_reply_rx,
);
let pus_mode_service = create_mode_service_dynamic(
tm_funnel_tx.clone(),
pus_mode_rx,
request_map,
pus_mode_reply_rx,
);
let mut pus_stack = PusStack::new(
pus_test_service,
pus_hk_service,
pus_event_service,
pus_action_service,
pus_scheduler_service,
pus_test_service,
pus_mode_service,
);
let ccsds_receiver = CcsdsReceiver { tc_source };
let mut tmtc_task = TcSourceTaskDynamic::new(
tc_source_rx,
PusReceiver::new(verif_reporter.clone(), pus_router),
PusReceiver::new(tm_funnel_tx.clone(), pus_router),
);
let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
@ -413,20 +416,32 @@ fn dyn_tmtc_pool_main() {
tcp_server_cfg,
sync_tm_tcp_source.clone(),
tcp_ccsds_distributor,
PACKET_ID_VALIDATOR.clone(),
)
.expect("tcp server creation failed");
let mut acs_task = AcsTask::new(
TmAsVecSenderWithId::new(
TmSenderId::AcsSubsystem as ChannelId,
"ACS_TASK_SENDER",
tm_funnel_tx.clone(),
),
acs_thread_rx,
verif_reporter,
);
let mut tm_funnel = TmFunnelDynamic::new(sync_tm_tcp_source, tm_funnel_rx, tm_server_tx);
let (mgm_handler_mode_reply_to_parent_tx, _mgm_handler_mode_reply_to_parent_rx) =
mpsc::channel();
let dummy_spi_interface = SpiDummyInterface::default();
let shared_mgm_set = Arc::default();
let mode_leaf_interface = MpscModeLeafInterface {
request_rx: mgm_handler_mode_rx,
reply_tx_to_pus: pus_mode_reply_tx,
reply_tx_to_parent: mgm_handler_mode_reply_to_parent_tx,
};
let mut mgm_handler = MgmHandlerLis3Mdl::new(
MGM_HANDLER_0,
"MGM_0",
mode_leaf_interface,
mgm_handler_composite_rx,
pus_hk_reply_tx,
tm_funnel_tx,
dummy_spi_interface,
shared_mgm_set,
);
info!("Starting TMTC and UDP task");
let jh_udp_tmtc = thread::Builder::new()
.name("TMTC and UDP".to_string())
@ -472,7 +487,7 @@ fn dyn_tmtc_pool_main() {
let jh_aocs = thread::Builder::new()
.name("AOCS".to_string())
.spawn(move || loop {
acs_task.periodic_operation();
mgm_handler.periodic_operation();
thread::sleep(Duration::from_millis(FREQ_MS_AOCS));
})
.unwrap();

View File

@ -1,181 +1,274 @@
use log::{error, warn};
use satrs::action::ActionRequest;
use satrs::pool::{SharedStaticMemoryPool, StoreAddr};
use satrs::pus::action::{PusActionToRequestConverter, PusService8ActionHandler};
use satrs::pus::verification::std_mod::{
VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
use satrs::action::{ActionRequest, ActionRequestVariant};
use satrs::params::WritableToBeBytes;
use satrs::pool::SharedStaticMemoryPool;
use satrs::pus::action::{
ActionReplyVariant, ActivePusActionRequestStd, DefaultActiveActionRequestMap, PusActionReply,
};
use satrs::pus::verification::{
FailParams, TcStateAccepted, VerificationReportingProvider, VerificationToken,
FailParams, FailParamsWithStep, TcStateAccepted, TcStateStarted, VerificationReporter,
VerificationReportingProvider, VerificationToken,
};
use satrs::pus::{
EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter,
EcssTcReceiverCore, EcssTmSenderCore, MpscTcReceiver, PusPacketHandlerResult,
PusPacketHandlingError, PusServiceHelper, TmAsVecSenderWithId, TmAsVecSenderWithMpsc,
TmInSharedPoolSenderWithBoundedMpsc, TmInSharedPoolSenderWithId,
ActiveRequestProvider, EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter,
EcssTcInVecConverter, EcssTmSenderCore, EcssTmtcError, GenericConversionError, MpscTcReceiver,
MpscTmAsVecSender, MpscTmInSharedPoolSenderBounded, PusPacketHandlerResult, PusReplyHandler,
PusServiceHelper, PusTcToRequestConverter, PusTmAsVec, PusTmInPool, TmInSharedPoolSender,
};
use satrs::request::TargetAndApidId;
use satrs::request::{GenericMessage, UniqueApidTargetId};
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::PusPacket;
use satrs::tmtc::tm_helper::SharedTmPool;
use satrs::{ChannelId, TargetId};
use satrs_example::config::{tmtc_err, TcReceiverId, TmSenderId, PUS_APID};
use std::sync::mpsc::{self};
use satrs::spacepackets::ecss::{EcssEnumU16, PusPacket};
use satrs_example::config::components::PUS_ACTION_SERVICE;
use satrs_example::config::tmtc_err;
use std::sync::mpsc;
use std::time::Duration;
use crate::requests::GenericRequestRouter;
use super::GenericRoutingErrorHandler;
use super::{
create_verification_reporter, generic_pus_request_timeout_handler, HandlingStatus,
PusTargetedRequestService, TargetedPusService,
};
pub struct ActionReplyHandler {
fail_data_buf: [u8; 128],
}
impl Default for ActionReplyHandler {
fn default() -> Self {
Self {
fail_data_buf: [0; 128],
}
}
}
impl PusReplyHandler<ActivePusActionRequestStd, PusActionReply> for ActionReplyHandler {
type Error = EcssTmtcError;
fn handle_unrequested_reply(
&mut self,
reply: &GenericMessage<PusActionReply>,
_tm_sender: &impl EcssTmSenderCore,
) -> Result<(), Self::Error> {
warn!("received unexpected reply for service 8: {reply:?}");
Ok(())
}
fn handle_reply(
&mut self,
reply: &GenericMessage<PusActionReply>,
active_request: &ActivePusActionRequestStd,
tm_sender: &(impl EcssTmSenderCore + ?Sized),
verification_handler: &impl VerificationReportingProvider,
time_stamp: &[u8],
) -> Result<bool, Self::Error> {
let verif_token: VerificationToken<TcStateStarted> = active_request
.token()
.try_into()
.expect("invalid token state");
let remove_entry = match &reply.message.variant {
ActionReplyVariant::CompletionFailed { error_code, params } => {
let mut fail_data_len = 0;
if let Some(params) = params {
fail_data_len = params.write_to_be_bytes(&mut self.fail_data_buf)?;
}
verification_handler.completion_failure(
tm_sender,
verif_token,
FailParams::new(time_stamp, error_code, &self.fail_data_buf[..fail_data_len]),
)?;
true
}
ActionReplyVariant::StepFailed {
error_code,
step,
params,
} => {
let mut fail_data_len = 0;
if let Some(params) = params {
fail_data_len = params.write_to_be_bytes(&mut self.fail_data_buf)?;
}
verification_handler.step_failure(
tm_sender,
verif_token,
FailParamsWithStep::new(
time_stamp,
&EcssEnumU16::new(*step),
error_code,
&self.fail_data_buf[..fail_data_len],
),
)?;
true
}
ActionReplyVariant::Completed => {
verification_handler.completion_success(tm_sender, verif_token, time_stamp)?;
true
}
ActionReplyVariant::StepSuccess { step } => {
verification_handler.step_success(
tm_sender,
&verif_token,
time_stamp,
EcssEnumU16::new(*step),
)?;
false
}
_ => false,
};
Ok(remove_entry)
}
fn handle_request_timeout(
&mut self,
active_request: &ActivePusActionRequestStd,
tm_sender: &impl EcssTmSenderCore,
verification_handler: &impl VerificationReportingProvider,
time_stamp: &[u8],
) -> Result<(), Self::Error> {
generic_pus_request_timeout_handler(
tm_sender,
active_request,
verification_handler,
time_stamp,
"action",
)
}
}
#[derive(Default)]
pub struct ExampleActionRequestConverter {}
pub struct ActionRequestConverter {}
impl PusActionToRequestConverter for ExampleActionRequestConverter {
type Error = PusPacketHandlingError;
impl PusTcToRequestConverter<ActivePusActionRequestStd, ActionRequest> for ActionRequestConverter {
type Error = GenericConversionError;
fn convert(
&mut self,
token: VerificationToken<TcStateAccepted>,
tc: &PusTcReader,
time_stamp: &[u8],
tm_sender: &(impl EcssTmSenderCore + ?Sized),
verif_reporter: &impl VerificationReportingProvider,
) -> Result<(TargetId, ActionRequest), Self::Error> {
time_stamp: &[u8],
) -> Result<(ActivePusActionRequestStd, ActionRequest), Self::Error> {
let subservice = tc.subservice();
let user_data = tc.user_data();
if user_data.len() < 8 {
verif_reporter
.start_failure(
tm_sender,
token,
FailParams::new_no_fail_data(time_stamp, &tmtc_err::NOT_ENOUGH_APP_DATA),
)
.expect("Sending start failure failed");
return Err(PusPacketHandlingError::NotEnoughAppData {
return Err(GenericConversionError::NotEnoughAppData {
expected: 8,
found: user_data.len(),
});
}
let target_id = TargetAndApidId::from_pus_tc(tc).unwrap();
let target_id_and_apid = UniqueApidTargetId::from_pus_tc(tc).unwrap();
let action_id = u32::from_be_bytes(user_data[4..8].try_into().unwrap());
if subservice == 128 {
let req_variant = if user_data.len() == 8 {
ActionRequestVariant::NoData
} else {
ActionRequestVariant::VecData(user_data[8..].to_vec())
};
Ok((
ActivePusActionRequestStd::new(
action_id,
target_id_and_apid.into(),
token.into(),
Duration::from_secs(30),
),
ActionRequest::new(action_id, req_variant),
))
} else {
verif_reporter
.start_failure(
tm_sender,
token,
FailParams::new_no_fail_data(time_stamp, &tmtc_err::INVALID_PUS_SUBSERVICE),
)
.expect("Sending start failure failed");
Err(GenericConversionError::InvalidSubservice(subservice))
}
}
}
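// Illustrative sketch, not part of the commit itself: the application data layout which the
// converter above expects for subservice 128. The helper name is hypothetical.
#[allow(dead_code)]
fn example_action_app_data(target_id: u32, action_id: u32) -> [u8; 8] {
let mut app_data = [0u8; 8];
// Bytes 0..4: unique target ID used to route the request.
app_data[0..4].copy_from_slice(&target_id.to_be_bytes());
// Bytes 4..8: action ID. Any trailing bytes become ActionRequestVariant::VecData.
app_data[4..8].copy_from_slice(&action_id.to_be_bytes());
app_data
}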
pub fn create_action_service_static(
tm_sender: TmInSharedPoolSender<mpsc::SyncSender<PusTmInPool>>,
tc_pool: SharedStaticMemoryPool,
pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
action_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<PusActionReply>>,
) -> ActionServiceWrapper<MpscTmInSharedPoolSenderBounded, EcssTcInSharedStoreConverter> {
let action_request_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_ACTION_SERVICE.id(),
pus_action_rx,
tm_sender,
create_verification_reporter(PUS_ACTION_SERVICE.id(), PUS_ACTION_SERVICE.apid),
EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048),
),
ActionRequestConverter::default(),
// TODO: Implementation which does not use run-time allocation? Maybe something like
// a bounded wrapper which pre-allocates using [HashMap::with_capacity]..
DefaultActiveActionRequestMap::default(),
ActionReplyHandler::default(),
action_router,
reply_receiver,
);
ActionServiceWrapper {
service: action_request_handler,
}
}
pub fn create_action_service_dynamic(
tm_funnel_tx: mpsc::Sender<PusTmAsVec>,
pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
action_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<PusActionReply>>,
) -> ActionServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
let action_request_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_ACTION_SERVICE.id(),
pus_action_rx,
tm_funnel_tx,
create_verification_reporter(PUS_ACTION_SERVICE.id(), PUS_ACTION_SERVICE.apid),
EcssTcInVecConverter::default(),
),
ActionRequestConverter::default(),
DefaultActiveActionRequestMap::default(),
ActionReplyHandler::default(),
action_router,
reply_receiver,
);
ActionServiceWrapper {
service: action_request_handler,
}
}
pub struct ActionServiceWrapper<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter>
{
pub(crate) service: PusTargetedRequestService<
MpscTcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
ActionRequestConverter,
ActionReplyHandler,
DefaultActiveActionRequestMap,
ActivePusActionRequestStd,
ActionRequest,
PusActionReply,
>,
}
impl<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter> TargetedPusService
for ActionServiceWrapper<TmSender, TcInMemConverter>
{
/// Returns [true] if the packet handling is finished.
fn poll_and_handle_next_tc(&mut self, time_stamp: &[u8]) -> bool {
match self.service.poll_and_handle_next_tc(time_stamp) {
Ok(result) => match result {
PusPacketHandlerResult::RequestHandled => {}
PusPacketHandlerResult::RequestHandledPartialSuccess(e) => {
@ -197,4 +290,463 @@ impl<
}
false
}
fn poll_and_handle_next_reply(&mut self, time_stamp: &[u8]) -> HandlingStatus {
// This only fails if all senders disconnected. Treat it like an empty queue.
self.service
.poll_and_check_next_reply(time_stamp)
.unwrap_or_else(|e| {
warn!("PUS 8: Handling reply failed with error {e:?}");
HandlingStatus::Empty
})
}
fn check_for_request_timeouts(&mut self) {
self.service.check_for_request_timeouts();
}
}
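// Illustrative sketch, not part of the commit itself: a scheduler loop might drive any service
// implementing TargetedPusService like this. The function name is hypothetical.
#[allow(dead_code)]
fn example_poll_loop(service: &mut impl TargetedPusService, time_stamp: &[u8]) {
// Drain queued telecommands; the method returns true once packet handling is finished.
while !service.poll_and_handle_next_tc(time_stamp) {}
// Drain the replies coming back from the target components.
while service.poll_and_handle_next_reply(time_stamp) != HandlingStatus::Empty {}
// Convert timed-out active requests into completion failures.
service.check_for_request_timeouts();
}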
#[cfg(test)]
mod tests {
use satrs::pus::test_util::{
TEST_APID, TEST_COMPONENT_ID_0, TEST_COMPONENT_ID_1, TEST_UNIQUE_ID_0, TEST_UNIQUE_ID_1,
};
use satrs::pus::verification;
use satrs::pus::verification::test_util::TestVerificationReporter;
use satrs::request::MessageMetadata;
use satrs::ComponentId;
use satrs::{
res_code::ResultU16,
spacepackets::{
ecss::{
tc::{PusTcCreator, PusTcSecondaryHeader},
tm::PusTmReader,
WritablePusPacket,
},
SpHeader,
},
};
use crate::{
pus::tests::{PusConverterTestbench, ReplyHandlerTestbench, TargetedPusRequestTestbench},
requests::CompositeRequest,
};
use super::*;
impl
TargetedPusRequestTestbench<
ActionRequestConverter,
ActionReplyHandler,
DefaultActiveActionRequestMap,
ActivePusActionRequestStd,
ActionRequest,
PusActionReply,
>
{
pub fn new_for_action(owner_id: ComponentId, target_id: ComponentId) -> Self {
let _ = env_logger::builder().is_test(true).try_init();
let (tm_funnel_tx, tm_funnel_rx) = mpsc::channel();
let (pus_action_tx, pus_action_rx) = mpsc::channel();
let (action_reply_tx, action_reply_rx) = mpsc::channel();
let (action_req_tx, action_req_rx) = mpsc::channel();
let verif_reporter = TestVerificationReporter::new(owner_id);
let mut generic_req_router = GenericRequestRouter::default();
generic_req_router
.composite_router_map
.insert(target_id, action_req_tx);
Self {
service: PusTargetedRequestService::new(
PusServiceHelper::new(
owner_id,
pus_action_rx,
tm_funnel_tx.clone(),
verif_reporter,
EcssTcInVecConverter::default(),
),
ActionRequestConverter::default(),
DefaultActiveActionRequestMap::default(),
ActionReplyHandler::default(),
generic_req_router,
action_reply_rx,
),
request_id: None,
pus_packet_tx: pus_action_tx,
tm_funnel_rx,
reply_tx: action_reply_tx,
request_rx: action_req_rx,
}
}
pub fn verify_packet_started(&self) {
self.service
.service_helper
.common
.verif_reporter
.check_next_is_started_success(
self.service.service_helper.id(),
self.request_id.expect("request ID not set").into(),
);
}
pub fn verify_packet_completed(&self) {
self.service
.service_helper
.common
.verif_reporter
.check_next_is_completion_success(
self.service.service_helper.id(),
self.request_id.expect("request ID not set").into(),
);
}
pub fn verify_tm_empty(&self) {
let packet = self.tm_funnel_rx.try_recv();
if let Err(mpsc::TryRecvError::Empty) = packet {
} else {
let tm = packet.unwrap();
let unexpected_tm = PusTmReader::new(&tm.packet, 7).unwrap().0;
panic!("unexpected TM packet {unexpected_tm:?}");
}
}
pub fn verify_next_tc_is_handled_properly(&mut self, time_stamp: &[u8]) {
let result = self.service.poll_and_handle_next_tc(time_stamp);
if let Err(e) = result {
panic!("unexpected error {:?}", e);
}
let result = result.unwrap();
match result {
PusPacketHandlerResult::RequestHandled => (),
_ => panic!("unexpected result {result:?}"),
}
}
pub fn verify_all_tcs_handled(&mut self, time_stamp: &[u8]) {
let result = self.service.poll_and_handle_next_tc(time_stamp);
if let Err(e) = result {
panic!("unexpected error {:?}", e);
}
let result = result.unwrap();
match result {
PusPacketHandlerResult::Empty => (),
_ => panic!("unexpected result {result:?}"),
}
}
pub fn verify_next_reply_is_handled_properly(&mut self, time_stamp: &[u8]) {
let result = self.service.poll_and_check_next_reply(time_stamp);
assert!(result.is_ok());
assert_eq!(result.unwrap(), HandlingStatus::HandledOne);
}
pub fn verify_all_replies_handled(&mut self, time_stamp: &[u8]) {
let result = self.service.poll_and_check_next_reply(time_stamp);
assert!(result.is_ok());
assert_eq!(result.unwrap(), HandlingStatus::Empty);
}
pub fn add_tc(&mut self, tc: &PusTcCreator) {
self.request_id = Some(verification::RequestId::new(tc).into());
let token = self.service.service_helper.verif_reporter_mut().add_tc(tc);
let accepted_token = self
.service
.service_helper
.verif_reporter()
.acceptance_success(self.service.service_helper.tm_sender(), token, &[0; 7])
.expect("TC acceptance failed");
self.service
.service_helper
.verif_reporter()
.check_next_was_added(accepted_token.request_id());
let id = self.service.service_helper.id();
self.service
.service_helper
.verif_reporter()
.check_next_is_acceptance_success(id, accepted_token.request_id());
self.pus_packet_tx
.send(EcssTcAndToken::new(tc.to_vec().unwrap(), accepted_token))
.unwrap();
}
}
#[test]
fn basic_request() {
let mut testbench = TargetedPusRequestTestbench::new_for_action(
TEST_COMPONENT_ID_0.id(),
TEST_COMPONENT_ID_1.id(),
);
// Create a basic action request and verify forwarding.
let sp_header = SpHeader::new_from_apid(TEST_APID);
let sec_header = PusTcSecondaryHeader::new_simple(8, 128);
let action_id = 5_u32;
let mut app_data: [u8; 8] = [0; 8];
app_data[0..4].copy_from_slice(&TEST_UNIQUE_ID_1.to_be_bytes());
app_data[4..8].copy_from_slice(&action_id.to_be_bytes());
let pus8_packet = PusTcCreator::new(sp_header, sec_header, &app_data, true);
testbench.add_tc(&pus8_packet);
let time_stamp: [u8; 7] = [0; 7];
testbench.verify_next_tc_is_handled_properly(&time_stamp);
testbench.verify_all_tcs_handled(&time_stamp);
testbench.verify_packet_started();
let possible_req = testbench.request_rx.try_recv();
assert!(possible_req.is_ok());
let req = possible_req.unwrap();
if let CompositeRequest::Action(action_req) = req.message {
assert_eq!(action_req.action_id, action_id);
assert_eq!(action_req.variant, ActionRequestVariant::NoData);
let action_reply = PusActionReply::new(action_id, ActionReplyVariant::Completed);
testbench
.reply_tx
.send(GenericMessage::new(req.requestor_info, action_reply))
.unwrap();
} else {
panic!("unexpected request type");
}
testbench.verify_next_reply_is_handled_properly(&time_stamp);
testbench.verify_all_replies_handled(&time_stamp);
testbench.verify_packet_completed();
testbench.verify_tm_empty();
}
#[test]
fn basic_request_routing_error() {
let mut testbench = TargetedPusRequestTestbench::new_for_action(
TEST_COMPONENT_ID_0.id(),
TEST_COMPONENT_ID_1.id(),
);
// Create an action request with an unknown target ID and verify that routing fails.
let sec_header = PusTcSecondaryHeader::new_simple(8, 128);
let action_id = 5_u32;
let mut app_data: [u8; 8] = [0; 8];
// Invalid ID, routing should fail.
app_data[0..4].copy_from_slice(&0_u32.to_be_bytes());
app_data[4..8].copy_from_slice(&action_id.to_be_bytes());
let pus8_packet = PusTcCreator::new(
SpHeader::new_from_apid(TEST_APID),
sec_header,
&app_data,
true,
);
testbench.add_tc(&pus8_packet);
let time_stamp: [u8; 7] = [0; 7];
let result = testbench.service.poll_and_handle_next_tc(&time_stamp);
assert!(result.is_err());
// Verify the correct result and completion failure.
}
#[test]
fn converter_action_req_no_data() {
let mut testbench = PusConverterTestbench::new(
TEST_COMPONENT_ID_0.raw(),
ActionRequestConverter::default(),
);
let sec_header = PusTcSecondaryHeader::new_simple(8, 128);
let action_id = 5_u32;
let mut app_data: [u8; 8] = [0; 8];
// Valid target and action IDs, the conversion should succeed.
app_data[0..4].copy_from_slice(&TEST_UNIQUE_ID_0.to_be_bytes());
app_data[4..8].copy_from_slice(&action_id.to_be_bytes());
let pus8_packet = PusTcCreator::new(
SpHeader::new_from_apid(TEST_APID),
sec_header,
&app_data,
true,
);
let token = testbench.add_tc(&pus8_packet);
let result = testbench.convert(token, &[], TEST_APID, TEST_UNIQUE_ID_0);
assert!(result.is_ok());
let (active_req, request) = result.unwrap();
if let ActionRequestVariant::NoData = request.variant {
assert_eq!(request.action_id, action_id);
assert_eq!(active_req.action_id, action_id);
assert_eq!(
active_req.target_id(),
UniqueApidTargetId::new(TEST_APID, TEST_UNIQUE_ID_0).raw()
);
assert_eq!(
active_req.token().request_id(),
testbench.request_id().unwrap()
);
} else {
panic!("unexpected action request variant");
}
}
#[test]
fn converter_action_req_with_data() {
let mut testbench =
PusConverterTestbench::new(TEST_COMPONENT_ID_0.id(), ActionRequestConverter::default());
let sec_header = PusTcSecondaryHeader::new_simple(8, 128);
let action_id = 5_u32;
let mut app_data: [u8; 16] = [0; 16];
// Valid target and action IDs, the conversion should succeed.
app_data[0..4].copy_from_slice(&TEST_UNIQUE_ID_0.to_be_bytes());
app_data[4..8].copy_from_slice(&action_id.to_be_bytes());
for i in 0..8 {
app_data[i + 8] = i as u8;
}
let pus8_packet = PusTcCreator::new(
SpHeader::new_from_apid(TEST_APID),
sec_header,
&app_data,
true,
);
let token = testbench.add_tc(&pus8_packet);
let result = testbench.convert(token, &[], TEST_APID, TEST_UNIQUE_ID_0);
assert!(result.is_ok());
let (active_req, request) = result.unwrap();
if let ActionRequestVariant::VecData(vec) = request.variant {
assert_eq!(request.action_id, action_id);
assert_eq!(active_req.action_id, action_id);
assert_eq!(vec, app_data[8..].to_vec());
} else {
panic!("unexpected action request variant");
}
}
#[test]
fn reply_handling_completion_success() {
let mut testbench =
ReplyHandlerTestbench::new(TEST_COMPONENT_ID_0.id(), ActionReplyHandler::default());
let action_id = 5_u32;
let (req_id, active_req) = testbench.add_tc(TEST_APID, TEST_UNIQUE_ID_0, &[]);
let active_action_req =
ActivePusActionRequestStd::new_from_common_req(action_id, active_req);
let reply = PusActionReply::new(action_id, ActionReplyVariant::Completed);
let generic_reply = GenericMessage::new(MessageMetadata::new(req_id.into(), 0), reply);
let result = testbench.handle_reply(&generic_reply, &active_action_req, &[]);
assert!(result.is_ok());
assert!(result.unwrap());
testbench.verif_reporter.assert_full_completion_success(
TEST_COMPONENT_ID_0.id(),
req_id,
None,
);
}
#[test]
fn reply_handling_completion_failure() {
let mut testbench =
ReplyHandlerTestbench::new(TEST_COMPONENT_ID_0.id(), ActionReplyHandler::default());
let action_id = 5_u32;
let (req_id, active_req) = testbench.add_tc(TEST_APID, TEST_UNIQUE_ID_0, &[]);
let active_action_req =
ActivePusActionRequestStd::new_from_common_req(action_id, active_req);
let error_code = ResultU16::new(2, 3);
let reply = PusActionReply::new(
action_id,
ActionReplyVariant::CompletionFailed {
error_code,
params: None,
},
);
let generic_reply = GenericMessage::new(MessageMetadata::new(req_id.into(), 0), reply);
let result = testbench.handle_reply(&generic_reply, &active_action_req, &[]);
assert!(result.is_ok());
assert!(result.unwrap());
testbench.verif_reporter.assert_completion_failure(
TEST_COMPONENT_ID_0.into(),
req_id,
None,
error_code.raw() as u64,
);
}
#[test]
fn reply_handling_step_success() {
let mut testbench =
ReplyHandlerTestbench::new(TEST_COMPONENT_ID_0.id(), ActionReplyHandler::default());
let action_id = 5_u32;
let (req_id, active_req) = testbench.add_tc(TEST_APID, TEST_UNIQUE_ID_0, &[]);
let active_action_req =
ActivePusActionRequestStd::new_from_common_req(action_id, active_req);
let reply = PusActionReply::new(action_id, ActionReplyVariant::StepSuccess { step: 1 });
let generic_reply = GenericMessage::new(MessageMetadata::new(req_id.into(), 0), reply);
let result = testbench.handle_reply(&generic_reply, &active_action_req, &[]);
assert!(result.is_ok());
// Entry should not be removed, completion not done yet.
assert!(!result.unwrap());
testbench.verif_reporter.check_next_was_added(req_id);
testbench
.verif_reporter
.check_next_is_acceptance_success(TEST_COMPONENT_ID_0.raw(), req_id);
testbench
.verif_reporter
.check_next_is_started_success(TEST_COMPONENT_ID_0.raw(), req_id);
testbench
.verif_reporter
.check_next_is_step_success(TEST_COMPONENT_ID_0.raw(), req_id, 1);
}
#[test]
fn reply_handling_step_failure() {
let mut testbench =
ReplyHandlerTestbench::new(TEST_COMPONENT_ID_0.id(), ActionReplyHandler::default());
let action_id = 5_u32;
let (req_id, active_req) = testbench.add_tc(TEST_APID, TEST_UNIQUE_ID_0, &[]);
let active_action_req =
ActivePusActionRequestStd::new_from_common_req(action_id, active_req);
let error_code = ResultU16::new(2, 3);
let reply = PusActionReply::new(
action_id,
ActionReplyVariant::StepFailed {
error_code,
step: 1,
params: None,
},
);
let generic_reply = GenericMessage::new(MessageMetadata::new(req_id.into(), 0), reply);
let result = testbench.handle_reply(&generic_reply, &active_action_req, &[]);
assert!(result.is_ok());
assert!(result.unwrap());
testbench.verif_reporter.check_next_was_added(req_id);
testbench
.verif_reporter
.check_next_is_acceptance_success(TEST_COMPONENT_ID_0.id(), req_id);
testbench
.verif_reporter
.check_next_is_started_success(TEST_COMPONENT_ID_0.id(), req_id);
testbench.verif_reporter.check_next_is_step_failure(
TEST_COMPONENT_ID_0.id(),
req_id,
error_code.raw().into(),
);
}
#[test]
fn reply_handling_unrequested_reply() {
let mut testbench =
ReplyHandlerTestbench::new(TEST_COMPONENT_ID_0.id(), ActionReplyHandler::default());
let action_reply = PusActionReply::new(5_u32, ActionReplyVariant::Completed);
let unrequested_reply =
GenericMessage::new(MessageMetadata::new(10_u32, 15_u64), action_reply);
// Right now this function does not do a lot. We simply check that it does not panic or do
// weird stuff.
let result = testbench.handle_unrequested_reply(&unrequested_reply);
assert!(result.is_ok());
}
#[test]
fn reply_handling_reply_timeout() {
let mut testbench =
ReplyHandlerTestbench::new(TEST_COMPONENT_ID_0.id(), ActionReplyHandler::default());
let action_id = 5_u32;
let (req_id, active_request) = testbench.add_tc(TEST_APID, TEST_UNIQUE_ID_0, &[]);
let result = testbench.handle_request_timeout(
&ActivePusActionRequestStd::new_from_common_req(action_id, active_request),
&[],
);
assert!(result.is_ok());
testbench.verif_reporter.assert_completion_failure(
TEST_COMPONENT_ID_0.raw(),
req_id,
None,
tmtc_err::REQUEST_TIMEOUT.raw() as u64,
);
}
}

View File

@ -1,113 +1,69 @@
use std::sync::mpsc;
use crate::pus::create_verification_reporter;
use log::{error, warn};
use satrs::pool::SharedStaticMemoryPool;
use satrs::pus::event_man::EventRequestWithToken;
use satrs::pus::event_srv::PusEventServiceHandler;
use satrs::pus::verification::VerificationReporter;
use satrs::pus::{
EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter,
EcssTmSenderCore, MpscTcReceiver, MpscTmAsVecSender, MpscTmInSharedPoolSenderBounded,
PusPacketHandlerResult, PusServiceHelper, PusTmAsVec, PusTmInPool, TmInSharedPoolSender,
};
use satrs_example::config::components::PUS_EVENT_MANAGEMENT;
pub fn create_event_service_static(
tm_sender: TmInSharedPoolSender<mpsc::SyncSender<PusTmInPool>>,
tc_pool: SharedStaticMemoryPool,
pus_event_rx: mpsc::Receiver<EcssTcAndToken>,
event_request_tx: mpsc::Sender<EventRequestWithToken>,
) -> EventServiceWrapper<MpscTmInSharedPoolSenderBounded, EcssTcInSharedStoreConverter> {
let pus_5_handler = PusEventServiceHandler::new(
PusServiceHelper::new(
PUS_EVENT_MANAGEMENT.id(),
pus_event_rx,
tm_sender,
create_verification_reporter(PUS_EVENT_MANAGEMENT.id(), PUS_EVENT_MANAGEMENT.apid),
EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048),
),
event_request_tx,
);
EventServiceWrapper {
handler: pus_5_handler,
}
}
pub fn create_event_service_dynamic(
tm_funnel_tx: mpsc::Sender<PusTmAsVec>,
pus_event_rx: mpsc::Receiver<EcssTcAndToken>,
event_request_tx: mpsc::Sender<EventRequestWithToken>,
) -> EventServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
let pus_5_handler = PusEventServiceHandler::new(
PusServiceHelper::new(
PUS_EVENT_MANAGEMENT.id(),
pus_event_rx,
tm_funnel_tx,
create_verification_reporter(PUS_EVENT_MANAGEMENT.id(), PUS_EVENT_MANAGEMENT.apid),
EcssTcInVecConverter::default(),
),
event_request_tx,
);
EventServiceWrapper {
handler: pus_5_handler,
}
}
pub struct EventServiceWrapper<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter> {
pub handler:
PusEventServiceHandler<MpscTcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
}
impl<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter>
EventServiceWrapper<TmSender, TcInMemConverter>
{
pub fn poll_and_handle_next_tc(&mut self, time_stamp: &[u8]) -> bool {
match self.handler.poll_and_handle_next_tc(time_stamp) {
Ok(result) => match result {
PusPacketHandlerResult::RequestHandled => {}
PusPacketHandlerResult::RequestHandledPartialSuccess(e) => {

View File

@ -1,50 +1,127 @@
use derive_new::new;
use log::{error, warn};
use satrs::hk::{CollectionIntervalFactor, HkRequest, HkRequestVariant, UniqueId};
use satrs::pool::SharedStaticMemoryPool;
use satrs::pus::verification::{
FailParams, TcStateAccepted, TcStateStarted, VerificationReporter,
VerificationReportingProvider, VerificationToken,
};
use satrs::pus::{
ActivePusRequestStd, ActiveRequestProvider, DefaultActiveRequestMap, EcssTcAndToken,
EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTmSenderCore,
EcssTmtcError, GenericConversionError, MpscTcReceiver, MpscTmAsVecSender,
MpscTmInSharedPoolSenderBounded, PusPacketHandlerResult, PusReplyHandler, PusServiceHelper,
PusTcToRequestConverter, PusTmAsVec, PusTmInPool, TmInSharedPoolSender,
};
use satrs::request::{GenericMessage, UniqueApidTargetId};
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::{hk, PusPacket};
use satrs_example::config::components::PUS_HK_SERVICE;
use satrs_example::config::{hk_err, tmtc_err};
use std::sync::mpsc;
use std::time::Duration;
use crate::pus::{create_verification_reporter, generic_pus_request_timeout_handler};
use crate::requests::GenericRequestRouter;
use super::{HandlingStatus, PusTargetedRequestService};
#[derive(Clone, PartialEq, Debug, new)]
pub struct HkReply {
pub unique_id: UniqueId,
pub variant: HkReplyVariant,
}
#[derive(Clone, PartialEq, Debug)]
pub enum HkReplyVariant {
Ack,
}
#[derive(Default)]
pub struct HkReplyHandler {}
impl PusReplyHandler<ActivePusRequestStd, HkReply> for HkReplyHandler {
type Error = EcssTmtcError;
fn handle_unrequested_reply(
&mut self,
reply: &GenericMessage<HkReply>,
_tm_sender: &impl EcssTmSenderCore,
) -> Result<(), Self::Error> {
log::warn!("received unexpected reply for service 3: {reply:?}");
Ok(())
}
fn handle_reply(
&mut self,
reply: &GenericMessage<HkReply>,
active_request: &ActivePusRequestStd,
tm_sender: &impl EcssTmSenderCore,
verification_handler: &impl VerificationReportingProvider,
time_stamp: &[u8],
) -> Result<bool, Self::Error> {
let started_token: VerificationToken<TcStateStarted> = active_request
.token()
.try_into()
.expect("invalid token state");
match reply.message.variant {
HkReplyVariant::Ack => {
verification_handler
.completion_success(tm_sender, started_token, time_stamp)
.expect("sending completion success verification failed");
}
};
Ok(true)
}
fn handle_request_timeout(
&mut self,
active_request: &ActivePusRequestStd,
tm_sender: &impl EcssTmSenderCore,
verification_handler: &impl VerificationReportingProvider,
time_stamp: &[u8],
) -> Result<(), Self::Error> {
generic_pus_request_timeout_handler(
tm_sender,
active_request,
verification_handler,
time_stamp,
"HK",
)?;
Ok(())
}
}
pub struct HkRequestConverter {
timeout: Duration,
}
impl Default for HkRequestConverter {
fn default() -> Self {
Self {
timeout: Duration::from_secs(60),
}
}
}
impl PusTcToRequestConverter<ActivePusRequestStd, HkRequest> for HkRequestConverter {
type Error = GenericConversionError;
fn convert(
&mut self,
token: VerificationToken<TcStateAccepted>,
tc: &PusTcReader,
tm_sender: &(impl EcssTmSenderCore + ?Sized),
verif_reporter: &impl VerificationReportingProvider,
time_stamp: &[u8],
) -> Result<(ActivePusRequestStd, HkRequest), Self::Error> {
let user_data = tc.user_data();
if user_data.is_empty() {
let user_data_len = user_data.len() as u32;
let user_data_len_raw = user_data_len.to_be_bytes();
verif_reporter
.start_failure(
tm_sender,
token,
FailParams::new(
time_stamp,
@ -53,7 +130,7 @@ impl PusHkToRequestConverter for ExampleHkRequestConverter {
),
)
.expect("Sending start failure TM failed");
return Err(GenericConversionError::NotEnoughAppData {
expected: 4,
found: 0,
});
@ -67,178 +144,164 @@ impl PusHkToRequestConverter for ExampleHkRequestConverter {
let user_data_len = user_data.len() as u32;
let user_data_len_raw = user_data_len.to_be_bytes();
verif_reporter
.start_failure(
tm_sender,
token,
FailParams::new(time_stamp, err, &user_data_len_raw),
)
.expect("Sending start failure TM failed");
return Err(GenericConversionError::NotEnoughAppData {
expected: 8,
found: 4,
});
}
let subservice = tc.subservice();
let target_id_and_apid = UniqueApidTargetId::from_pus_tc(tc).expect("invalid tc format");
let unique_id = u32::from_be_bytes(tc.user_data()[4..8].try_into().unwrap());
let standard_subservice = hk::Subservice::try_from(subservice);
if standard_subservice.is_err() {
verif_reporter
.start_failure(
tm_sender,
token,
FailParams::new(time_stamp, &tmtc_err::INVALID_PUS_SUBSERVICE, &[subservice]),
)
.expect("Sending start failure TM failed");
return Err(GenericConversionError::InvalidSubservice(subservice));
}
let request = match standard_subservice.unwrap() {
hk::Subservice::TcEnableHkGeneration | hk::Subservice::TcEnableDiagGeneration => {
HkRequest::new(unique_id, HkRequestVariant::EnablePeriodic)
}
hk::Subservice::TcDisableHkGeneration | hk::Subservice::TcDisableDiagGeneration => {
HkRequest::new(unique_id, HkRequestVariant::DisablePeriodic)
}
hk::Subservice::TcReportHkReportStructures => todo!(),
hk::Subservice::TmHkPacket => todo!(),
hk::Subservice::TcGenerateOneShotHk | hk::Subservice::TcGenerateOneShotDiag => {
HkRequest::new(unique_id, HkRequestVariant::OneShot)
}
hk::Subservice::TcModifyDiagCollectionInterval
| hk::Subservice::TcModifyHkCollectionInterval => {
if user_data.len() < 12 {
verif_reporter
.start_failure(
tm_sender,
token,
FailParams::new_no_fail_data(
time_stamp,
&tmtc_err::NOT_ENOUGH_APP_DATA,
),
)
.expect("Sending start failure TM failed");
return Err(GenericConversionError::NotEnoughAppData {
expected: 12,
found: user_data.len(),
});
}
HkRequest::new(
unique_id,
HkRequestVariant::ModifyCollectionInterval(
CollectionIntervalFactor::from_be_bytes(
user_data[8..12].try_into().unwrap(),
),
),
)
}
_ => {
verif_reporter
.start_failure(
tm_sender,
token,
FailParams::new(
time_stamp,
&tmtc_err::PUS_SUBSERVICE_NOT_IMPLEMENTED,
&[subservice],
),
)
.expect("Sending start failure TM failed");
return Err(GenericConversionError::InvalidSubservice(subservice));
}
};
Ok((
ActivePusRequestStd::new(target_id_and_apid.into(), token, self.timeout),
request,
))
}
}
pub fn create_hk_service_static(
tm_sender: TmInSharedPoolSender<mpsc::SyncSender<PusTmInPool>>,
tc_pool: SharedStaticMemoryPool,
pus_hk_rx: mpsc::Receiver<EcssTcAndToken>,
request_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<HkReply>>,
) -> HkServiceWrapper<MpscTmInSharedPoolSenderBounded, EcssTcInSharedStoreConverter> {
let pus_3_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_HK_SERVICE.id(),
pus_hk_rx,
tm_sender,
create_verification_reporter(PUS_HK_SERVICE.id(), PUS_HK_SERVICE.apid),
EcssTcInSharedStoreConverter::new(tc_pool, 2048),
),
HkRequestConverter::default(),
DefaultActiveRequestMap::default(),
HkReplyHandler::default(),
request_router,
reply_receiver,
);
HkServiceWrapper {
service: pus_3_handler,
}
}
pub fn create_hk_service_dynamic(
tm_funnel_tx: mpsc::Sender<PusTmAsVec>,
pus_hk_rx: mpsc::Receiver<EcssTcAndToken>,
request_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<HkReply>>,
) -> HkServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
let pus_3_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_HK_SERVICE.id(),
pus_hk_rx,
tm_funnel_tx,
create_verification_reporter(PUS_HK_SERVICE.id(), PUS_HK_SERVICE.apid),
EcssTcInVecConverter::default(),
),
HkRequestConverter::default(),
DefaultActiveRequestMap::default(),
HkReplyHandler::default(),
request_router,
reply_receiver,
);
HkServiceWrapper {
service: pus_3_handler,
}
}
pub struct HkServiceWrapper<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter> {
pub(crate) service: PusTargetedRequestService<
MpscTcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
HkRequestConverter,
HkReplyHandler,
DefaultActiveRequestMap<ActivePusRequestStd>,
ActivePusRequestStd,
HkRequest,
HkReply,
>,
}
impl<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter>
HkServiceWrapper<TmSender, TcInMemConverter>
{
pub fn poll_and_handle_next_tc(&mut self, time_stamp: &[u8]) -> bool {
match self.service.poll_and_handle_next_tc(time_stamp) {
Ok(result) => match result {
PusPacketHandlerResult::RequestHandled => {}
PusPacketHandlerResult::RequestHandledPartialSuccess(e) => {
@ -260,4 +323,242 @@ impl<
}
false
}
pub fn poll_and_handle_next_reply(&mut self, time_stamp: &[u8]) -> HandlingStatus {
// This only fails if all senders disconnected. Treat it like an empty queue.
self.service
.poll_and_check_next_reply(time_stamp)
.unwrap_or_else(|e| {
warn!("PUS 3: Handling reply failed with error {e:?}");
HandlingStatus::Empty
})
}
pub fn check_for_request_timeouts(&mut self) {
self.service.check_for_request_timeouts();
}
}
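// Illustrative sketch, not part of the commit itself: the application data layout parsed by
// HkRequestConverter above, shown for an interval-modification TC. The helper is hypothetical.
#[allow(dead_code)]
fn example_modify_interval_app_data(target_id: u32, unique_id: u32, factor: u32) -> [u8; 12] {
let mut app_data = [0u8; 12];
// Bytes 0..4: unique target ID used for routing.
app_data[0..4].copy_from_slice(&target_id.to_be_bytes());
// Bytes 4..8: unique ID of the HK set.
app_data[4..8].copy_from_slice(&unique_id.to_be_bytes());
// Bytes 8..12: collection interval factor, only read by the interval subservices.
app_data[8..12].copy_from_slice(&factor.to_be_bytes());
app_data
}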
#[cfg(test)]
mod tests {
use satrs::pus::test_util::{
TEST_COMPONENT_ID_0, TEST_COMPONENT_ID_1, TEST_UNIQUE_ID_0, TEST_UNIQUE_ID_1,
};
use satrs::request::MessageMetadata;
use satrs::{
hk::HkRequestVariant,
pus::test_util::TEST_APID,
request::GenericMessage,
spacepackets::{
ecss::{hk::Subservice, tc::PusTcCreator},
SpHeader,
},
};
use satrs_example::config::tmtc_err;
use crate::pus::{
hk::HkReplyVariant,
tests::{PusConverterTestbench, ReplyHandlerTestbench},
};
use super::{HkReply, HkReplyHandler, HkRequestConverter};
#[test]
fn hk_converter_one_shot_req() {
let mut hk_bench =
PusConverterTestbench::new(TEST_COMPONENT_ID_0.id(), HkRequestConverter::default());
let sp_header = SpHeader::new_for_unseg_tc(TEST_APID, 0, 0);
let target_id = TEST_UNIQUE_ID_0;
let unique_id = 5_u32;
let mut app_data: [u8; 8] = [0; 8];
app_data[0..4].copy_from_slice(&target_id.to_be_bytes());
app_data[4..8].copy_from_slice(&unique_id.to_be_bytes());
let hk_req = PusTcCreator::new_simple(
sp_header,
3,
Subservice::TcGenerateOneShotHk as u8,
&app_data,
true,
);
let accepted_token = hk_bench.add_tc(&hk_req);
let (_active_req, req) = hk_bench
.convert(accepted_token, &[], TEST_APID, TEST_UNIQUE_ID_0)
.expect("conversion failed");
assert_eq!(req.unique_id, unique_id);
if let HkRequestVariant::OneShot = req.variant {
} else {
panic!("unexpected HK request")
}
}
#[test]
fn hk_converter_enable_periodic_generation() {
let mut hk_bench =
PusConverterTestbench::new(TEST_COMPONENT_ID_0.id(), HkRequestConverter::default());
let sp_header = SpHeader::new_for_unseg_tc(TEST_APID, 0, 0);
let target_id = TEST_UNIQUE_ID_0;
let unique_id = 5_u32;
let mut app_data: [u8; 8] = [0; 8];
app_data[0..4].copy_from_slice(&target_id.to_be_bytes());
app_data[4..8].copy_from_slice(&unique_id.to_be_bytes());
let mut generic_check = |tc: &PusTcCreator| {
let accepted_token = hk_bench.add_tc(tc);
let (_active_req, req) = hk_bench
.convert(accepted_token, &[], TEST_APID, TEST_UNIQUE_ID_0)
.expect("conversion failed");
assert_eq!(req.unique_id, unique_id);
if let HkRequestVariant::EnablePeriodic = req.variant {
} else {
panic!("unexpected HK request")
}
};
let tc0 = PusTcCreator::new_simple(
sp_header,
3,
Subservice::TcEnableHkGeneration as u8,
&app_data,
true,
);
generic_check(&tc0);
let tc1 = PusTcCreator::new_simple(
sp_header,
3,
Subservice::TcEnableDiagGeneration as u8,
&app_data,
true,
);
generic_check(&tc1);
}
#[test]
fn hk_conversion_disable_periodic_generation() {
let mut hk_bench =
PusConverterTestbench::new(TEST_COMPONENT_ID_0.id(), HkRequestConverter::default());
let sp_header = SpHeader::new_for_unseg_tc(TEST_APID, 0, 0);
let target_id = TEST_UNIQUE_ID_0;
let unique_id = 5_u32;
let mut app_data: [u8; 8] = [0; 8];
app_data[0..4].copy_from_slice(&target_id.to_be_bytes());
app_data[4..8].copy_from_slice(&unique_id.to_be_bytes());
let mut generic_check = |tc: &PusTcCreator| {
let accepted_token = hk_bench.add_tc(tc);
let (_active_req, req) = hk_bench
.convert(accepted_token, &[], TEST_APID, TEST_UNIQUE_ID_0)
.expect("conversion failed");
assert_eq!(req.unique_id, unique_id);
if let HkRequestVariant::DisablePeriodic = req.variant {
} else {
panic!("unexpected HK request")
}
};
let tc0 = PusTcCreator::new_simple(
sp_header,
3,
Subservice::TcDisableHkGeneration as u8,
&app_data,
true,
);
generic_check(&tc0);
let tc1 = PusTcCreator::new_simple(
sp_header,
3,
Subservice::TcDisableDiagGeneration as u8,
&app_data,
true,
);
generic_check(&tc1);
}
#[test]
fn hk_conversion_modify_interval() {
let mut hk_bench =
PusConverterTestbench::new(TEST_COMPONENT_ID_0.id(), HkRequestConverter::default());
let sp_header = SpHeader::new_for_unseg_tc(TEST_APID, 0, 0);
let target_id = TEST_UNIQUE_ID_0;
let unique_id = 5_u32;
let mut app_data: [u8; 12] = [0; 12];
let collection_interval_factor = 5_u32;
app_data[0..4].copy_from_slice(&target_id.to_be_bytes());
app_data[4..8].copy_from_slice(&unique_id.to_be_bytes());
app_data[8..12].copy_from_slice(&collection_interval_factor.to_be_bytes());
let mut generic_check = |tc: &PusTcCreator| {
let accepted_token = hk_bench.add_tc(tc);
let (_active_req, req) = hk_bench
.convert(accepted_token, &[], TEST_APID, TEST_UNIQUE_ID_0)
.expect("conversion failed");
assert_eq!(req.unique_id, unique_id);
if let HkRequestVariant::ModifyCollectionInterval(interval_factor) = req.variant {
assert_eq!(interval_factor, collection_interval_factor);
} else {
panic!("unexpected HK request")
}
};
let tc0 = PusTcCreator::new_simple(
sp_header,
3,
Subservice::TcModifyHkCollectionInterval as u8,
&app_data,
true,
);
generic_check(&tc0);
let tc1 = PusTcCreator::new_simple(
sp_header,
3,
Subservice::TcModifyDiagCollectionInterval as u8,
&app_data,
true,
);
generic_check(&tc1);
}
#[test]
fn hk_reply_handler() {
let mut reply_testbench =
ReplyHandlerTestbench::new(TEST_COMPONENT_ID_0.id(), HkReplyHandler::default());
let sender_id = 2_u64;
let apid_target_id = 3_u32;
let unique_id = 5_u32;
let (req_id, active_req) = reply_testbench.add_tc(TEST_APID, apid_target_id, &[]);
let reply = GenericMessage::new(
MessageMetadata::new(req_id.into(), sender_id),
HkReply::new(unique_id, HkReplyVariant::Ack),
);
let result = reply_testbench.handle_reply(&reply, &active_req, &[]);
assert!(result.is_ok());
assert!(result.unwrap());
reply_testbench
.verif_reporter
.assert_full_completion_success(TEST_COMPONENT_ID_0.raw(), req_id, None);
}
#[test]
fn reply_handling_unrequested_reply() {
let mut testbench =
ReplyHandlerTestbench::new(TEST_COMPONENT_ID_1.id(), HkReplyHandler::default());
let action_reply = HkReply::new(5_u32, HkReplyVariant::Ack);
let unrequested_reply =
GenericMessage::new(MessageMetadata::new(10_u32, 15_u64), action_reply);
// Right now this function does not do a lot. We simply check that it does not panic or do
// weird stuff.
let result = testbench.handle_unrequested_reply(&unrequested_reply);
assert!(result.is_ok());
}
#[test]
fn reply_handling_reply_timeout() {
let mut testbench =
ReplyHandlerTestbench::new(TEST_COMPONENT_ID_1.id(), HkReplyHandler::default());
let (req_id, active_request) = testbench.add_tc(TEST_APID, TEST_UNIQUE_ID_1, &[]);
let result = testbench.handle_request_timeout(&active_request, &[]);
assert!(result.is_ok());
testbench.verif_reporter.assert_completion_failure(
TEST_COMPONENT_ID_1.raw(),
req_id,
None,
tmtc_err::REQUEST_TIMEOUT.raw() as u64,
);
}
}

View File

@ -1,75 +1,80 @@
use crate::requests::GenericRequestRouter;
use crate::tmtc::MpscStoreAndSendError;
use log::warn;
use satrs::pus::verification::{
self, FailParams, TcStateAccepted, TcStateStarted, VerificationReporter,
VerificationReporterCfg, VerificationReportingProvider, VerificationToken,
};
use satrs::pus::{
ActiveRequestMapProvider, ActiveRequestProvider, EcssTcAndToken, EcssTcInMemConverter,
EcssTcReceiverCore, EcssTmSenderCore, EcssTmtcError, GenericConversionError,
GenericRoutingError, PusPacketHandlerResult, PusPacketHandlingError, PusReplyHandler,
PusRequestRouter, PusServiceHelper, PusTcToRequestConverter, TcInMemory,
};
use satrs::queue::GenericReceiveError;
use satrs::request::{Apid, GenericMessage, MessageMetadata};
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::PusServiceId;
use satrs::ComponentId;
use satrs_example::config::components::PUS_ROUTING_SERVICE;
use satrs_example::config::{tmtc_err, CustomPusServiceId};
use satrs_example::TimeStampHelper;
use std::fmt::Debug;
use std::sync::mpsc::{self, Sender};
pub mod action;
pub mod event;
pub mod hk;
pub mod mode;
pub mod scheduler;
pub mod stack;
pub mod test;
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum HandlingStatus {
Empty,
HandledOne,
}
pub fn create_verification_reporter(owner_id: ComponentId, apid: Apid) -> VerificationReporter {
let verif_cfg = VerificationReporterCfg::new(apid, 1, 2, 8).unwrap();
// Every software component which needs to generate verification telemetry gets a cloned
// verification reporter.
VerificationReporter::new(owner_id, &verif_cfg)
}
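// Illustrative sketch, not part of the commit itself: PusReceiver below builds its reporter
// exactly this way; every other service does the same with its own component constant.
#[allow(dead_code)]
fn example_routing_reporter() -> VerificationReporter {
create_verification_reporter(PUS_ROUTING_SERVICE.id(), PUS_ROUTING_SERVICE.apid)
}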
/// Simple router structure which forwards PUS telecommands to dedicated handlers.
pub struct PusTcMpscRouter {
pub test_tc_sender: Sender<EcssTcAndToken>,
pub event_tc_sender: Sender<EcssTcAndToken>,
pub sched_tc_sender: Sender<EcssTcAndToken>,
pub hk_tc_sender: Sender<EcssTcAndToken>,
pub action_tc_sender: Sender<EcssTcAndToken>,
pub mode_tc_sender: Sender<EcssTcAndToken>,
}
pub struct PusReceiver<TmSender: EcssTmSenderCore> {
pub id: ComponentId,
pub tm_sender: TmSender,
pub verif_reporter: VerificationReporter,
pub pus_router: PusTcMpscRouter,
stamp_helper: TimeStampHelper,
}
impl<TmSender: EcssTmSenderCore> PusReceiver<TmSender> {
pub fn new(tm_sender: TmSender, pus_router: PusTcMpscRouter) -> Self {
Self {
id: PUS_ROUTING_SERVICE.raw(),
tm_sender,
verif_reporter: create_verification_reporter(
PUS_ROUTING_SERVICE.id(),
PUS_ROUTING_SERVICE.apid,
),
pus_router,
stamp_helper: TimeStampHelper::default(),
}
}
}
impl<TmSender: EcssTmSenderCore> PusReceiver<TmSender> {
pub fn handle_tc_packet(
&mut self,
tc_in_memory: TcInMemory,
@ -80,41 +85,34 @@ impl<VerificationReporter: VerificationReportingProvider> PusReceiver<Verificati
self.stamp_helper.update_from_now();
let accepted_token = self
.verif_reporter
.acceptance_success(&self.tm_sender, init_token, self.stamp_helper.stamp())
.expect("Acceptance success failure");
let service = PusServiceId::try_from(service);
match service {
Ok(standard_service) => match standard_service {
PusServiceId::Test => self.pus_router.test_tc_sender.send(EcssTcAndToken {
tc_in_memory,
token: Some(accepted_token.into()),
})?,
PusServiceId::Housekeeping => {
self.pus_router.hk_tc_sender.send(EcssTcAndToken {
tc_in_memory,
token: Some(accepted_token.into()),
})?
}
PusServiceId::Event => self.pus_router.event_tc_sender.send(EcssTcAndToken {
tc_in_memory,
token: Some(accepted_token.into()),
})?,
PusServiceId::Scheduling => {
self.pus_router.sched_tc_sender.send(EcssTcAndToken {
tc_in_memory,
token: Some(accepted_token.into()),
})?
}
_ => {
let result = self.verif_reporter.start_failure(
&self.tm_sender,
accepted_token,
FailParams::new(
self.stamp_helper.stamp(),
@ -131,14 +129,17 @@ impl<VerificationReporter: VerificationReportingProvider> PusReceiver<Verificati
if let Ok(custom_service) = CustomPusServiceId::try_from(e.number) {
match custom_service {
CustomPusServiceId::Mode => {
self.pus_router.mode_tc_sender.send(EcssTcAndToken {
tc_in_memory,
token: Some(accepted_token.into()),
})?
}
CustomPusServiceId::Health => {}
}
} else {
self.verif_reporter
.start_failure(
&self.tm_sender,
accepted_token,
FailParams::new(
self.stamp_helper.stamp(),
@ -154,55 +155,550 @@ impl<VerificationReporter: VerificationReportingProvider> PusReceiver<Verificati
}
}
pub trait TargetedPusService {
/// Returns [true] if the packet handling is finished.
fn poll_and_handle_next_tc(&mut self, time_stamp: &[u8]) -> bool;
fn poll_and_handle_next_reply(&mut self, time_stamp: &[u8]) -> HandlingStatus;
fn check_for_request_timeouts(&mut self);
}
/// This is a generic handler class for all PUS services where a PUS telecommand is converted
/// to a targeted request.
///
/// The generic steps for this process are the following
///
/// 1. Poll for TC packets
/// 2. Convert the raw packets to a [PusTcReader].
/// 3. Convert the PUS TC to a typed request using the [PusTcToRequestConverter].
/// 4. Route the requests using the [GenericRequestRouter].
/// 5. Add the request to the active request map using the [ActiveRequestMapProvider] abstraction.
/// 6. Check for replies which complete the forwarded request. The handler takes care of
/// the verification process.
/// 7. Check for timeouts of active requests. Generally, the timeout on the service level should
/// be the highest expected timeout for the given target.
///
/// The handler exposes the following API:
///
/// 1. [Self::poll_and_handle_next_tc] which tries to poll and handle one TC packet, covering steps 1-5.
/// 2. [Self::poll_and_check_next_reply] which tries to poll and handle one reply, covering step 6.
/// 3. [Self::check_for_request_timeouts] which checks for request timeouts, covering step 7.
/// A minimal polling sketch is shown after the struct definition below.
pub struct PusTargetedRequestService<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
RequestConverter: PusTcToRequestConverter<ActiveRequestInfo, RequestType, Error = GenericConversionError>,
ReplyHandler: PusReplyHandler<ActiveRequestInfo, ReplyType, Error = EcssTmtcError>,
ActiveRequestMap: ActiveRequestMapProvider<ActiveRequestInfo>,
ActiveRequestInfo: ActiveRequestProvider,
RequestType,
ReplyType,
> {
pub service_helper:
PusServiceHelper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
pub request_router: GenericRequestRouter,
pub request_converter: RequestConverter,
pub active_request_map: ActiveRequestMap,
pub reply_handler: ReplyHandler,
pub reply_receiver: mpsc::Receiver<GenericMessage<ReplyType>>,
phantom: std::marker::PhantomData<(RequestType, ActiveRequestInfo, ReplyType)>,
}
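// Illustrative sketch, not part of the commit itself: the intended polling cadence for a
// fully wired instance `svc`, matching steps 1-7 of the doc comment above:
//
// svc.poll_and_handle_next_tc(&time_stamp)?; // steps 1-5: poll, convert, route, track
// svc.poll_and_check_next_reply(&time_stamp)?; // step 6: complete requests via replies
// svc.check_for_request_timeouts(); // step 7: fail requests which timed out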
impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
RequestConverter: PusTcToRequestConverter<ActiveRequestInfo, RequestType, Error = GenericConversionError>,
ReplyHandler: PusReplyHandler<ActiveRequestInfo, ReplyType, Error = EcssTmtcError>,
ActiveRequestMap: ActiveRequestMapProvider<ActiveRequestInfo>,
ActiveRequestInfo: ActiveRequestProvider,
RequestType,
ReplyType,
>
PusTargetedRequestService<
TcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
RequestConverter,
ReplyHandler,
ActiveRequestMap,
ActiveRequestInfo,
RequestType,
ReplyType,
>
where
GenericRequestRouter: PusRequestRouter<RequestType, Error = GenericRoutingError>,
{
pub fn new(
service_helper: PusServiceHelper<
TcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
>,
request_converter: RequestConverter,
active_request_map: ActiveRequestMap,
reply_hook: ReplyHandler,
request_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<ReplyType>>,
) -> Self {
Self {
service_helper,
request_converter,
active_request_map,
reply_handler: reply_hook,
request_router,
reply_receiver,
phantom: std::marker::PhantomData,
}
}
pub fn poll_and_handle_next_tc(
&mut self,
time_stamp: &[u8],
) -> Result<PusPacketHandlerResult, PusPacketHandlingError> {
let possible_packet = self.service_helper.retrieve_and_accept_next_packet()?;
if possible_packet.is_none() {
return Ok(PusPacketHandlerResult::Empty);
}
let ecss_tc_and_token = possible_packet.unwrap();
self.service_helper
.tc_in_mem_converter_mut()
.cache(&ecss_tc_and_token.tc_in_memory)?;
let tc = self.service_helper.tc_in_mem_converter().convert()?;
let (mut request_info, request) = match self.request_converter.convert(
ecss_tc_and_token.token,
&tc,
self.service_helper.tm_sender(),
&self.service_helper.common.verif_reporter,
time_stamp,
) {
Ok((info, req)) => (info, req),
Err(e) => {
self.handle_conversion_to_request_error(&e, ecss_tc_and_token.token, time_stamp);
return Err(e.into());
}
};
let accepted_token: VerificationToken<TcStateAccepted> = request_info
.token()
.try_into()
.expect("token not in expected accepted state");
let verif_request_id = verification::RequestId::new(&tc).raw();
match self.request_router.route(
MessageMetadata::new(verif_request_id, self.service_helper.id()),
request_info.target_id(),
request,
) {
Ok(()) => {
let started_token = self
.service_helper
.verif_reporter()
.start_success(
&self.service_helper.common.tm_sender,
accepted_token,
time_stamp,
)
.expect("Start success failure");
request_info.set_token(started_token.into());
self.active_request_map
.insert(&verif_request_id, request_info);
}
Err(e) => {
self.request_router.handle_error_generic(
&request_info,
&tc,
e.clone(),
self.service_helper.tm_sender(),
self.service_helper.verif_reporter(),
time_stamp,
);
return Err(e.into());
}
}
Ok(PusPacketHandlerResult::RequestHandled)
}
fn handle_conversion_to_request_error(
&mut self,
error: &GenericConversionError,
token: VerificationToken<TcStateAccepted>,
time_stamp: &[u8],
) {
match error {
GenericConversionError::WrongService(service) => {
let service_slice: [u8; 1] = [*service];
self.service_helper
.verif_reporter()
.completion_failure(
self.service_helper.tm_sender(),
token,
FailParams::new(time_stamp, &tmtc_err::INVALID_PUS_SERVICE, &service_slice),
)
.expect("Sending completion failure failed");
}
GenericRoutingError::SendError(_) => {
let mut fail_data: [u8; 8] = [0; 8];
fail_data.copy_from_slice(&target_id.to_be_bytes());
verif_reporter
.start_failure(
GenericConversionError::InvalidSubservice(subservice) => {
let subservice_slice: [u8; 1] = [*subservice];
self.service_helper
.verif_reporter()
.completion_failure(
self.service_helper.tm_sender(),
token,
FailParams::new(time_stamp, &tmtc_err::ROUTING_ERROR, &fail_data),
FailParams::new(
time_stamp,
&tmtc_err::INVALID_PUS_SUBSERVICE,
&subservice_slice,
),
)
.expect("Sending start failure failed");
.expect("Sending completion failure failed");
}
GenericRoutingError::NotEnoughAppData { expected, found } => {
let mut context_info = (found as u32).to_be_bytes().to_vec();
context_info.extend_from_slice(&(expected as u32).to_be_bytes());
verif_reporter
.start_failure(
GenericConversionError::NotEnoughAppData { expected, found } => {
let mut context_info = (*found as u32).to_be_bytes().to_vec();
context_info.extend_from_slice(&(*expected as u32).to_be_bytes());
self.service_helper
.verif_reporter()
.completion_failure(
self.service_helper.tm_sender(),
token,
FailParams::new(time_stamp, &tmtc_err::NOT_ENOUGH_APP_DATA, &context_info),
)
.expect("Sending start failure failed");
.expect("Sending completion failure failed");
}
// Do nothing.. this is service-level and can not be handled generically here.
GenericConversionError::InvalidAppData(_) => (),
}
}
pub fn poll_and_check_next_reply(
&mut self,
time_stamp: &[u8],
) -> Result<HandlingStatus, EcssTmtcError> {
match self.reply_receiver.try_recv() {
Ok(reply) => {
self.handle_reply(&reply, time_stamp)?;
Ok(HandlingStatus::HandledOne)
}
Err(e) => match e {
mpsc::TryRecvError::Empty => Ok(HandlingStatus::Empty),
mpsc::TryRecvError::Disconnected => Err(EcssTmtcError::Receive(
GenericReceiveError::TxDisconnected(None),
)),
},
}
}
pub fn handle_reply(
&mut self,
reply: &GenericMessage<ReplyType>,
time_stamp: &[u8],
) -> Result<(), EcssTmtcError> {
let active_req_opt = self.active_request_map.get(reply.request_id());
if active_req_opt.is_none() {
self.reply_handler
.handle_unrequested_reply(reply, &self.service_helper.common.tm_sender)?;
return Ok(());
}
let active_request = active_req_opt.unwrap();
let request_finished = self
.reply_handler
.handle_reply(
reply,
active_request,
&self.service_helper.common.tm_sender,
&self.service_helper.common.verif_reporter,
time_stamp,
)
.unwrap_or(false);
if request_finished {
self.active_request_map.remove(reply.request_id());
}
Ok(())
}
pub fn check_for_request_timeouts(&mut self) {
    let mut requests_to_delete = Vec::new();
    self.active_request_map
        .for_each(|request_id, request_info| {
            if request_info.has_timed_out() {
                requests_to_delete.push(*request_id);
            }
        });
    for request_id in requests_to_delete {
        self.active_request_map.remove(request_id);
    }
}
}
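// Illustrative driver sketch (not part of this commit): a task owning such a service
// would typically poll telecommands and replies until both queues run empty, and check
// for request timeouts once per cycle. `service` and `time_stamp` are assumed bindings.
//
//     service.poll_and_handle_next_tc(time_stamp)?;
//     loop {
//         match service.poll_and_check_next_reply(time_stamp)? {
//             HandlingStatus::HandledOne => continue,
//             HandlingStatus::Empty => break,
//         }
//     }
//     service.check_for_request_timeouts();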
/// Generic timeout handling: Handle the verification failure with a dedicated return code
/// and also log the error.
pub fn generic_pus_request_timeout_handler(
sender: &(impl EcssTmSenderCore + ?Sized),
active_request: &(impl ActiveRequestProvider + Debug),
verification_handler: &impl VerificationReportingProvider,
time_stamp: &[u8],
service_str: &'static str,
) -> Result<(), EcssTmtcError> {
log::warn!("timeout for active request {active_request:?} on {service_str} service");
let started_token: VerificationToken<TcStateStarted> = active_request
.token()
.try_into()
.expect("token not in expected started state");
verification_handler.completion_failure(
sender,
started_token,
FailParams::new(time_stamp, &tmtc_err::REQUEST_TIMEOUT, &[]),
)?;
Ok(())
}
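// Sketch of a concrete [PusReplyHandler::handle_request_timeout] implementation which
// delegates to the helper above (the mode service reply handler does exactly this; the
// "example" service string is a placeholder):
//
//     fn handle_request_timeout(
//         &mut self,
//         active_request: &ActivePusRequestStd,
//         tm_sender: &impl EcssTmSenderCore,
//         verification_handler: &impl VerificationReportingProvider,
//         time_stamp: &[u8],
//     ) -> Result<(), Self::Error> {
//         generic_pus_request_timeout_handler(
//             tm_sender,
//             active_request,
//             verification_handler,
//             time_stamp,
//             "example",
//         )
//     }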
#[cfg(test)]
pub(crate) mod tests {
use std::time::Duration;
use satrs::pus::test_util::TEST_COMPONENT_ID_0;
use satrs::pus::{MpscTmAsVecSender, PusTmAsVec, PusTmVariant};
use satrs::request::RequestId;
use satrs::{
pus::{
verification::test_util::TestVerificationReporter, ActivePusRequestStd,
ActiveRequestMapProvider, EcssTcInVecConverter, MpscTcReceiver,
},
request::UniqueApidTargetId,
spacepackets::{
ecss::{
tc::{PusTcCreator, PusTcSecondaryHeader},
WritablePusPacket,
},
SpHeader,
},
};
use crate::requests::CompositeRequest;
use super::*;
// Testbench dedicated to the testing of [PusReplyHandler]s
pub struct ReplyHandlerTestbench<
ReplyHandler: PusReplyHandler<ActiveRequestInfo, Reply, Error = EcssTmtcError>,
ActiveRequestInfo: ActiveRequestProvider,
Reply,
> {
pub id: ComponentId,
pub verif_reporter: TestVerificationReporter,
pub reply_handler: ReplyHandler,
pub tm_receiver: mpsc::Receiver<PusTmAsVec>,
pub default_timeout: Duration,
tm_sender: MpscTmAsVecSender,
phantom: std::marker::PhantomData<(ActiveRequestInfo, Reply)>,
}
impl<
ReplyHandler: PusReplyHandler<ActiveRequestInfo, Reply, Error = EcssTmtcError>,
ActiveRequestInfo: ActiveRequestProvider,
Reply,
> ReplyHandlerTestbench<ReplyHandler, ActiveRequestInfo, Reply>
{
pub fn new(owner_id: ComponentId, reply_handler: ReplyHandler) -> Self {
let test_verif_reporter = TestVerificationReporter::new(owner_id);
let (tm_sender, tm_receiver) = mpsc::channel();
Self {
id: TEST_COMPONENT_ID_0.raw(),
verif_reporter: test_verif_reporter,
reply_handler,
default_timeout: Duration::from_secs(30),
tm_sender,
tm_receiver,
phantom: std::marker::PhantomData,
}
}
pub fn add_tc(
&mut self,
apid: u16,
apid_target: u32,
time_stamp: &[u8],
) -> (verification::RequestId, ActivePusRequestStd) {
let sp_header = SpHeader::new_from_apid(apid);
let sec_header_dummy = PusTcSecondaryHeader::new_simple(0, 0);
let init = self.verif_reporter.add_tc(&PusTcCreator::new(
sp_header,
sec_header_dummy,
&[],
true,
));
let accepted = self
.verif_reporter
.acceptance_success(&self.tm_sender, init, time_stamp)
.expect("acceptance failed");
let started = self
.verif_reporter
.start_success(&self.tm_sender, accepted, time_stamp)
.expect("start failed");
(
started.request_id(),
ActivePusRequestStd::new(
UniqueApidTargetId::new(apid, apid_target).raw(),
started,
self.default_timeout,
),
)
}
pub fn handle_reply(
&mut self,
reply: &GenericMessage<Reply>,
active_request: &ActiveRequestInfo,
time_stamp: &[u8],
) -> Result<bool, ReplyHandler::Error> {
self.reply_handler.handle_reply(
reply,
active_request,
&self.tm_sender,
&self.verif_reporter,
time_stamp,
)
}
pub fn handle_unrequested_reply(
&mut self,
reply: &GenericMessage<Reply>,
) -> Result<(), ReplyHandler::Error> {
self.reply_handler
.handle_unrequested_reply(reply, &self.tm_sender)
}
pub fn handle_request_timeout(
&mut self,
active_request_info: &ActiveRequestInfo,
time_stamp: &[u8],
) -> Result<(), ReplyHandler::Error> {
self.reply_handler.handle_request_timeout(
active_request_info,
&self.tm_sender,
&self.verif_reporter,
time_stamp,
)
}
}
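// Typical reply handler testbench flow, mirroring the reply handler tests in the
// service modules: register a TC to obtain an active request, then feed a reply or
// trigger a timeout. `owner_id`, `reply_handler`, `apid` and `unique_id` are
// placeholder bindings.
//
//     let mut testbench = ReplyHandlerTestbench::new(owner_id, reply_handler);
//     let (req_id, active_request) = testbench.add_tc(apid, unique_id, &[]);
//     testbench
//         .handle_request_timeout(&active_request, &[])
//         .expect("timeout handling failed");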
/// Dummy sender component which does nothing on the [Self::send_tm] call.
///
/// Useful for unit tests.
#[derive(Default)]
pub struct DummySender {}

impl EcssTmSenderCore for DummySender {
fn send_tm(&self, _source_id: ComponentId, _tm: PusTmVariant) -> Result<(), EcssTmtcError> {
Ok(())
}
}
// Testbench dedicated to the testing of [PusTcToRequestConverter]s
pub struct PusConverterTestbench<
Converter: PusTcToRequestConverter<ActiveRequestInfo, Request, Error = GenericConversionError>,
ActiveRequestInfo: ActiveRequestProvider,
Request,
> {
pub id: ComponentId,
pub verif_reporter: TestVerificationReporter,
pub converter: Converter,
dummy_sender: DummySender,
current_request_id: Option<verification::RequestId>,
current_packet: Option<Vec<u8>>,
phantom: std::marker::PhantomData<(ActiveRequestInfo, Request)>,
}
impl<
Converter: PusTcToRequestConverter<ActiveRequestInfo, Request, Error = GenericConversionError>,
ActiveRequestInfo: ActiveRequestProvider,
Request,
> PusConverterTestbench<Converter, ActiveRequestInfo, Request>
{
pub fn new(owner_id: ComponentId, converter: Converter) -> Self {
let test_verif_reporter = TestVerificationReporter::new(owner_id);
Self {
id: owner_id,
verif_reporter: test_verif_reporter,
converter,
dummy_sender: DummySender::default(),
current_request_id: None,
current_packet: None,
phantom: std::marker::PhantomData,
}
}
pub fn add_tc(&mut self, tc: &PusTcCreator) -> VerificationToken<TcStateAccepted> {
let token = self.verif_reporter.add_tc(tc);
self.current_request_id = Some(verification::RequestId::new(tc));
self.current_packet = Some(tc.to_vec().unwrap());
self.verif_reporter
.acceptance_success(&self.dummy_sender, token, &[])
.expect("acceptance failed")
}
pub fn request_id(&self) -> Option<verification::RequestId> {
self.current_request_id
}
pub fn convert(
&mut self,
token: VerificationToken<TcStateAccepted>,
time_stamp: &[u8],
expected_apid: u16,
expected_apid_target: u32,
) -> Result<(ActiveRequestInfo, Request), Converter::Error> {
if self.current_packet.is_none() {
return Err(GenericConversionError::InvalidAppData(
"call add_tc first".to_string(),
));
}
let current_packet = self.current_packet.take().unwrap();
let tc_reader = PusTcReader::new(&current_packet).unwrap();
let (active_info, request) = self.converter.convert(
token,
&tc_reader.0,
&self.dummy_sender,
&self.verif_reporter,
time_stamp,
)?;
assert_eq!(
active_info.token().request_id(),
self.request_id().expect("no request id is set")
);
assert_eq!(
active_info.target_id(),
UniqueApidTargetId::new(expected_apid, expected_apid_target).raw()
);
Ok((active_info, request))
}
}
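// Typical converter testbench flow, as exercised by the mode service tests further
// below: register a TC, convert it, and verify the resulting request. `converter` and
// `tc` are placeholder bindings.
//
//     let mut testbench = PusConverterTestbench::new(TEST_COMPONENT_ID_0.id(), converter);
//     let token = testbench.add_tc(&tc);
//     let (_active_info, request) = testbench
//         .convert(token, &[], expected_apid, expected_unique_id)
//         .expect("conversion failed");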
pub struct TargetedPusRequestTestbench<
RequestConverter: PusTcToRequestConverter<ActiveRequestInfo, RequestType, Error = GenericConversionError>,
ReplyHandler: PusReplyHandler<ActiveRequestInfo, ReplyType, Error = EcssTmtcError>,
ActiveRequestMap: ActiveRequestMapProvider<ActiveRequestInfo>,
ActiveRequestInfo: ActiveRequestProvider,
RequestType,
ReplyType,
> {
pub service: PusTargetedRequestService<
MpscTcReceiver,
MpscTmAsVecSender,
EcssTcInVecConverter,
TestVerificationReporter,
RequestConverter,
ReplyHandler,
ActiveRequestMap,
ActiveRequestInfo,
RequestType,
ReplyType,
>,
pub request_id: Option<RequestId>,
pub tm_funnel_rx: mpsc::Receiver<PusTmAsVec>,
pub pus_packet_tx: mpsc::Sender<EcssTcAndToken>,
pub reply_tx: mpsc::Sender<GenericMessage<ReplyType>>,
pub request_rx: mpsc::Receiver<GenericMessage<CompositeRequest>>,
}
}

View File

@ -0,0 +1,434 @@
use derive_new::new;
use log::{error, warn};
use std::sync::mpsc;
use std::time::Duration;
use crate::requests::GenericRequestRouter;
use satrs::pool::SharedStaticMemoryPool;
use satrs::pus::verification::VerificationReporter;
use satrs::pus::{
DefaultActiveRequestMap, EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter,
EcssTcInVecConverter, MpscTcReceiver, MpscTmAsVecSender, MpscTmInSharedPoolSenderBounded,
PusPacketHandlerResult, PusServiceHelper, PusTmAsVec, PusTmInPool, TmInSharedPoolSender,
};
use satrs::request::GenericMessage;
use satrs::{
mode::{ModeAndSubmode, ModeReply, ModeRequest},
pus::{
mode::Subservice,
verification::{
self, FailParams, TcStateAccepted, TcStateStarted, VerificationReportingProvider,
VerificationToken,
},
ActivePusRequestStd, ActiveRequestProvider, EcssTmSenderCore, EcssTmtcError,
GenericConversionError, PusReplyHandler, PusTcToRequestConverter, PusTmVariant,
},
request::UniqueApidTargetId,
spacepackets::{
ecss::{
tc::PusTcReader,
tm::{PusTmCreator, PusTmSecondaryHeader},
PusPacket,
},
SpHeader,
},
ComponentId,
};
use satrs_example::config::components::PUS_MODE_SERVICE;
use satrs_example::config::{mode_err, tmtc_err};
use super::{
create_verification_reporter, generic_pus_request_timeout_handler, HandlingStatus,
PusTargetedRequestService, TargetedPusService,
};
#[derive(new)]
pub struct ModeReplyHandler {
owner_id: ComponentId,
}
impl PusReplyHandler<ActivePusRequestStd, ModeReply> for ModeReplyHandler {
type Error = EcssTmtcError;
fn handle_unrequested_reply(
&mut self,
reply: &GenericMessage<ModeReply>,
_tm_sender: &impl EcssTmSenderCore,
) -> Result<(), Self::Error> {
log::warn!("received unexpected reply for mode service 5: {reply:?}");
Ok(())
}
fn handle_reply(
&mut self,
reply: &GenericMessage<ModeReply>,
active_request: &ActivePusRequestStd,
tm_sender: &impl EcssTmSenderCore,
verification_handler: &impl VerificationReportingProvider,
time_stamp: &[u8],
) -> Result<bool, Self::Error> {
let started_token: VerificationToken<TcStateStarted> = active_request
.token()
.try_into()
.expect("invalid token state");
match reply.message {
ModeReply::ModeReply(mode_reply) => {
let mut source_data: [u8; 12] = [0; 12];
mode_reply
.write_to_be_bytes(&mut source_data)
.expect("writing mode reply failed");
let req_id = verification::RequestId::from(reply.request_id());
let sp_header = SpHeader::new_for_unseg_tm(req_id.packet_id().apid(), 0, 0);
let sec_header =
PusTmSecondaryHeader::new(200, Subservice::TmModeReply as u8, 0, 0, time_stamp);
let pus_tm = PusTmCreator::new(sp_header, sec_header, &source_data, true);
tm_sender.send_tm(self.owner_id, PusTmVariant::Direct(pus_tm))?;
verification_handler.completion_success(tm_sender, started_token, time_stamp)?;
}
ModeReply::CantReachMode(error_code) => {
verification_handler.completion_failure(
tm_sender,
started_token,
FailParams::new(time_stamp, &error_code, &[]),
)?;
}
ModeReply::WrongMode { expected, reached } => {
let mut error_info: [u8; 24] = [0; 24];
let mut written_len = expected
.write_to_be_bytes(&mut error_info[0..ModeAndSubmode::RAW_LEN])
.expect("writing expected mode failed");
written_len += reached
.write_to_be_bytes(&mut error_info[ModeAndSubmode::RAW_LEN..])
.expect("writing reached mode failed");
verification_handler.completion_failure(
tm_sender,
started_token,
FailParams::new(
time_stamp,
&mode_err::WRONG_MODE,
&error_info[..written_len],
),
)?;
}
};
Ok(true)
}
fn handle_request_timeout(
&mut self,
active_request: &ActivePusRequestStd,
tm_sender: &impl EcssTmSenderCore,
verification_handler: &impl VerificationReportingProvider,
time_stamp: &[u8],
) -> Result<(), Self::Error> {
generic_pus_request_timeout_handler(
tm_sender,
active_request,
verification_handler,
time_stamp,
"HK",
)?;
Ok(())
}
}
#[derive(Default)]
pub struct ModeRequestConverter {}
impl PusTcToRequestConverter<ActivePusRequestStd, ModeRequest> for ModeRequestConverter {
type Error = GenericConversionError;
fn convert(
&mut self,
token: VerificationToken<TcStateAccepted>,
tc: &PusTcReader,
tm_sender: &(impl EcssTmSenderCore + ?Sized),
verif_reporter: &impl VerificationReportingProvider,
time_stamp: &[u8],
) -> Result<(ActivePusRequestStd, ModeRequest), Self::Error> {
let subservice = tc.subservice();
let user_data = tc.user_data();
let not_enough_app_data = |expected: usize| {
verif_reporter
.start_failure(
tm_sender,
token,
FailParams::new_no_fail_data(time_stamp, &tmtc_err::NOT_ENOUGH_APP_DATA),
)
.expect("Sending start failure failed");
Err(GenericConversionError::NotEnoughAppData {
expected,
found: user_data.len(),
})
};
if user_data.len() < core::mem::size_of::<u32>() {
return not_enough_app_data(4);
}
let target_id_and_apid = UniqueApidTargetId::from_pus_tc(tc).unwrap();
let active_request =
ActivePusRequestStd::new(target_id_and_apid.into(), token, Duration::from_secs(30));
let subservice_typed = Subservice::try_from(subservice);
let invalid_subservice = || {
// Invalid subservice
verif_reporter
.start_failure(
tm_sender,
token,
FailParams::new_no_fail_data(time_stamp, &tmtc_err::INVALID_PUS_SUBSERVICE),
)
.expect("Sending start failure failed");
Err(GenericConversionError::InvalidSubservice(subservice))
};
if subservice_typed.is_err() {
return invalid_subservice();
}
let subservice_typed = subservice_typed.unwrap();
match subservice_typed {
Subservice::TcSetMode => {
if user_data.len() < core::mem::size_of::<u32>() + ModeAndSubmode::RAW_LEN {
return not_enough_app_data(4 + ModeAndSubmode::RAW_LEN);
}
let mode_and_submode = ModeAndSubmode::from_be_bytes(&tc.user_data()[4..])
.expect("mode and submode extraction failed");
Ok((active_request, ModeRequest::SetMode(mode_and_submode)))
}
Subservice::TcReadMode => Ok((active_request, ModeRequest::ReadMode)),
Subservice::TcAnnounceMode => Ok((active_request, ModeRequest::AnnounceMode)),
Subservice::TcAnnounceModeRecursive => {
Ok((active_request, ModeRequest::AnnounceModeRecursive))
}
_ => invalid_subservice(),
}
}
}
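// Expected application data layout for the mode TCs handled above (big endian; this
// summary is derived from the converter logic itself, not separate documentation):
//
//     [0..4]: u32 unique target ID (combined with the packet APID into a UniqueApidTargetId)
//     [4..4 + ModeAndSubmode::RAW_LEN]: raw mode and submode (TcSetMode only)
//
// A matching TcSetMode application data buffer can be assembled like this (sketch;
// `target_unique_id`, `mode` and `submode` are assumed bindings):
//
//     let mut app_data = [0u8; 4 + ModeAndSubmode::RAW_LEN];
//     app_data[0..4].copy_from_slice(&target_unique_id.to_be_bytes());
//     ModeAndSubmode::new(mode, submode)
//         .write_to_be_bytes(&mut app_data[4..])
//         .expect("writing mode and submode failed");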
pub fn create_mode_service_static(
tm_sender: TmInSharedPoolSender<mpsc::SyncSender<PusTmInPool>>,
tc_pool: SharedStaticMemoryPool,
pus_mode_rx: mpsc::Receiver<EcssTcAndToken>,
mode_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<ModeReply>>,
) -> ModeServiceWrapper<MpscTmInSharedPoolSenderBounded, EcssTcInSharedStoreConverter> {
let mode_request_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_MODE_SERVICE.id(),
pus_mode_rx,
tm_sender,
create_verification_reporter(PUS_MODE_SERVICE.id(), PUS_MODE_SERVICE.apid),
EcssTcInSharedStoreConverter::new(tc_pool, 2048),
),
ModeRequestConverter::default(),
DefaultActiveRequestMap::default(),
ModeReplyHandler::new(PUS_MODE_SERVICE.id()),
mode_router,
reply_receiver,
);
ModeServiceWrapper {
service: mode_request_handler,
}
}
pub fn create_mode_service_dynamic(
tm_funnel_tx: mpsc::Sender<PusTmAsVec>,
pus_mode_rx: mpsc::Receiver<EcssTcAndToken>,
mode_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<ModeReply>>,
) -> ModeServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
let mode_request_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_MODE_SERVICE.id(),
pus_mode_rx,
tm_funnel_tx,
create_verification_reporter(PUS_MODE_SERVICE.id(), PUS_MODE_SERVICE.apid),
EcssTcInVecConverter::default(),
),
ModeRequestConverter::default(),
DefaultActiveRequestMap::default(),
ModeReplyHandler::new(PUS_MODE_SERVICE.id()),
mode_router,
reply_receiver,
);
ModeServiceWrapper {
service: mode_request_handler,
}
}
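// Wiring sketch for the dynamic variant (not part of this commit; all channel bindings
// are hypothetical and `mode_router` stands for a populated GenericRequestRouter):
//
//     let (tm_funnel_tx, _tm_funnel_rx) = mpsc::channel();
//     let (_pus_mode_tx, pus_mode_rx) = mpsc::channel();
//     let (_mode_reply_tx, mode_reply_rx) = mpsc::channel();
//     let mode_srv =
//         create_mode_service_dynamic(tm_funnel_tx, pus_mode_rx, mode_router, mode_reply_rx);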
pub struct ModeServiceWrapper<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter> {
pub(crate) service: PusTargetedRequestService<
MpscTcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
ModeRequestConverter,
ModeReplyHandler,
DefaultActiveRequestMap<ActivePusRequestStd>,
ActivePusRequestStd,
ModeRequest,
ModeReply,
>,
}
impl<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter> TargetedPusService
for ModeServiceWrapper<TmSender, TcInMemConverter>
{
/// Returns true if packet handling is finished, i.e. no more telecommands are pending.
fn poll_and_handle_next_tc(&mut self, time_stamp: &[u8]) -> bool {
match self.service.poll_and_handle_next_tc(time_stamp) {
Ok(result) => match result {
PusPacketHandlerResult::RequestHandled => {}
PusPacketHandlerResult::RequestHandledPartialSuccess(e) => {
warn!("PUS mode service: partial packet handling success: {e:?}")
}
PusPacketHandlerResult::CustomSubservice(invalid, _) => {
warn!("PUS mode service: invalid subservice {invalid}");
}
PusPacketHandlerResult::SubserviceNotImplemented(subservice, _) => {
warn!("PUS mode service: {subservice} not implemented");
}
PusPacketHandlerResult::Empty => {
return true;
}
},
Err(error) => {
error!("PUS mode service: packet handling error: {error:?}")
}
}
false
}
fn poll_and_handle_next_reply(&mut self, time_stamp: &[u8]) -> HandlingStatus {
self.service
.poll_and_check_next_reply(time_stamp)
.unwrap_or_else(|e| {
warn!("PUS action service: Handling reply failed with error {e:?}");
HandlingStatus::HandledOne
})
}
fn check_for_request_timeouts(&mut self) {
self.service.check_for_request_timeouts();
}
}
#[cfg(test)]
mod tests {
use satrs::pus::test_util::{TEST_APID, TEST_COMPONENT_ID_0, TEST_UNIQUE_ID_0};
use satrs::request::MessageMetadata;
use satrs::{
mode::{ModeAndSubmode, ModeReply, ModeRequest},
pus::mode::Subservice,
request::GenericMessage,
spacepackets::{
ecss::tc::{PusTcCreator, PusTcSecondaryHeader},
SpHeader,
},
};
use satrs_example::config::tmtc_err;
use crate::pus::{
mode::ModeReplyHandler,
tests::{PusConverterTestbench, ReplyHandlerTestbench},
};
use super::ModeRequestConverter;
#[test]
fn mode_converter_read_mode_request() {
let mut testbench =
PusConverterTestbench::new(TEST_COMPONENT_ID_0.id(), ModeRequestConverter::default());
let sp_header = SpHeader::new_for_unseg_tc(TEST_APID, 0, 0);
let sec_header = PusTcSecondaryHeader::new_simple(200, Subservice::TcReadMode as u8);
let mut app_data: [u8; 4] = [0; 4];
app_data[0..4].copy_from_slice(&TEST_UNIQUE_ID_0.to_be_bytes());
let tc = PusTcCreator::new(sp_header, sec_header, &app_data, true);
let token = testbench.add_tc(&tc);
let (_active_req, req) = testbench
.convert(token, &[], TEST_APID, TEST_UNIQUE_ID_0)
.expect("conversion has failed");
assert_eq!(req, ModeRequest::ReadMode);
}
#[test]
fn mode_converter_set_mode_request() {
let mut testbench =
PusConverterTestbench::new(TEST_COMPONENT_ID_0.id(), ModeRequestConverter::default());
let sp_header = SpHeader::new_for_unseg_tc(TEST_APID, 0, 0);
let sec_header = PusTcSecondaryHeader::new_simple(200, Subservice::TcSetMode as u8);
let mut app_data: [u8; 4 + ModeAndSubmode::RAW_LEN] = [0; 4 + ModeAndSubmode::RAW_LEN];
let mode_and_submode = ModeAndSubmode::new(2, 1);
app_data[0..4].copy_from_slice(&TEST_UNIQUE_ID_0.to_be_bytes());
mode_and_submode
.write_to_be_bytes(&mut app_data[4..])
.unwrap();
let tc = PusTcCreator::new(sp_header, sec_header, &app_data, true);
let token = testbench.add_tc(&tc);
let (_active_req, req) = testbench
.convert(token, &[], TEST_APID, TEST_UNIQUE_ID_0)
.expect("conversion has failed");
assert_eq!(req, ModeRequest::SetMode(mode_and_submode));
}
#[test]
fn mode_converter_announce_mode() {
let mut testbench =
PusConverterTestbench::new(TEST_COMPONENT_ID_0.id(), ModeRequestConverter::default());
let sp_header = SpHeader::new_for_unseg_tc(TEST_APID, 0, 0);
let sec_header = PusTcSecondaryHeader::new_simple(200, Subservice::TcAnnounceMode as u8);
let mut app_data: [u8; 4] = [0; 4];
app_data[0..4].copy_from_slice(&TEST_UNIQUE_ID_0.to_be_bytes());
let tc = PusTcCreator::new(sp_header, sec_header, &app_data, true);
let token = testbench.add_tc(&tc);
let (_active_req, req) = testbench
.convert(token, &[], TEST_APID, TEST_UNIQUE_ID_0)
.expect("conversion has failed");
assert_eq!(req, ModeRequest::AnnounceMode);
}
#[test]
fn mode_converter_announce_mode_recursively() {
let mut testbench =
PusConverterTestbench::new(TEST_COMPONENT_ID_0.id(), ModeRequestConverter::default());
let sp_header = SpHeader::new_for_unseg_tc(TEST_APID, 0, 0);
let sec_header =
PusTcSecondaryHeader::new_simple(200, Subservice::TcAnnounceModeRecursive as u8);
let mut app_data: [u8; 4] = [0; 4];
app_data[0..4].copy_from_slice(&TEST_UNIQUE_ID_0.to_be_bytes());
let tc = PusTcCreator::new(sp_header, sec_header, &app_data, true);
let token = testbench.add_tc(&tc);
let (_active_req, req) = testbench
.convert(token, &[], TEST_APID, TEST_UNIQUE_ID_0)
.expect("conversion has failed");
assert_eq!(req, ModeRequest::AnnounceModeRecursive);
}
#[test]
fn reply_handling_unrequested_reply() {
let mut testbench = ReplyHandlerTestbench::new(
TEST_COMPONENT_ID_0.id(),
ModeReplyHandler::new(TEST_COMPONENT_ID_0.id()),
);
let mode_reply = ModeReply::ModeReply(ModeAndSubmode::new(5, 1));
let unrequested_reply =
GenericMessage::new(MessageMetadata::new(10_u32, 15_u64), mode_reply);
// Right now this function does not do a lot. We simply check that it does not panic or do
// weird stuff.
let result = testbench.handle_unrequested_reply(&unrequested_reply);
assert!(result.is_ok());
}
#[test]
fn reply_handling_reply_timeout() {
let mut testbench = ReplyHandlerTestbench::new(
TEST_COMPONENT_ID_0.id(),
ModeReplyHandler::new(TEST_COMPONENT_ID_0.id()),
);
let (req_id, active_request) = testbench.add_tc(TEST_APID, TEST_UNIQUE_ID_0, &[]);
let result = testbench.handle_request_timeout(&active_request, &[]);
assert!(result.is_ok());
testbench.verif_reporter.assert_completion_failure(
TEST_COMPONENT_ID_0.raw(),
req_id,
None,
tmtc_err::REQUEST_TIMEOUT.raw() as u64,
);
}
}

View File

@ -1,23 +1,18 @@
use std::sync::mpsc;
use std::time::Duration;
use crate::pus::create_verification_reporter;
use log::{error, info, warn};
use satrs::pool::{PoolProvider, StaticMemoryPool, StoreAddr};
use satrs::pool::{PoolProvider, StaticMemoryPool};
use satrs::pus::scheduler::{PusScheduler, TcInfo};
use satrs::pus::scheduler_srv::PusService11SchedHandler;
use satrs::pus::verification::std_mod::{
VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
};
use satrs::pus::verification::VerificationReportingProvider;
use satrs::pus::scheduler_srv::PusSchedServiceHandler;
use satrs::pus::verification::VerificationReporter;
use satrs::pus::{
EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter,
EcssTcReceiverCore, EcssTmSenderCore, MpscTcReceiver, PusPacketHandlerResult, PusServiceHelper,
TmAsVecSenderWithId, TmAsVecSenderWithMpsc, TmInSharedPoolSenderWithBoundedMpsc,
TmInSharedPoolSenderWithId,
EcssTmSenderCore, MpscTcReceiver, MpscTmAsVecSender, MpscTmInSharedPoolSenderBounded,
PusPacketHandlerResult, PusServiceHelper, PusTmAsVec, PusTmInPool, TmInSharedPoolSender,
};
use satrs::tmtc::tm_helper::SharedTmPool;
use satrs::ChannelId;
use satrs_example::config::{TcReceiverId, TmSenderId, PUS_APID};
use satrs_example::config::components::PUS_SCHED_SERVICE;
use crate::tmtc::PusTcSourceProviderSharedPool;
@ -55,14 +50,12 @@ impl TcReleaser for mpsc::Sender<Vec<u8>> {
}
}
pub struct Pus11Wrapper<
TcReceiver: EcssTcReceiverCore,
pub struct SchedulingServiceWrapper<
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> {
pub pus_11_handler: PusService11SchedHandler<
TcReceiver,
pub pus_11_handler: PusSchedServiceHandler<
MpscTcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
@ -73,12 +66,8 @@ pub struct Pus11Wrapper<
pub tc_releaser: Box<dyn TcReleaser + Send>,
}
impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> Pus11Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
impl<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter>
SchedulingServiceWrapper<TmSender, TcInMemConverter>
{
pub fn release_tcs(&mut self) {
let releaser = |enabled: bool, info: &TcInfo, tc: &[u8]| -> bool {
@ -103,8 +92,11 @@ impl<
}
}
pub fn handle_next_packet(&mut self) -> bool {
match self.pus_11_handler.handle_one_tc(&mut self.sched_tc_pool) {
pub fn poll_and_handle_next_tc(&mut self, time_stamp: &[u8]) -> bool {
match self
.pus_11_handler
.poll_and_handle_next_tc(time_stamp, &mut self.sched_tc_pool)
{
Ok(result) => match result {
PusPacketHandlerResult::RequestHandled => {}
PusPacketHandlerResult::RequestHandledPartialSuccess(e) => {
@ -129,42 +121,24 @@ impl<
}
pub fn create_scheduler_service_static(
shared_tm_store: SharedTmPool,
tm_funnel_tx: mpsc::SyncSender<StoreAddr>,
verif_reporter: VerificationReporterWithSharedPoolMpscBoundedSender,
tm_sender: TmInSharedPoolSender<mpsc::SyncSender<PusTmInPool>>,
tc_releaser: PusTcSourceProviderSharedPool,
pus_sched_rx: mpsc::Receiver<EcssTcAndToken>,
sched_tc_pool: StaticMemoryPool,
) -> Pus11Wrapper<
MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
> {
let sched_srv_tm_sender = TmInSharedPoolSenderWithId::new(
TmSenderId::PusSched as ChannelId,
"PUS_11_TM_SENDER",
shared_tm_store.clone(),
tm_funnel_tx.clone(),
);
let sched_srv_receiver = MpscTcReceiver::new(
TcReceiverId::PusSched as ChannelId,
"PUS_11_TC_RECV",
pus_sched_rx,
);
) -> SchedulingServiceWrapper<MpscTmInSharedPoolSenderBounded, EcssTcInSharedStoreConverter> {
let scheduler = PusScheduler::new_with_current_init_time(Duration::from_secs(5))
.expect("Creating PUS Scheduler failed");
let pus_11_handler = PusService11SchedHandler::new(
let pus_11_handler = PusSchedServiceHandler::new(
PusServiceHelper::new(
sched_srv_receiver,
sched_srv_tm_sender,
PUS_APID,
verif_reporter.clone(),
PUS_SCHED_SERVICE.id(),
pus_sched_rx,
tm_sender,
create_verification_reporter(PUS_SCHED_SERVICE.id(), PUS_SCHED_SERVICE.apid),
EcssTcInSharedStoreConverter::new(tc_releaser.clone_backing_pool(), 2048),
),
scheduler,
);
Pus11Wrapper {
SchedulingServiceWrapper {
pus_11_handler,
sched_tc_pool,
releaser_buf: [0; 4096],
@ -173,40 +147,26 @@ pub fn create_scheduler_service_static(
}
pub fn create_scheduler_service_dynamic(
tm_funnel_tx: mpsc::Sender<Vec<u8>>,
verif_reporter: VerificationReporterWithVecMpscSender,
tm_funnel_tx: mpsc::Sender<PusTmAsVec>,
tc_source_sender: mpsc::Sender<Vec<u8>>,
pus_sched_rx: mpsc::Receiver<EcssTcAndToken>,
sched_tc_pool: StaticMemoryPool,
) -> Pus11Wrapper<
MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscSender,
> {
let sched_srv_tm_sender = TmAsVecSenderWithId::new(
TmSenderId::PusSched as ChannelId,
"PUS_11_TM_SENDER",
tm_funnel_tx,
);
let sched_srv_receiver = MpscTcReceiver::new(
TcReceiverId::PusSched as ChannelId,
"PUS_11_TC_RECV",
pus_sched_rx,
);
) -> SchedulingServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
let scheduler = PusScheduler::new_with_current_init_time(Duration::from_secs(5))
.expect("Creating PUS Scheduler failed");
let pus_11_handler = PusService11SchedHandler::new(
let pus_11_handler = PusSchedServiceHandler::new(
PusServiceHelper::new(
sched_srv_receiver,
sched_srv_tm_sender,
PUS_APID,
verif_reporter.clone(),
PUS_SCHED_SERVICE.id(),
pus_sched_rx,
tm_funnel_tx,
create_verification_reporter(PUS_SCHED_SERVICE.id(), PUS_SCHED_SERVICE.apid),
EcssTcInVecConverter::default(),
),
scheduler,
);
Pus11Wrapper {
SchedulingServiceWrapper {
pus_11_handler,
sched_tc_pool,
releaser_buf: [0; 4096],

View File

@ -1,69 +1,71 @@
use satrs::pus::{
verification::VerificationReportingProvider, EcssTcInMemConverter, EcssTcReceiverCore,
EcssTmSenderCore,
use crate::pus::mode::ModeServiceWrapper;
use derive_new::new;
use satrs::{
pus::{EcssTcInMemConverter, EcssTmSenderCore},
spacepackets::time::{cds, TimeWriter},
};
use super::{
action::Pus8Wrapper, event::Pus5Wrapper, hk::Pus3Wrapper, scheduler::Pus11Wrapper,
test::Service17CustomWrapper,
action::ActionServiceWrapper, event::EventServiceWrapper, hk::HkServiceWrapper,
scheduler::SchedulingServiceWrapper, test::TestCustomServiceWrapper, HandlingStatus,
TargetedPusService,
};
pub struct PusStack<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> {
event_srv: Pus5Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
hk_srv: Pus3Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
action_srv: Pus8Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
schedule_srv: Pus11Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
test_srv: Service17CustomWrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
#[derive(new)]
pub struct PusStack<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter> {
test_srv: TestCustomServiceWrapper<TmSender, TcInMemConverter>,
hk_srv_wrapper: HkServiceWrapper<TmSender, TcInMemConverter>,
event_srv: EventServiceWrapper<TmSender, TcInMemConverter>,
action_srv_wrapper: ActionServiceWrapper<TmSender, TcInMemConverter>,
schedule_srv: SchedulingServiceWrapper<TmSender, TcInMemConverter>,
mode_srv: ModeServiceWrapper<TmSender, TcInMemConverter>,
}
impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> PusStack<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
impl<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter>
PusStack<TmSender, TcInMemConverter>
{
pub fn new(
hk_srv: Pus3Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
event_srv: Pus5Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
action_srv: Pus8Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
schedule_srv: Pus11Wrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
test_srv: Service17CustomWrapper<
TcReceiver,
TmSender,
TcInMemConverter,
VerificationReporter,
>,
) -> Self {
Self {
event_srv,
action_srv,
schedule_srv,
test_srv,
hk_srv,
}
}
pub fn periodic_operation(&mut self) {
// Release all telecommands which reached their release time before calling the service
// handlers.
self.schedule_srv.release_tcs();
let time_stamp = cds::CdsTime::now_with_u16_days()
.expect("time stamp generation error")
.to_vec()
.unwrap();
loop {
let mut nothing_to_do = true;
let mut is_srv_finished =
    |tc_handling_done: bool, reply_handling_status: Option<HandlingStatus>| {
        // More work remains if the TC queue was not yet empty or if a reply was
        // just handled.
        if !tc_handling_done
            || (reply_handling_status.is_some()
                && reply_handling_status.unwrap() == HandlingStatus::HandledOne)
        {
            nothing_to_do = false;
        }
    };
is_srv_finished(self.test_srv.poll_and_handle_next_packet(&time_stamp), None);
is_srv_finished(self.schedule_srv.poll_and_handle_next_tc(&time_stamp), None);
is_srv_finished(self.event_srv.poll_and_handle_next_tc(&time_stamp), None);
is_srv_finished(
self.action_srv_wrapper.poll_and_handle_next_tc(&time_stamp),
Some(
self.action_srv_wrapper
.poll_and_handle_next_reply(&time_stamp),
),
);
is_srv_finished(
self.hk_srv_wrapper.poll_and_handle_next_tc(&time_stamp),
Some(self.hk_srv_wrapper.poll_and_handle_next_reply(&time_stamp)),
);
is_srv_finished(
self.mode_srv.poll_and_handle_next_tc(&time_stamp),
Some(self.mode_srv.poll_and_handle_next_reply(&time_stamp)),
);
if nothing_to_do {
// Timeout checking is only done once.
self.action_srv_wrapper.check_for_request_timeouts();
self.hk_srv_wrapper.check_for_request_timeouts();
self.mode_srv.check_for_request_timeouts();
break;
}
}

View File

@ -1,118 +1,74 @@
use crate::pus::create_verification_reporter;
use log::{info, warn};
use satrs::params::Params;
use satrs::pool::{SharedStaticMemoryPool, StoreAddr};
use satrs::event_man::{EventMessage, EventMessageU32};
use satrs::pool::SharedStaticMemoryPool;
use satrs::pus::test::PusService17TestHandler;
use satrs::pus::verification::{FailParams, VerificationReportingProvider};
use satrs::pus::verification::{
VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
};
use satrs::pus::verification::{FailParams, VerificationReporter, VerificationReportingProvider};
use satrs::pus::EcssTcInSharedStoreConverter;
use satrs::pus::{
EcssTcAndToken, EcssTcInMemConverter, EcssTcInVecConverter, EcssTcReceiverCore,
EcssTmSenderCore, MpscTcReceiver, PusPacketHandlerResult, PusServiceHelper,
TmAsVecSenderWithId, TmAsVecSenderWithMpsc, TmInSharedPoolSenderWithBoundedMpsc,
TmInSharedPoolSenderWithId,
EcssTcAndToken, EcssTcInMemConverter, EcssTcInVecConverter, EcssTmSenderCore, MpscTcReceiver,
MpscTmAsVecSender, MpscTmInSharedPoolSenderBounded, PusPacketHandlerResult, PusServiceHelper,
PusTmAsVec, PusTmInPool, TmInSharedPoolSender,
};
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::PusPacket;
use satrs::spacepackets::time::cds::CdsTime;
use satrs::spacepackets::time::TimeWriter;
use satrs::tmtc::tm_helper::SharedTmPool;
use satrs::ChannelId;
use satrs::{events::EventU32, pus::EcssTcInSharedStoreConverter};
use satrs_example::config::{tmtc_err, TcReceiverId, TmSenderId, PUS_APID, TEST_EVENT};
use std::sync::mpsc::{self, Sender};
use satrs_example::config::components::PUS_TEST_SERVICE;
use satrs_example::config::{tmtc_err, TEST_EVENT};
use std::sync::mpsc;
pub fn create_test_service_static(
shared_tm_store: SharedTmPool,
tm_funnel_tx: mpsc::SyncSender<StoreAddr>,
verif_reporter: VerificationReporterWithSharedPoolMpscBoundedSender,
tm_sender: TmInSharedPoolSender<mpsc::SyncSender<PusTmInPool>>,
tc_pool: SharedStaticMemoryPool,
event_sender: mpsc::Sender<(EventU32, Option<Params>)>,
event_sender: mpsc::Sender<EventMessageU32>,
pus_test_rx: mpsc::Receiver<EcssTcAndToken>,
) -> Service17CustomWrapper<
MpscTcReceiver,
TmInSharedPoolSenderWithBoundedMpsc,
EcssTcInSharedStoreConverter,
VerificationReporterWithSharedPoolMpscBoundedSender,
> {
let test_srv_tm_sender = TmInSharedPoolSenderWithId::new(
TmSenderId::PusTest as ChannelId,
"PUS_17_TM_SENDER",
shared_tm_store.clone(),
tm_funnel_tx.clone(),
);
let test_srv_receiver = MpscTcReceiver::new(
TcReceiverId::PusTest as ChannelId,
"PUS_17_TC_RECV",
pus_test_rx,
);
) -> TestCustomServiceWrapper<MpscTmInSharedPoolSenderBounded, EcssTcInSharedStoreConverter> {
let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new(
test_srv_receiver,
test_srv_tm_sender,
PUS_APID,
verif_reporter.clone(),
PUS_TEST_SERVICE.id(),
pus_test_rx,
tm_sender,
create_verification_reporter(PUS_TEST_SERVICE.id(), PUS_TEST_SERVICE.apid),
EcssTcInSharedStoreConverter::new(tc_pool, 2048),
));
Service17CustomWrapper {
pus17_handler,
TestCustomServiceWrapper {
handler: pus17_handler,
test_srv_event_sender: event_sender,
}
}
pub fn create_test_service_dynamic(
tm_funnel_tx: mpsc::Sender<Vec<u8>>,
verif_reporter: VerificationReporterWithVecMpscSender,
event_sender: mpsc::Sender<(EventU32, Option<Params>)>,
tm_funnel_tx: mpsc::Sender<PusTmAsVec>,
event_sender: mpsc::Sender<EventMessageU32>,
pus_test_rx: mpsc::Receiver<EcssTcAndToken>,
) -> Service17CustomWrapper<
MpscTcReceiver,
TmAsVecSenderWithMpsc,
EcssTcInVecConverter,
VerificationReporterWithVecMpscSender,
> {
let test_srv_tm_sender = TmAsVecSenderWithId::new(
TmSenderId::PusTest as ChannelId,
"PUS_17_TM_SENDER",
tm_funnel_tx.clone(),
);
let test_srv_receiver = MpscTcReceiver::new(
TcReceiverId::PusTest as ChannelId,
"PUS_17_TC_RECV",
pus_test_rx,
);
) -> TestCustomServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new(
test_srv_receiver,
test_srv_tm_sender,
PUS_APID,
verif_reporter.clone(),
PUS_TEST_SERVICE.id(),
pus_test_rx,
tm_funnel_tx,
create_verification_reporter(PUS_TEST_SERVICE.id(), PUS_TEST_SERVICE.apid),
EcssTcInVecConverter::default(),
));
Service17CustomWrapper {
pus17_handler,
TestCustomServiceWrapper {
handler: pus17_handler,
test_srv_event_sender: event_sender,
}
}
pub struct Service17CustomWrapper<
TcReceiver: EcssTcReceiverCore,
pub struct TestCustomServiceWrapper<
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> {
pub pus17_handler:
PusService17TestHandler<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
pub test_srv_event_sender: Sender<(EventU32, Option<Params>)>,
pub handler:
PusService17TestHandler<MpscTcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
pub test_srv_event_sender: mpsc::Sender<EventMessageU32>,
}
impl<
TcReceiver: EcssTcReceiverCore,
TmSender: EcssTmSenderCore,
TcInMemConverter: EcssTcInMemConverter,
VerificationReporter: VerificationReportingProvider,
> Service17CustomWrapper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
impl<TmSender: EcssTmSenderCore, TcInMemConverter: EcssTcInMemConverter>
TestCustomServiceWrapper<TmSender, TcInMemConverter>
{
pub fn handle_next_packet(&mut self) -> bool {
let res = self.pus17_handler.handle_one_tc();
pub fn poll_and_handle_next_packet(&mut self, time_stamp: &[u8]) -> bool {
let res = self.handler.poll_and_handle_next_tc(time_stamp);
if res.is_err() {
warn!("PUS17 handler failed with error {:?}", res.unwrap_err());
return true;
@ -133,7 +89,7 @@ impl<
}
PusPacketHandlerResult::CustomSubservice(subservice, token) => {
let (tc, _) = PusTcReader::new(
self.pus17_handler
self.handler
.service_helper
.tc_in_mem_converter
.tc_slice_raw(),
@ -145,28 +101,30 @@ impl<
if subservice == 128 {
info!("Generating test event");
self.test_srv_event_sender
.send(EventMessage::new(PUS_TEST_SERVICE.id(), TEST_EVENT.into()))
.expect("Sending test event failed");
let start_token = self
    .handler
    .service_helper
    .verif_reporter()
    .start_success(self.handler.service_helper.tm_sender(), token, &stamp_buf)
    .expect("Error sending start success");
self.handler
    .service_helper
    .verif_reporter()
    .completion_success(
        self.handler.service_helper.tm_sender(),
        start_token,
        &stamp_buf,
    )
    .expect("Error sending completion success");
} else {
let fail_data = [tc.subservice()];
self.handler
    .service_helper
    .verif_reporter()
    .start_failure(
        self.handler.service_helper.tm_sender(),
        token,
        FailParams::new(
            &stamp_buf,

View File

@ -1,94 +1,152 @@
use std::collections::HashMap;
use std::sync::mpsc;
use derive_new::new;
use log::warn;
use satrs::action::ActionRequest;
use satrs::hk::HkRequest;
use satrs::mode::ModeRequest;
use satrs::pus::action::PusActionRequestRouter;
use satrs::pus::hk::PusHkRequestRouter;
use satrs::pus::verification::{TcStateAccepted, VerificationToken};
use satrs::pus::GenericRoutingError;
use satrs::pus::verification::{
FailParams, TcStateAccepted, VerificationReportingProvider, VerificationToken,
};
use satrs::pus::{ActiveRequestProvider, EcssTmSenderCore, GenericRoutingError, PusRequestRouter};
use satrs::queue::GenericSendError;
use satrs::TargetId;
use satrs::request::{GenericMessage, MessageMetadata, UniqueApidTargetId};
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::PusPacket;
use satrs::ComponentId;
use satrs_example::config::components::PUS_ROUTING_SERVICE;
use satrs_example::config::tmtc_err;
#[allow(dead_code)]
#[derive(Clone, Eq, PartialEq, Debug)]
#[derive(Clone, Debug)]
#[non_exhaustive]
pub enum Request {
pub enum CompositeRequest {
Hk(HkRequest),
Mode(ModeRequest),
Action(ActionRequest),
}
#[derive(Clone, Eq, PartialEq, Debug, new)]
pub struct TargetedRequest {
pub(crate) target_id: TargetId,
pub(crate) request: Request,
#[derive(Clone)]
pub struct GenericRequestRouter {
pub id: ComponentId,
// All messages which do not have a dedicated queue.
pub composite_router_map: HashMap<ComponentId, mpsc::Sender<GenericMessage<CompositeRequest>>>,
pub mode_router_map: HashMap<ComponentId, mpsc::Sender<GenericMessage<ModeRequest>>>,
}
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct RequestWithToken {
pub(crate) targeted_request: TargetedRequest,
pub(crate) token: VerificationToken<TcStateAccepted>,
}
impl RequestWithToken {
pub fn new(
target_id: TargetId,
request: Request,
token: VerificationToken<TcStateAccepted>,
) -> Self {
impl Default for GenericRequestRouter {
fn default() -> Self {
Self {
targeted_request: TargetedRequest::new(target_id, request),
token,
id: PUS_ROUTING_SERVICE.raw(),
composite_router_map: Default::default(),
mode_router_map: Default::default(),
}
}
}
#[derive(Default, Clone)]
pub struct GenericRequestRouter(pub HashMap<TargetId, mpsc::Sender<RequestWithToken>>);
impl PusHkRequestRouter for GenericRequestRouter {
impl GenericRequestRouter {
pub(crate) fn handle_error_generic(
&self,
active_request: &impl ActiveRequestProvider,
tc: &PusTcReader,
error: GenericRoutingError,
tm_sender: &(impl EcssTmSenderCore + ?Sized),
verif_reporter: &impl VerificationReportingProvider,
time_stamp: &[u8],
) {
warn!(
"Routing request for service {} failed: {error:?}",
tc.service()
);
let accepted_token: VerificationToken<TcStateAccepted> = active_request
.token()
.try_into()
.expect("token is not in accepted state");
match error {
GenericRoutingError::UnknownTargetId(id) => {
let apid_target_id = UniqueApidTargetId::from(id);
warn!("Target APID for request: {}", apid_target_id.apid);
warn!("Target Unique ID for request: {}", apid_target_id.unique_id);
let mut fail_data: [u8; 8] = [0; 8];
fail_data.copy_from_slice(&id.to_be_bytes());
verif_reporter
.completion_failure(
tm_sender,
accepted_token,
FailParams::new(time_stamp, &tmtc_err::UNKNOWN_TARGET_ID, &fail_data),
)
.expect("Sending start failure failed");
}
GenericRoutingError::Send(_) => {
let mut fail_data: [u8; 8] = [0; 8];
fail_data.copy_from_slice(&active_request.target_id().to_be_bytes());
verif_reporter
.completion_failure(
tm_sender,
accepted_token,
FailParams::new(time_stamp, &tmtc_err::ROUTING_ERROR, &fail_data),
)
.expect("Sending start failure failed");
}
}
}
}
impl PusRequestRouter<HkRequest> for GenericRequestRouter {
type Error = GenericRoutingError;
fn route(
&self,
target_id: TargetId,
requestor_info: MessageMetadata,
target_id: ComponentId,
hk_request: HkRequest,
token: VerificationToken<TcStateAccepted>,
) -> Result<(), Self::Error> {
if let Some(sender) = self.0.get(&target_id) {
if let Some(sender) = self.composite_router_map.get(&target_id) {
sender
.send(RequestWithToken::new(
target_id,
Request::Hk(hk_request),
token,
.send(GenericMessage::new(
requestor_info,
CompositeRequest::Hk(hk_request),
))
.map_err(|_| GenericRoutingError::SendError(GenericSendError::RxDisconnected))?;
.map_err(|_| GenericRoutingError::Send(GenericSendError::RxDisconnected))?;
return Ok(());
}
Ok(())
Err(GenericRoutingError::UnknownTargetId(target_id))
}
}
impl PusActionRequestRouter for GenericRequestRouter {
impl PusRequestRouter<ActionRequest> for GenericRequestRouter {
type Error = GenericRoutingError;
fn route(
&self,
target_id: TargetId,
requestor_info: MessageMetadata,
target_id: ComponentId,
action_request: ActionRequest,
token: VerificationToken<TcStateAccepted>,
) -> Result<(), Self::Error> {
if let Some(sender) = self.0.get(&target_id) {
if let Some(sender) = self.composite_router_map.get(&target_id) {
sender
.send(RequestWithToken::new(
target_id,
Request::Action(action_request),
token,
.send(GenericMessage::new(
requestor_info,
CompositeRequest::Action(action_request),
))
.map_err(|_| GenericRoutingError::SendError(GenericSendError::RxDisconnected))?;
.map_err(|_| GenericRoutingError::Send(GenericSendError::RxDisconnected))?;
return Ok(());
}
Ok(())
Err(GenericRoutingError::UnknownTargetId(target_id))
}
}
impl PusRequestRouter<ModeRequest> for GenericRequestRouter {
type Error = GenericRoutingError;
fn route(
&self,
requestor_info: MessageMetadata,
target_id: ComponentId,
request: ModeRequest,
) -> Result<(), Self::Error> {
if let Some(sender) = self.mode_router_map.get(&target_id) {
sender
.send(GenericMessage::new(requestor_info, request))
.map_err(|_| GenericRoutingError::Send(GenericSendError::RxDisconnected))?;
return Ok(());
}
Err(GenericRoutingError::UnknownTargetId(target_id))
}
}
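// Routing sketch (hypothetical target and metadata bindings): a target component
// registers its request queue in the matching map, and the PUS services then route
// converted requests to it by component ID.
//
//     let (mode_req_tx, _mode_req_rx) = mpsc::channel();
//     let mut router = GenericRequestRouter::default();
//     router.mode_router_map.insert(target.id(), mode_req_tx);
//     router.route(requestor_info, target.id(), ModeRequest::ReadMode)?;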

View File

@ -1,5 +1,5 @@
use std::{
collections::VecDeque,
collections::{HashSet, VecDeque},
sync::{Arc, Mutex},
};
@ -10,12 +10,9 @@ use satrs::{
spacepackets::PacketId,
tmtc::{CcsdsDistributor, CcsdsError, ReceivesCcsdsTc, TmPacketSourceCore},
};
use satrs_example::config::PUS_APID;
use crate::ccsds::CcsdsReceiver;
pub const PACKET_ID_LOOKUP: &[PacketId] = &[PacketId::const_tc(true, PUS_APID)];
#[derive(Default, Clone)]
pub struct SyncTcpTmSource {
tm_queue: Arc<Mutex<VecDeque<Vec<u8>>>>,
@ -77,6 +74,7 @@ pub type TcpServerType<TcSource, MpscErrorType> = TcpSpacepacketsServer<
CcsdsError<MpscErrorType>,
SyncTcpTmSource,
CcsdsDistributor<CcsdsReceiver<TcSource, MpscErrorType>, MpscErrorType>,
HashSet<PacketId>,
>;
pub struct TcpTask<
@ -103,14 +101,10 @@ impl<
cfg: ServerConfig,
tm_source: SyncTcpTmSource,
tc_receiver: CcsdsDistributor<CcsdsReceiver<TcSource, MpscErrorType>, MpscErrorType>,
packet_id_lookup: HashSet<PacketId>,
) -> Result<Self, std::io::Error> {
Ok(Self {
server: TcpSpacepacketsServer::new(
cfg,
tm_source,
tc_receiver,
Box::new(PACKET_ID_LOOKUP),
)?,
server: TcpSpacepacketsServer::new(cfg, tm_source, tc_receiver, packet_id_lookup)?,
})
}

View File

@ -4,8 +4,9 @@ use std::{
};
use log::info;
use satrs::pus::{PusTmAsVec, PusTmInPool};
use satrs::{
pool::{PoolProvider, StoreAddr},
pool::PoolProvider,
seq_count::{CcsdsSimpleSeqCountProvider, SequenceCountProviderCore},
spacepackets::{
ecss::{tm::PusTmZeroCopyWriter, PusPacket},
@ -77,16 +78,16 @@ impl TmFunnelCommon {
pub struct TmFunnelStatic {
common: TmFunnelCommon,
shared_tm_store: SharedTmPool,
tm_funnel_rx: mpsc::Receiver<StoreAddr>,
tm_server_tx: mpsc::SyncSender<StoreAddr>,
tm_funnel_rx: mpsc::Receiver<PusTmInPool>,
tm_server_tx: mpsc::SyncSender<PusTmInPool>,
}
impl TmFunnelStatic {
pub fn new(
shared_tm_store: SharedTmPool,
sync_tm_tcp_source: SyncTcpTmSource,
tm_funnel_rx: mpsc::Receiver<StoreAddr>,
tm_server_tx: mpsc::SyncSender<StoreAddr>,
tm_funnel_rx: mpsc::Receiver<PusTmInPool>,
tm_server_tx: mpsc::SyncSender<PusTmInPool>,
) -> Self {
Self {
common: TmFunnelCommon::new(sync_tm_tcp_source),
@ -97,14 +98,14 @@ impl TmFunnelStatic {
}
pub fn operation(&mut self) {
if let Ok(addr) = self.tm_funnel_rx.recv() {
if let Ok(pus_tm_in_pool) = self.tm_funnel_rx.recv() {
// Read the TM, set sequence counter and message counter, and finally update
// the CRC.
let shared_pool = self.shared_tm_store.clone_backing_pool();
let mut pool_guard = shared_pool.write().expect("Locking TM pool failed");
let mut tm_copy = Vec::new();
pool_guard
.modify(&addr, |buf| {
.modify(&pus_tm_in_pool.store_addr, |buf| {
let zero_copy_writer = PusTmZeroCopyWriter::new(buf, MIN_CDS_FIELD_LEN)
.expect("Creating TM zero copy writer failed");
self.common.apply_packet_processing(zero_copy_writer);
@ -112,7 +113,7 @@ impl TmFunnelStatic {
})
.expect("Reading TM from pool failed");
self.tm_server_tx
.send(addr)
.send(pus_tm_in_pool)
.expect("Sending TM to server failed");
// We could also do this step in the update closure, but I'd rather avoid this, could
// lead to nested locking.
@ -123,15 +124,15 @@ impl TmFunnelStatic {
pub struct TmFunnelDynamic {
common: TmFunnelCommon,
tm_funnel_rx: mpsc::Receiver<Vec<u8>>,
tm_server_tx: mpsc::Sender<Vec<u8>>,
tm_funnel_rx: mpsc::Receiver<PusTmAsVec>,
tm_server_tx: mpsc::Sender<PusTmAsVec>,
}
impl TmFunnelDynamic {
pub fn new(
sync_tm_tcp_source: SyncTcpTmSource,
tm_funnel_rx: mpsc::Receiver<Vec<u8>>,
tm_server_tx: mpsc::Sender<Vec<u8>>,
tm_funnel_rx: mpsc::Receiver<PusTmAsVec>,
tm_server_tx: mpsc::Sender<PusTmAsVec>,
) -> Self {
Self {
common: TmFunnelCommon::new(sync_tm_tcp_source),
@ -144,13 +145,13 @@ impl TmFunnelDynamic {
if let Ok(mut tm) = self.tm_funnel_rx.recv() {
// Read the TM, set sequence counter and message counter, and finally update
// the CRC.
let zero_copy_writer = PusTmZeroCopyWriter::new(&mut tm, MIN_CDS_FIELD_LEN)
let zero_copy_writer = PusTmZeroCopyWriter::new(&mut tm.packet, MIN_CDS_FIELD_LEN)
.expect("Creating TM zero copy writer failed");
self.common.apply_packet_processing(zero_copy_writer);
self.common.sync_tm_tcp_source.add_tm(&tm.packet);
self.tm_server_tx
.send(tm.clone())
.send(tm)
.expect("Sending TM to server failed");
self.common.sync_tm_tcp_source.add_tm(&tm);
}
}
}

View File

@ -1,8 +1,7 @@
use log::warn;
use satrs::pus::verification::std_mod::{
VerificationReporterWithSharedPoolMpscBoundedSender, VerificationReporterWithVecMpscSender,
use satrs::pus::{
EcssTcAndToken, MpscTmAsVecSender, MpscTmInSharedPoolSenderBounded, ReceivesEcssPusTc,
};
use satrs::pus::{EcssTcAndToken, ReceivesEcssPusTc};
use satrs::spacepackets::SpHeader;
use std::sync::mpsc::{self, Receiver, SendError, Sender, SyncSender, TryRecvError};
use thiserror::Error;
@ -100,14 +99,14 @@ pub struct TcSourceTaskStatic {
shared_tc_pool: SharedTcPool,
tc_receiver: Receiver<StoreAddr>,
tc_buf: [u8; 4096],
pus_receiver: PusReceiver<VerificationReporterWithSharedPoolMpscBoundedSender>,
pus_receiver: PusReceiver<MpscTmInSharedPoolSenderBounded>,
}
impl TcSourceTaskStatic {
pub fn new(
shared_tc_pool: SharedTcPool,
tc_receiver: Receiver<StoreAddr>,
pus_receiver: PusReceiver<VerificationReporterWithSharedPoolMpscBoundedSender>,
pus_receiver: PusReceiver<MpscTmInSharedPoolSenderBounded>,
) -> Self {
Self {
shared_tc_pool,
@ -164,13 +163,13 @@ impl TcSourceTaskStatic {
// TC source components where the heap is the backing memory of the received telecommands.
pub struct TcSourceTaskDynamic {
pub tc_receiver: Receiver<Vec<u8>>,
pus_receiver: PusReceiver<VerificationReporterWithVecMpscSender>,
pus_receiver: PusReceiver<MpscTmAsVecSender>,
}
impl TcSourceTaskDynamic {
pub fn new(
tc_receiver: Receiver<Vec<u8>>,
pus_receiver: PusReceiver<VerificationReporterWithVecMpscSender>,
pus_receiver: PusReceiver<MpscTmAsVecSender>,
) -> Self {
Self {
tc_receiver,

View File

@ -1,12 +1,11 @@
use std::{
net::{SocketAddr, UdpSocket},
sync::mpsc::Receiver,
};
use std::net::{SocketAddr, UdpSocket};
use std::sync::mpsc;
use log::{info, warn};
use satrs::pus::{PusTmAsVec, PusTmInPool};
use satrs::{
hal::std::udp_server::{ReceiveResult, UdpTcServer},
pool::{PoolProviderWithGuards, SharedStaticMemoryPool, StoreAddr},
pool::{PoolProviderWithGuards, SharedStaticMemoryPool},
tmtc::CcsdsError,
};
@ -15,20 +14,20 @@ pub trait UdpTmHandler {
}
pub struct StaticUdpTmHandler {
pub tm_rx: Receiver<StoreAddr>,
pub tm_rx: mpsc::Receiver<PusTmInPool>,
pub tm_store: SharedStaticMemoryPool,
}
impl UdpTmHandler for StaticUdpTmHandler {
fn send_tm_to_udp_client(&mut self, socket: &UdpSocket, &recv_addr: &SocketAddr) {
while let Ok(addr) = self.tm_rx.try_recv() {
while let Ok(pus_tm_in_pool) = self.tm_rx.try_recv() {
let store_lock = self.tm_store.write();
if store_lock.is_err() {
warn!("Locking TM store failed");
continue;
}
let mut store_lock = store_lock.unwrap();
let pg = store_lock.read_with_guard(addr);
let pg = store_lock.read_with_guard(pus_tm_in_pool.store_addr);
let read_res = pg.read_as_vec();
if read_res.is_err() {
warn!("Error reading TM pool data");
@ -44,20 +43,20 @@ impl UdpTmHandler for StaticUdpTmHandler {
}
pub struct DynamicUdpTmHandler {
pub tm_rx: Receiver<Vec<u8>>,
pub tm_rx: mpsc::Receiver<PusTmAsVec>,
}
impl UdpTmHandler for DynamicUdpTmHandler {
fn send_tm_to_udp_client(&mut self, socket: &UdpSocket, recv_addr: &SocketAddr) {
while let Ok(tm) = self.tm_rx.try_recv() {
if tm.len() > 9 {
let service = tm[7];
let subservice = tm[8];
if tm.packet.len() > 9 {
let service = tm.packet[7];
let subservice = tm.packet[8];
info!("Sending PUS TM[{service},{subservice}]")
} else {
info!("Sending PUS TM");
}
let result = socket.send_to(&tm, recv_addr);
let result = socket.send_to(&tm.packet, recv_addr);
if let Err(e) = result {
warn!("Sending TM with UDP socket failed: {e}")
}
@ -120,7 +119,7 @@ mod tests {
},
tmtc::ReceivesTcCore,
};
use satrs_example::config::{OBSW_SERVER_ADDR, PUS_APID};
use satrs_example::config::{components, OBSW_SERVER_ADDR};
use super::*;
@ -178,8 +177,8 @@ mod tests {
udp_tc_server,
tm_handler,
};
let mut sph = SpHeader::tc_unseg(PUS_APID, 0, 0).unwrap();
let ping_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true)
let sph = SpHeader::new_for_unseg_tc(components::Apid::GenericPus as u16, 0, 0);
let ping_tc = PusTcCreator::new_simple(sph, 17, 1, &[], true)
.to_vec()
.unwrap();
let client = UdpSocket::bind("127.0.0.1:0").expect("Connecting to UDP server failed");