Mode Tree Feature Update

2024-11-21 18:35:16 +01:00
parent 1a1d330814
commit 90a6b07575
43 changed files with 5577 additions and 1528 deletions

View File

@@ -20,6 +20,7 @@ thiserror = "2"
lazy_static = "1"
strum = { version = "0.26", features = ["derive"] }
derive-new = "0.7"
cfg-if = "1"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
@@ -35,8 +36,8 @@ version = "0.1.1"
path = "../satrs-mib"
[features]
dyn_tmtc = []
default = ["dyn_tmtc"]
heap_tmtc = []
default = ["heap_tmtc"]
[dev-dependencies]
env_logger = "0.11"

View File

@@ -14,7 +14,7 @@ You can run the application using `cargo run`.
# Features
The example has the `dyn_tmtc` feature which is enabled by default. With this feature enabled,
The example has the `heap_tmtc` feature which is enabled by default. With this feature enabled,
TMTC packets are exchanged using the heap as the backing memory instead of pre-allocated static
stores.
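To make the switch concrete, here is a minimal sketch of how the new `cfg-if` dependency can gate heap-backed versus statically pooled packet storage (this is not code from the commit; `PacketBuf` is an illustrative name). Building with `cargo run --no-default-features` turns the default `heap_tmtc` feature off again:

```rust
use cfg_if::cfg_if;

cfg_if! {
    if #[cfg(feature = "heap_tmtc")] {
        // heap_tmtc enabled: packets live in owned, growable heap buffers.
        pub type PacketBuf = Vec<u8>;
    } else {
        // Feature disabled: packets are copied into fixed-size slots of a
        // pre-allocated static store.
        pub type PacketBuf = [u8; 2048];
    }
}
```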

View File

@@ -1,7 +1,7 @@
use derive_new::new;
use satrs::hk::{HkRequest, HkRequestVariant};
use satrs::mode_tree::{ModeChild, ModeNode};
use satrs::power::{PowerSwitchInfo, PowerSwitcherCommandSender};
use satrs::queue::{GenericSendError, GenericTargetedMessagingError};
use satrs_example::{DeviceMode, TimestampHelper};
use satrs_minisim::acs::lis3mdl::{
MgmLis3MdlReply, MgmLis3RawValues, FIELD_LSB_PER_GAUSS_4_SENS, GAUSS_TO_MICROTESLA_FACTOR,
@@ -15,7 +15,8 @@ use std::sync::{Arc, Mutex};
use std::time::Duration;
use satrs::mode::{
ModeAndSubmode, ModeError, ModeProvider, ModeReply, ModeRequest, ModeRequestHandler,
ModeAndSubmode, ModeError, ModeProvider, ModeReply, ModeRequestHandler,
ModeRequestHandlerMpscBounded,
};
use satrs::pus::{EcssTmSender, PusTmVariant};
use satrs::request::{GenericMessage, MessageMetadata, UniqueApidTargetId};
@@ -24,6 +25,8 @@ use satrs_example::config::components::{NO_SENDER, PUS_MODE_SERVICE};
use crate::hk::PusHkHelper;
use crate::pus::hk::{HkReply, HkReplyVariant};
use crate::requests::CompositeRequest;
use crate::spi::SpiInterface;
use crate::tm_sender::TmSender;
use serde::{Deserialize, Serialize};
@@ -48,11 +51,6 @@ pub enum TransitionState {
Done,
}
pub trait SpiInterface {
type Error: Debug;
fn transfer(&mut self, tx: &[u8], rx: &mut [u8]) -> Result<(), Self::Error>;
}
#[derive(Default)]
pub struct SpiDummyInterface {
pub dummy_values: MgmLis3RawValues,
@@ -129,13 +127,6 @@ pub struct MgmData {
pub z: f32,
}
pub struct MpscModeLeafInterface {
pub request_rx: mpsc::Receiver<GenericMessage<ModeRequest>>,
pub reply_to_pus_tx: mpsc::Sender<GenericMessage<ModeReply>>,
#[allow(dead_code)]
pub reply_to_parent_tx: mpsc::SyncSender<GenericMessage<ModeReply>>,
}
#[derive(Default)]
pub struct BufWrapper {
tx_buf: [u8; 32],
@@ -166,14 +157,13 @@ impl Default for ModeHelpers {
#[allow(clippy::too_many_arguments)]
pub struct MgmHandlerLis3Mdl<
ComInterface: SpiInterface,
TmSender: EcssTmSender,
SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
> {
id: UniqueApidTargetId,
dev_str: &'static str,
mode_interface: MpscModeLeafInterface,
mode_node: ModeRequestHandlerMpscBounded,
composite_request_rx: mpsc::Receiver<GenericMessage<CompositeRequest>>,
hk_reply_tx: mpsc::Sender<GenericMessage<HkReply>>,
hk_reply_tx: mpsc::SyncSender<GenericMessage<HkReply>>,
switch_helper: SwitchHelper,
tm_sender: TmSender,
pub com_interface: ComInterface,
@@ -190,9 +180,8 @@ pub struct MgmHandlerLis3Mdl<
impl<
ComInterface: SpiInterface,
TmSender: EcssTmSender,
SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
> MgmHandlerLis3Mdl<ComInterface, TmSender, SwitchHelper>
> MgmHandlerLis3Mdl<ComInterface, SwitchHelper>
{
pub fn periodic_operation(&mut self) {
self.stamp_helper.update_from_now();
@@ -275,25 +264,28 @@ impl<
pub fn handle_mode_requests(&mut self) {
loop {
// TODO: Only allow one set mode request per cycle?
match self.mode_interface.request_rx.try_recv() {
Ok(msg) => {
let result = self.handle_mode_request(msg);
// TODO: Trigger event?
if result.is_err() {
log::warn!(
"{}: mode request failed with error {:?}",
self.dev_str,
result.err().unwrap()
);
}
}
Err(e) => {
if e != mpsc::TryRecvError::Empty {
log::warn!("{}: failed to receive mode request: {:?}", self.dev_str, e);
match self.mode_node.try_recv_mode_request() {
Ok(opt_msg) => {
if let Some(msg) = opt_msg {
let result = self.handle_mode_request(msg);
// TODO: Trigger event?
if result.is_err() {
log::warn!(
"{}: mode request failed with error {:?}",
self.dev_str,
result.err().unwrap()
);
}
} else {
break;
}
}
Err(e) => match e {
satrs::queue::GenericReceiveError::Empty => break,
satrs::queue::GenericReceiveError::TxDisconnected(e) => {
log::warn!("{}: failed to receive mode request: {:?}", self.dev_str, e);
}
},
}
}
}
@@ -365,9 +357,8 @@ impl<
impl<
ComInterface: SpiInterface,
TmSender: EcssTmSender,
SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
> ModeProvider for MgmHandlerLis3Mdl<ComInterface, TmSender, SwitchHelper>
> ModeProvider for MgmHandlerLis3Mdl<ComInterface, SwitchHelper>
{
fn mode_and_submode(&self) -> ModeAndSubmode {
self.mode_helpers.current
@@ -376,9 +367,8 @@ impl<
impl<
ComInterface: SpiInterface,
TmSender: EcssTmSender,
SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
> ModeRequestHandler for MgmHandlerLis3Mdl<ComInterface, TmSender, SwitchHelper>
> ModeRequestHandler for MgmHandlerLis3Mdl<ComInterface, SwitchHelper>
{
type Error = ModeError;
@@ -386,6 +376,7 @@ impl<
&mut self,
requestor: MessageMetadata,
mode_and_submode: ModeAndSubmode,
_forced: bool,
) -> Result<(), satrs::mode::ModeError> {
log::info!(
"{}: transitioning to mode {:?}",
@@ -448,10 +439,9 @@ impl<
requestor.sender_id()
);
}
self.mode_interface
.reply_to_pus_tx
.send(GenericMessage::new(requestor, reply))
.map_err(|_| GenericTargetedMessagingError::Send(GenericSendError::RxDisconnected))?;
self.mode_node
.send_mode_reply(requestor, reply)
.map_err(ModeError::Send)?;
Ok(())
}
@@ -464,17 +454,44 @@ impl<
}
}
impl<
ComInterface: SpiInterface,
SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
> ModeNode for MgmHandlerLis3Mdl<ComInterface, SwitchHelper>
{
fn id(&self) -> satrs::ComponentId {
self.id.into()
}
}
impl<
ComInterface: SpiInterface,
SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
> ModeChild for MgmHandlerLis3Mdl<ComInterface, SwitchHelper>
{
type Sender = mpsc::SyncSender<GenericMessage<ModeReply>>;
fn add_mode_parent(&mut self, id: satrs::ComponentId, reply_sender: Self::Sender) {
self.mode_node.add_message_target(id, reply_sender);
}
}
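The two impls above are what hook the handler into the new mode tree. Condensed from the calls this commit makes in the test bench and main.rs (the channel depth and component ID are simply the values used there), the construction pattern looks roughly like this:

```rust
use std::sync::mpsc;

use satrs::mode::{ModeRequest, ModeRequestHandlerMpscBounded};
use satrs::request::GenericMessage;
use satrs_example::config::components::MGM_HANDLER_0;

// The leaf handler owns the bounded request receiver through its mode node;
// the returned sender is what a parent uses to command the child.
fn make_leaf_mode_node() -> (
    ModeRequestHandlerMpscBounded,
    mpsc::SyncSender<GenericMessage<ModeRequest>>,
) {
    let (request_tx, request_rx) = mpsc::sync_channel(5);
    let mode_node = ModeRequestHandlerMpscBounded::new(MGM_HANDLER_0.into(), request_rx);
    (mode_node, request_tx)
}
```

The `mode_node` is then passed to `MgmHandlerLis3Mdl::new`, and either a direct `add_mode_parent` call (as in the test bench below) or `satrs::mode_tree::connect_mode_nodes` (as in the updated main.rs) registers the matching reply sender on the child and the request sender on the parent.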
#[cfg(test)]
mod tests {
use std::sync::{mpsc, Arc};
use std::{
collections::HashMap,
sync::{mpsc, Arc},
};
use satrs::{
mode::{ModeReply, ModeRequest},
mode_tree::ModeParent,
power::SwitchStateBinary,
request::{GenericMessage, UniqueApidTargetId},
tmtc::PacketAsVec,
ComponentId,
};
use satrs_example::config::components::Apid;
use satrs_example::config::components::{Apid, MGM_ASSEMBLY};
use satrs_minisim::acs::lis3mdl::MgmLis3RawValues;
use crate::{eps::TestSwitchHelper, pus::hk::HkReply, requests::CompositeRequest};
@@ -502,49 +519,88 @@ mod tests {
}
}
#[allow(dead_code)]
pub struct MgmTestbench {
pub mode_request_tx: mpsc::Sender<GenericMessage<ModeRequest>>,
pub mode_request_tx: mpsc::SyncSender<GenericMessage<ModeRequest>>,
pub mode_reply_rx_to_pus: mpsc::Receiver<GenericMessage<ModeReply>>,
pub mode_reply_rx_to_parent: mpsc::Receiver<GenericMessage<ModeReply>>,
pub composite_request_tx: mpsc::Sender<GenericMessage<CompositeRequest>>,
pub hk_reply_rx: mpsc::Receiver<GenericMessage<HkReply>>,
pub tm_rx: mpsc::Receiver<PacketAsVec>,
pub handler:
MgmHandlerLis3Mdl<TestSpiInterface, mpsc::Sender<PacketAsVec>, TestSwitchHelper>,
pub handler: MgmHandlerLis3Mdl<TestSpiInterface, TestSwitchHelper>,
}
#[derive(Default)]
pub struct MgmAssemblyMock(
pub HashMap<ComponentId, mpsc::SyncSender<GenericMessage<ModeRequest>>>,
);
impl ModeNode for MgmAssemblyMock {
fn id(&self) -> satrs::ComponentId {
PUS_MODE_SERVICE.into()
}
}
impl ModeParent for MgmAssemblyMock {
type Sender = mpsc::SyncSender<GenericMessage<ModeRequest>>;
fn add_mode_child(&mut self, id: satrs::ComponentId, request_sender: Self::Sender) {
self.0.insert(id, request_sender);
}
}
#[derive(Default)]
pub struct PusMock {
pub request_sender_map: HashMap<ComponentId, mpsc::SyncSender<GenericMessage<ModeRequest>>>,
}
impl ModeNode for PusMock {
fn id(&self) -> satrs::ComponentId {
PUS_MODE_SERVICE.into()
}
}
impl ModeParent for PusMock {
type Sender = mpsc::SyncSender<GenericMessage<ModeRequest>>;
fn add_mode_child(&mut self, id: satrs::ComponentId, request_sender: Self::Sender) {
self.request_sender_map.insert(id, request_sender);
}
}
impl MgmTestbench {
pub fn new() -> Self {
let (request_tx, request_rx) = mpsc::channel();
let (reply_tx_to_pus, reply_rx_to_pus) = mpsc::channel();
let (request_tx, request_rx) = mpsc::sync_channel(5);
let (reply_tx_to_pus, reply_rx_to_pus) = mpsc::sync_channel(5);
let (reply_tx_to_parent, reply_rx_to_parent) = mpsc::sync_channel(5);
let mode_interface = MpscModeLeafInterface {
request_rx,
reply_to_pus_tx: reply_tx_to_pus,
reply_to_parent_tx: reply_tx_to_parent,
};
let id = UniqueApidTargetId::new(Apid::Acs as u16, 1);
let mode_node = ModeRequestHandlerMpscBounded::new(id.into(), request_rx);
let (composite_request_tx, composite_request_rx) = mpsc::channel();
let (hk_reply_tx, hk_reply_rx) = mpsc::channel();
let (tm_tx, tm_rx) = mpsc::channel::<PacketAsVec>();
let (tm_tx, tm_rx) = mpsc::channel();
let tm_sender = TmSender::new_heap(tm_tx);
let shared_mgm_set = Arc::default();
let mut handler = MgmHandlerLis3Mdl::new(
id,
"TEST_MGM",
mode_node,
composite_request_rx,
hk_reply_tx,
TestSwitchHelper::default(),
tm_sender,
TestSpiInterface::default(),
shared_mgm_set,
);
handler.add_mode_parent(PUS_MODE_SERVICE.into(), reply_tx_to_pus);
handler.add_mode_parent(MGM_ASSEMBLY.into(), reply_tx_to_parent);
Self {
mode_request_tx: request_tx,
mode_reply_rx_to_pus: reply_rx_to_pus,
mode_reply_rx_to_parent: reply_rx_to_parent,
composite_request_tx,
handler,
tm_rx,
hk_reply_rx,
handler: MgmHandlerLis3Mdl::new(
UniqueApidTargetId::new(Apid::Acs as u16, 1),
"TEST_MGM",
mode_interface,
composite_request_rx,
hk_reply_tx,
TestSwitchHelper::default(),
tm_tx,
TestSpiInterface::default(),
shared_mgm_set,
),
}
}
}
@@ -575,7 +631,10 @@ mod tests {
.mode_request_tx
.send(GenericMessage::new(
MessageMetadata::new(0, PUS_MODE_SERVICE.id()),
ModeRequest::SetMode(ModeAndSubmode::new(DeviceMode::Normal as u32, 0)),
ModeRequest::SetMode {
mode_and_submode: ModeAndSubmode::new(DeviceMode::Normal as u32, 0),
forced: false,
},
))
.expect("failed to send mode request");
testbench.handler.periodic_operation();
@@ -633,7 +692,10 @@ mod tests {
.mode_request_tx
.send(GenericMessage::new(
MessageMetadata::new(0, PUS_MODE_SERVICE.id()),
ModeRequest::SetMode(ModeAndSubmode::new(DeviceMode::Normal as u32, 0)),
ModeRequest::SetMode {
mode_and_submode: ModeAndSubmode::new(DeviceMode::Normal as u32, 0),
forced: false,
},
))
.expect("failed to send mode request");
testbench.handler.periodic_operation();

View File

@@ -149,11 +149,13 @@ pub mod components {
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum AcsId {
Mgm0 = 0,
Assembly = 1,
}
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum EpsId {
Pcdu = 0,
Subsystem = 1,
}
#[derive(Copy, Clone, PartialEq, Eq)]
@@ -176,8 +178,12 @@
UniqueApidTargetId::new(Apid::GenericPus as u16, PusId::PusHk as u32);
pub const PUS_SCHED_SERVICE: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::Sched as u16, 0);
pub const MGM_ASSEMBLY: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::Acs as u16, AcsId::Assembly as u32);
pub const MGM_HANDLER_0: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::Acs as u16, AcsId::Mgm0 as u32);
pub const EPS_SUBSYSTEM: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::Eps as u16, EpsId::Subsystem as u32);
pub const PCDU_HANDLER: UniqueApidTargetId =
UniqueApidTargetId::new(Apid::Eps as u16, EpsId::Pcdu as u32);
pub const UDP_SERVER: UniqueApidTargetId =

View File

@@ -8,15 +8,19 @@ use derive_new::new;
use num_enum::{IntoPrimitive, TryFromPrimitive};
use satrs::{
hk::{HkRequest, HkRequestVariant},
mode::{ModeAndSubmode, ModeError, ModeProvider, ModeReply, ModeRequestHandler},
mode::{
ModeAndSubmode, ModeError, ModeProvider, ModeReply, ModeRequestHandler,
ModeRequestHandlerMpscBounded,
},
mode_tree::{ModeChild, ModeNode},
power::SwitchRequest,
pus::{EcssTmSender, PusTmVariant},
queue::{GenericSendError, GenericTargetedMessagingError},
queue::GenericSendError,
request::{GenericMessage, MessageMetadata, UniqueApidTargetId},
spacepackets::ByteConversionError,
};
use satrs_example::{
config::components::{NO_SENDER, PUS_MODE_SERVICE},
config::components::{NO_SENDER, PCDU_HANDLER, PUS_MODE_SERVICE},
DeviceMode, TimestampHelper,
};
use satrs_minisim::{
@@ -28,7 +32,6 @@ use satrs_minisim::{
use serde::{Deserialize, Serialize};
use crate::{
acs::mgm::MpscModeLeafInterface,
hk::PusHkHelper,
pus::hk::{HkReply, HkReplyVariant},
requests::CompositeRequest,
@@ -203,9 +206,9 @@ pub type SharedSwitchSet = Arc<Mutex<SwitchSet>>;
pub struct PcduHandler<ComInterface: SerialInterface, TmSender: EcssTmSender> {
id: UniqueApidTargetId,
dev_str: &'static str,
mode_interface: MpscModeLeafInterface,
mode_node: ModeRequestHandlerMpscBounded,
composite_request_rx: mpsc::Receiver<GenericMessage<CompositeRequest>>,
hk_reply_tx: mpsc::Sender<GenericMessage<HkReply>>,
hk_reply_tx: mpsc::SyncSender<GenericMessage<HkReply>>,
switch_request_rx: mpsc::Receiver<GenericMessage<SwitchRequest>>,
tm_sender: TmSender,
pub com_interface: ComInterface,
@@ -324,25 +327,30 @@ impl<ComInterface: SerialInterface, TmSender: EcssTmSender> PcduHandler<ComInter
pub fn handle_mode_requests(&mut self) {
loop {
// TODO: Only allow one set mode request per cycle?
match self.mode_interface.request_rx.try_recv() {
Ok(msg) => {
let result = self.handle_mode_request(msg);
// TODO: Trigger event?
if result.is_err() {
log::warn!(
"{}: mode request failed with error {:?}",
self.dev_str,
result.err().unwrap()
);
}
}
Err(e) => {
if e != mpsc::TryRecvError::Empty {
log::warn!("{}: failed to receive mode request: {:?}", self.dev_str, e);
match self.mode_node.try_recv_mode_request() {
Ok(opt_msg) => {
if let Some(msg) = opt_msg {
let result = self.handle_mode_request(msg);
// TODO: Trigger event?
if result.is_err() {
log::warn!(
"{}: mode request failed with error {:?}",
self.dev_str,
result.err().unwrap()
);
}
} else {
break;
}
}
Err(e) => match e {
satrs::queue::GenericReceiveError::Empty => {
break;
}
satrs::queue::GenericReceiveError::TxDisconnected(_) => {
log::warn!("{}: failed to receive mode request: {:?}", self.dev_str, e);
}
},
}
}
}
@@ -412,6 +420,7 @@ impl<ComInterface: SerialInterface, TmSender: EcssTmSender> ModeRequestHandler
&mut self,
requestor: MessageMetadata,
mode_and_submode: ModeAndSubmode,
_forced: bool,
) -> Result<(), satrs::mode::ModeError> {
log::info!(
"{}: transitioning to mode {:?}",
@@ -466,10 +475,9 @@ impl<ComInterface: SerialInterface, TmSender: EcssTmSender> ModeRequestHandler
requestor.sender_id()
);
}
self.mode_interface
.reply_to_pus_tx
.send(GenericMessage::new(requestor, reply))
.map_err(|_| GenericTargetedMessagingError::Send(GenericSendError::RxDisconnected))?;
self.mode_node
.send_mode_reply(requestor, reply)
.map_err(|_| GenericSendError::RxDisconnected)?;
Ok(())
}
@@ -482,6 +490,24 @@ impl<ComInterface: SerialInterface, TmSender: EcssTmSender> ModeRequestHandler
}
}
impl<ComInterface: SerialInterface, TmSender: EcssTmSender> ModeNode
for PcduHandler<ComInterface, TmSender>
{
fn id(&self) -> satrs::ComponentId {
PCDU_HANDLER.into()
}
}
impl<ComInterface: SerialInterface, TmSender: EcssTmSender> ModeChild
for PcduHandler<ComInterface, TmSender>
{
type Sender = mpsc::SyncSender<GenericMessage<ModeReply>>;
fn add_mode_parent(&mut self, id: satrs::ComponentId, reply_sender: Self::Sender) {
self.mode_node.add_message_target(id, reply_sender);
}
}
#[cfg(test)]
mod tests {
use std::sync::mpsc;
@@ -489,7 +515,7 @@ mod tests {
use satrs::{
mode::ModeRequest, power::SwitchStateBinary, request::GenericMessage, tmtc::PacketAsVec,
};
use satrs_example::config::components::{Apid, MGM_HANDLER_0};
use satrs_example::config::components::{Apid, EPS_SUBSYSTEM, MGM_HANDLER_0, PCDU_HANDLER};
use satrs_minisim::eps::SwitchMapBinary;
use super::*;
@@ -530,7 +556,7 @@
}
pub struct PcduTestbench {
pub mode_request_tx: mpsc::Sender<GenericMessage<ModeRequest>>,
pub mode_request_tx: mpsc::SyncSender<GenericMessage<ModeRequest>>,
pub mode_reply_rx_to_pus: mpsc::Receiver<GenericMessage<ModeReply>>,
pub mode_reply_rx_to_parent: mpsc::Receiver<GenericMessage<ModeReply>>,
pub composite_request_tx: mpsc::Sender<GenericMessage<CompositeRequest>>,
@@ -542,19 +568,29 @@ mod tests {
impl PcduTestbench {
pub fn new() -> Self {
let (mode_request_tx, mode_request_rx) = mpsc::channel();
let (mode_reply_tx_to_pus, mode_reply_rx_to_pus) = mpsc::channel();
let (mode_request_tx, mode_request_rx) = mpsc::sync_channel(5);
let (mode_reply_tx_to_pus, mode_reply_rx_to_pus) = mpsc::sync_channel(5);
let (mode_reply_tx_to_parent, mode_reply_rx_to_parent) = mpsc::sync_channel(5);
let mode_interface = MpscModeLeafInterface {
request_rx: mode_request_rx,
reply_to_pus_tx: mode_reply_tx_to_pus,
reply_to_parent_tx: mode_reply_tx_to_parent,
};
let mode_node =
ModeRequestHandlerMpscBounded::new(PCDU_HANDLER.into(), mode_request_rx);
let (composite_request_tx, composite_request_rx) = mpsc::channel();
let (hk_reply_tx, hk_reply_rx) = mpsc::channel();
let (tm_tx, tm_rx) = mpsc::channel::<PacketAsVec>();
let (switch_request_tx, switch_request_rx) = mpsc::channel();
let shared_switch_map = Arc::new(Mutex::new(SwitchSet::default()));
let mut handler = PcduHandler::new(
UniqueApidTargetId::new(Apid::Eps as u16, 0),
"TEST_PCDU",
mode_node,
composite_request_rx,
hk_reply_tx,
switch_request_rx,
tm_tx,
SerialInterfaceTest::default(),
shared_switch_map,
);
handler.add_mode_parent(EPS_SUBSYSTEM.into(), mode_reply_tx_to_parent);
handler.add_mode_parent(PUS_MODE_SERVICE.into(), mode_reply_tx_to_pus);
Self {
mode_request_tx,
mode_reply_rx_to_pus,
@@ -563,17 +599,7 @@ mod tests {
hk_reply_rx,
tm_rx,
switch_request_tx,
handler: PcduHandler::new(
UniqueApidTargetId::new(Apid::Eps as u16, 0),
"TEST_PCDU",
mode_interface,
composite_request_rx,
hk_reply_tx,
switch_request_rx,
tm_tx,
SerialInterfaceTest::default(),
shared_switch_map,
),
handler,
}
}
@@ -660,7 +686,10 @@ mod tests {
.mode_request_tx
.send(GenericMessage::new(
MessageMetadata::new(0, PUS_MODE_SERVICE.id()),
ModeRequest::SetMode(ModeAndSubmode::new(DeviceMode::Normal as u32, 0)),
ModeRequest::SetMode {
mode_and_submode: ModeAndSubmode::new(DeviceMode::Normal as u32, 0),
forced: false,
},
))
.expect("failed to send mode request");
let switch_map_shared = testbench.handler.shared_switch_map.lock().unwrap();
@@ -692,7 +721,10 @@ mod tests {
.mode_request_tx
.send(GenericMessage::new(
MessageMetadata::new(0, PUS_MODE_SERVICE.id()),
ModeRequest::SetMode(ModeAndSubmode::new(DeviceMode::Normal as u32, 0)),
ModeRequest::SetMode {
mode_and_submode: ModeAndSubmode::new(DeviceMode::Normal as u32, 0),
forced: false,
},
))
.expect("failed to send mode request");
testbench

View File

@@ -1,17 +1,16 @@
use std::time::Duration;
use std::{
collections::{HashSet, VecDeque},
fmt::Debug,
marker::PhantomData,
sync::{Arc, Mutex},
};
use log::{info, warn};
use satrs::tmtc::{StoreAndSendError, TcSender};
use satrs::{
encoding::ccsds::{SpValidity, SpacePacketValidator},
hal::std::tcp_server::{HandledConnectionHandler, ServerConfig, TcpSpacepacketsServer},
spacepackets::{CcsdsPacket, PacketId},
tmtc::{PacketSenderRaw, PacketSource},
tmtc::PacketSource,
};
#[derive(Default)]
@@ -111,31 +110,23 @@ pub type TcpServer<ReceivesTc, SendError> = TcpSpacepacketsServer<
SendError,
>;
pub struct TcpTask<TcSender: PacketSenderRaw<Error = SendError>, SendError: Debug + 'static>(
pub TcpServer<TcSender, SendError>,
PhantomData<SendError>,
);
pub struct TcpTask(pub TcpServer<TcSender, StoreAndSendError>);
impl<TcSender: PacketSenderRaw<Error = SendError>, SendError: Debug + 'static>
TcpTask<TcSender, SendError>
{
impl TcpTask {
pub fn new(
cfg: ServerConfig,
tm_source: SyncTcpTmSource,
tc_sender: TcSender,
valid_ids: HashSet<PacketId>,
) -> Result<Self, std::io::Error> {
Ok(Self(
TcpSpacepacketsServer::new(
cfg,
tm_source,
tc_sender,
SimplePacketValidator { valid_ids },
ConnectionFinishedHandler::default(),
None,
)?,
PhantomData,
))
Ok(Self(TcpSpacepacketsServer::new(
cfg,
tm_source,
tc_sender,
SimplePacketValidator { valid_ids },
ConnectionFinishedHandler::default(),
None,
)?))
}
pub fn periodic_operation(&mut self) {

View File

@@ -1,10 +1,9 @@
use core::fmt::Debug;
use std::net::{SocketAddr, UdpSocket};
use std::sync::mpsc;
use log::{info, warn};
use satrs::pus::HandlingStatus;
use satrs::tmtc::{PacketAsVec, PacketInPool, PacketSenderRaw};
use satrs::tmtc::{PacketAsVec, PacketInPool, StoreAndSendError, TcSender};
use satrs::{
hal::std::udp_server::{ReceiveResult, UdpTcServer},
pool::{PoolProviderWithGuards, SharedStaticMemoryPool},
@@ -65,21 +64,12 @@ impl UdpTmHandler for DynamicUdpTmHandler {
}
}
pub struct UdpTmtcServer<
TcSender: PacketSenderRaw<Error = SendError>,
TmHandler: UdpTmHandler,
SendError,
> {
pub udp_tc_server: UdpTcServer<TcSender, SendError>,
pub struct UdpTmtcServer<TmHandler: UdpTmHandler> {
pub udp_tc_server: UdpTcServer<TcSender, StoreAndSendError>,
pub tm_handler: TmHandler,
}
impl<
TcSender: PacketSenderRaw<Error = SendError>,
TmHandler: UdpTmHandler,
SendError: Debug + 'static,
> UdpTmtcServer<TcSender, TmHandler, SendError>
{
impl<TmHandler: UdpTmHandler> UdpTmtcServer<TmHandler> {
pub fn periodic_operation(&mut self) {
loop {
if self.poll_tc_server() == HandlingStatus::Empty {

View File

@@ -1,3 +1,73 @@
use std::{
net::{IpAddr, SocketAddr},
sync::{mpsc, Arc, Mutex},
thread,
time::Duration,
};
use acs::mgm::{MgmHandlerLis3Mdl, SpiDummyInterface, SpiSimInterface, SpiSimInterfaceWrapper};
use eps::{
pcdu::{PcduHandler, SerialInterfaceDummy, SerialInterfaceToSim, SerialSimInterfaceWrapper},
PowerSwitchHelper,
};
use events::EventHandler;
use interface::{
sim_client_udp::create_sim_client,
tcp::{SyncTcpTmSource, TcpTask},
udp::UdpTmtcServer,
};
use log::info;
use logger::setup_logger;
use pus::{
action::create_action_service,
event::create_event_service,
hk::create_hk_service,
mode::create_mode_service,
scheduler::{create_scheduler_service, TcReleaser},
stack::PusStack,
test::create_test_service,
PusTcDistributor, PusTcMpscRouter,
};
use requests::GenericRequestRouter;
use satrs::{
hal::std::{tcp_server::ServerConfig, udp_server::UdpTcServer},
mode::{Mode, ModeAndSubmode, ModeRequest, ModeRequestHandlerMpscBounded},
mode_tree::connect_mode_nodes,
pus::{event_man::EventRequestWithToken, EcssTcInMemConverter, HandlingStatus},
request::{GenericMessage, MessageMetadata},
spacepackets::time::{cds::CdsTime, TimeWriter},
tmtc::TcSender,
};
use satrs_example::{
config::{
components::{MGM_HANDLER_0, NO_SENDER, PCDU_HANDLER, TCP_SERVER, UDP_SERVER},
pool::create_sched_tc_pool,
tasks::{FREQ_MS_AOCS, FREQ_MS_PUS_STACK, FREQ_MS_UDP_TMTC, SIM_CLIENT_IDLE_DELAY_MS},
OBSW_SERVER_ADDR, PACKET_ID_VALIDATOR, SERVER_PORT,
},
DeviceMode,
};
use tm_sender::TmSender;
use tmtc::{tc_source::TcSourceTask, tm_sink::TmSink};
cfg_if::cfg_if! {
if #[cfg(feature = "heap_tmtc")] {
use interface::udp::DynamicUdpTmHandler;
use satrs::pus::EcssTcInVecConverter;
use tmtc::{tc_source::TcSourceTaskDynamic, tm_sink::TmSinkDynamic};
} else {
use std::sync::RwLock;
use interface::udp::StaticUdpTmHandler;
use satrs::pus::EcssTcInSharedPoolConverter;
use satrs::tmtc::{PacketSenderWithSharedPool, SharedPacketPool};
use satrs_example::config::pool::create_static_pools;
use tmtc::{
tc_source::TcSourceTaskStatic,
tm_sink::TmSinkStatic,
};
}
}
mod acs;
mod eps;
mod events;
@@ -6,106 +76,69 @@ mod interface;
mod logger;
mod pus;
mod requests;
mod spi;
mod tm_sender;
mod tmtc;
use crate::eps::pcdu::{
PcduHandler, SerialInterfaceDummy, SerialInterfaceToSim, SerialSimInterfaceWrapper,
};
use crate::eps::PowerSwitchHelper;
use crate::events::EventHandler;
use crate::interface::udp::DynamicUdpTmHandler;
use crate::pus::stack::PusStack;
use crate::tmtc::tc_source::{TcSourceTaskDynamic, TcSourceTaskStatic};
use crate::tmtc::tm_sink::{TmSinkDynamic, TmSinkStatic};
use log::info;
use pus::test::create_test_service_dynamic;
use satrs::hal::std::tcp_server::ServerConfig;
use satrs::hal::std::udp_server::UdpTcServer;
use satrs::pus::HandlingStatus;
use satrs::request::{GenericMessage, MessageMetadata};
use satrs::tmtc::{PacketSenderWithSharedPool, SharedPacketPool};
use satrs_example::config::pool::{create_sched_tc_pool, create_static_pools};
use satrs_example::config::tasks::{
FREQ_MS_AOCS, FREQ_MS_PUS_STACK, FREQ_MS_UDP_TMTC, SIM_CLIENT_IDLE_DELAY_MS,
};
use satrs_example::config::{OBSW_SERVER_ADDR, PACKET_ID_VALIDATOR, SERVER_PORT};
use satrs_example::DeviceMode;
fn main() {
setup_logger().expect("setting up logging with fern failed");
println!("Running OBSW example");
use crate::acs::mgm::{
MgmHandlerLis3Mdl, MpscModeLeafInterface, SpiDummyInterface, SpiSimInterface,
SpiSimInterfaceWrapper,
};
use crate::interface::sim_client_udp::create_sim_client;
use crate::interface::tcp::{SyncTcpTmSource, TcpTask};
use crate::interface::udp::{StaticUdpTmHandler, UdpTmtcServer};
use crate::logger::setup_logger;
use crate::pus::action::{create_action_service_dynamic, create_action_service_static};
use crate::pus::event::{create_event_service_dynamic, create_event_service_static};
use crate::pus::hk::{create_hk_service_dynamic, create_hk_service_static};
use crate::pus::mode::{create_mode_service_dynamic, create_mode_service_static};
use crate::pus::scheduler::{create_scheduler_service_dynamic, create_scheduler_service_static};
use crate::pus::test::create_test_service_static;
use crate::pus::{PusTcDistributor, PusTcMpscRouter};
use crate::requests::{CompositeRequest, GenericRequestRouter};
use satrs::mode::{Mode, ModeAndSubmode, ModeRequest};
use satrs::pus::event_man::EventRequestWithToken;
use satrs::spacepackets::{time::cds::CdsTime, time::TimeWriter};
use satrs_example::config::components::{
MGM_HANDLER_0, NO_SENDER, PCDU_HANDLER, TCP_SERVER, UDP_SERVER,
};
use std::net::{IpAddr, SocketAddr};
use std::sync::{mpsc, Mutex};
use std::sync::{Arc, RwLock};
use std::thread;
use std::time::Duration;
cfg_if::cfg_if! {
if #[cfg(not(feature = "heap_tmtc"))] {
let (tm_pool, tc_pool) = create_static_pools();
let shared_tm_pool = Arc::new(RwLock::new(tm_pool));
let shared_tc_pool = Arc::new(RwLock::new(tc_pool));
let shared_tm_pool_wrapper = SharedPacketPool::new(&shared_tm_pool);
let shared_tc_pool_wrapper = SharedPacketPool::new(&shared_tc_pool);
}
}
#[allow(dead_code)]
fn static_tmtc_pool_main() {
let (tm_pool, tc_pool) = create_static_pools();
let shared_tm_pool = Arc::new(RwLock::new(tm_pool));
let shared_tc_pool = Arc::new(RwLock::new(tc_pool));
let shared_tm_pool_wrapper = SharedPacketPool::new(&shared_tm_pool);
let shared_tc_pool_wrapper = SharedPacketPool::new(&shared_tc_pool);
let (tc_source_tx, tc_source_rx) = mpsc::sync_channel(50);
let (tm_sink_tx, tm_sink_rx) = mpsc::sync_channel(50);
let (tm_server_tx, tm_server_rx) = mpsc::sync_channel(50);
let tm_sink_tx_sender =
PacketSenderWithSharedPool::new(tm_sink_tx.clone(), shared_tm_pool_wrapper.clone());
cfg_if::cfg_if! {
if #[cfg(not(feature = "heap_tmtc"))] {
let tm_sender = TmSender::Static(
PacketSenderWithSharedPool::new(tm_sink_tx.clone(), shared_tm_pool_wrapper.clone())
);
} else if #[cfg(feature = "heap_tmtc")] {
let tm_sender = TmSender::Heap(tm_sink_tx.clone());
}
}
let (sim_request_tx, sim_request_rx) = mpsc::channel();
let (mgm_sim_reply_tx, mgm_sim_reply_rx) = mpsc::channel();
let (pcdu_sim_reply_tx, pcdu_sim_reply_rx) = mpsc::channel();
let mut opt_sim_client = create_sim_client(sim_request_rx);
let (mgm_handler_composite_tx, mgm_handler_composite_rx) =
mpsc::sync_channel::<GenericMessage<CompositeRequest>>(10);
let (pcdu_handler_composite_tx, pcdu_handler_composite_rx) =
mpsc::sync_channel::<GenericMessage<CompositeRequest>>(30);
let (mgm_handler_mode_tx, mgm_handler_mode_rx) =
mpsc::sync_channel::<GenericMessage<ModeRequest>>(5);
let (pcdu_handler_mode_tx, pcdu_handler_mode_rx) =
mpsc::sync_channel::<GenericMessage<ModeRequest>>(5);
let (mgm_handler_composite_tx, mgm_handler_composite_rx) = mpsc::sync_channel(10);
let (pcdu_handler_composite_tx, pcdu_handler_composite_rx) = mpsc::sync_channel(30);
let (mgm_handler_mode_tx, mgm_handler_mode_rx) = mpsc::sync_channel(5);
let (pcdu_handler_mode_tx, pcdu_handler_mode_rx) = mpsc::sync_channel(5);
// Some requests are targetable. This map is used to retrieve sender handles based on a target ID.
let mut request_map = GenericRequestRouter::default();
request_map
.composite_router_map
.insert(MGM_HANDLER_0.id(), mgm_handler_composite_tx);
request_map
.mode_router_map
.insert(MGM_HANDLER_0.id(), mgm_handler_mode_tx);
request_map
.composite_router_map
.insert(PCDU_HANDLER.id(), pcdu_handler_composite_tx);
request_map
.mode_router_map
.insert(PCDU_HANDLER.id(), pcdu_handler_mode_tx.clone());
// This helper structure is used by all telecommand providers which need to send telecommands
// to the TC source.
let tc_source = PacketSenderWithSharedPool::new(tc_source_tx, shared_tc_pool_wrapper.clone());
cfg_if::cfg_if! {
if #[cfg(not(feature = "heap_tmtc"))] {
let tc_sender_with_shared_pool =
PacketSenderWithSharedPool::new(tc_source_tx, shared_tc_pool_wrapper.clone());
let tc_in_mem_converter =
EcssTcInMemConverter::Static(EcssTcInSharedPoolConverter::new(shared_tc_pool, 4096));
} else if #[cfg(feature = "heap_tmtc")] {
let tc_in_mem_converter = EcssTcInMemConverter::Heap(EcssTcInVecConverter::default());
}
}
// Create event handling components
// These sender handles are used to send event requests, for example to enable or disable
@@ -117,17 +150,24 @@ fn static_tmtc_pool_main() {
// in the sat-rs documentation.
let mut event_handler = EventHandler::new(tm_sink_tx.clone(), event_rx, event_request_rx);
let (pus_test_tx, pus_test_rx) = mpsc::channel();
let (pus_event_tx, pus_event_rx) = mpsc::channel();
let (pus_sched_tx, pus_sched_rx) = mpsc::channel();
let (pus_hk_tx, pus_hk_rx) = mpsc::channel();
let (pus_action_tx, pus_action_rx) = mpsc::channel();
let (pus_mode_tx, pus_mode_rx) = mpsc::channel();
let (pus_test_tx, pus_test_rx) = mpsc::sync_channel(20);
let (pus_event_tx, pus_event_rx) = mpsc::sync_channel(10);
let (pus_sched_tx, pus_sched_rx) = mpsc::sync_channel(50);
let (pus_hk_tx, pus_hk_rx) = mpsc::sync_channel(50);
let (pus_action_tx, pus_action_rx) = mpsc::sync_channel(50);
let (pus_mode_tx, pus_mode_rx) = mpsc::sync_channel(50);
let (_pus_action_reply_tx, pus_action_reply_rx) = mpsc::channel();
let (pus_hk_reply_tx, pus_hk_reply_rx) = mpsc::channel();
let (pus_mode_reply_tx, pus_mode_reply_rx) = mpsc::channel();
let (pus_hk_reply_tx, pus_hk_reply_rx) = mpsc::sync_channel(50);
let (pus_mode_reply_tx, pus_mode_reply_rx) = mpsc::sync_channel(30);
cfg_if::cfg_if! {
if #[cfg(not(feature = "heap_tmtc"))] {
let tc_releaser = TcReleaser::Static(tc_sender_with_shared_pool.clone());
} else if #[cfg(feature = "heap_tmtc")] {
let tc_releaser = TcReleaser::Heap(tc_source_tx.clone());
}
}
let pus_router = PusTcMpscRouter {
test_tc_sender: pus_test_tx,
event_tc_sender: pus_event_tx,
@@ -136,41 +176,42 @@ fn static_tmtc_pool_main() {
action_tc_sender: pus_action_tx,
mode_tc_sender: pus_mode_tx,
};
let pus_test_service = create_test_service_static(
tm_sink_tx_sender.clone(),
shared_tc_pool.clone(),
let pus_test_service = create_test_service(
tm_sender.clone(),
tc_in_mem_converter.clone(),
event_tx.clone(),
pus_test_rx,
);
let pus_scheduler_service = create_scheduler_service_static(
tm_sink_tx_sender.clone(),
tc_source.clone(),
let pus_scheduler_service = create_scheduler_service(
tm_sender.clone(),
tc_in_mem_converter.clone(),
tc_releaser,
pus_sched_rx,
create_sched_tc_pool(),
);
let pus_event_service = create_event_service_static(
tm_sink_tx_sender.clone(),
shared_tc_pool.clone(),
let pus_event_service = create_event_service(
tm_sender.clone(),
tc_in_mem_converter.clone(),
pus_event_rx,
event_request_tx,
);
let pus_action_service = create_action_service_static(
tm_sink_tx_sender.clone(),
shared_tc_pool.clone(),
let pus_action_service = create_action_service(
tm_sender.clone(),
tc_in_mem_converter.clone(),
pus_action_rx,
request_map.clone(),
pus_action_reply_rx,
);
let pus_hk_service = create_hk_service_static(
tm_sink_tx_sender.clone(),
shared_tc_pool.clone(),
let pus_hk_service = create_hk_service(
tm_sender.clone(),
tc_in_mem_converter.clone(),
pus_hk_rx,
request_map.clone(),
pus_hk_reply_rx,
);
let pus_mode_service = create_mode_service_static(
tm_sink_tx_sender.clone(),
shared_tc_pool.clone(),
let pus_mode_service = create_mode_service(
tm_sender.clone(),
tc_in_mem_converter.clone(),
pus_mode_rx,
request_map,
pus_mode_reply_rx,
@@ -184,21 +225,36 @@ fn static_tmtc_pool_main() {
pus_mode_service,
);
let mut tmtc_task = TcSourceTaskStatic::new(
shared_tc_pool_wrapper.clone(),
tc_source_rx,
PusTcDistributor::new(tm_sink_tx_sender, pus_router),
);
cfg_if::cfg_if! {
if #[cfg(not(feature = "heap_tmtc"))] {
let mut tmtc_task = TcSourceTask::Static(TcSourceTaskStatic::new(
shared_tc_pool_wrapper.clone(),
tc_source_rx,
PusTcDistributor::new(tm_sender.clone(), pus_router),
));
let tc_sender = TcSender::StaticStore(tc_sender_with_shared_pool);
let udp_tm_handler = StaticUdpTmHandler {
tm_rx: tm_server_rx,
tm_store: shared_tm_pool.clone(),
};
} else if #[cfg(feature = "heap_tmtc")] {
let mut tmtc_task = TcSourceTask::Heap(TcSourceTaskDynamic::new(
tc_source_rx,
PusTcDistributor::new(tm_sender.clone(), pus_router),
));
let tc_sender = TcSender::HeapStore(tc_source_tx.clone());
let udp_tm_handler = DynamicUdpTmHandler {
tm_rx: tm_server_rx,
};
}
}
let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
let udp_tc_server = UdpTcServer::new(UDP_SERVER.id(), sock_addr, 2048, tc_source.clone())
let udp_tc_server = UdpTcServer::new(UDP_SERVER.id(), sock_addr, 2048, tc_sender.clone())
.expect("creating UDP TMTC server failed");
let mut udp_tmtc_server = UdpTmtcServer {
udp_tc_server,
tm_handler: StaticUdpTmHandler {
tm_rx: tm_server_rx,
tm_store: shared_tm_pool.clone(),
},
tm_handler: udp_tm_handler,
};
let tcp_server_cfg = ServerConfig::new(
@@ -212,32 +268,35 @@ fn static_tmtc_pool_main() {
let mut tcp_server = TcpTask::new(
tcp_server_cfg,
sync_tm_tcp_source.clone(),
tc_source.clone(),
tc_sender,
PACKET_ID_VALIDATOR.clone(),
)
.expect("tcp server creation failed");
let mut tm_sink = TmSinkStatic::new(
shared_tm_pool_wrapper,
sync_tm_tcp_source,
tm_sink_rx,
tm_server_tx,
);
let (mgm_handler_mode_reply_to_parent_tx, _mgm_handler_mode_reply_to_parent_rx) =
mpsc::sync_channel(5);
cfg_if::cfg_if! {
if #[cfg(not(feature = "heap_tmtc"))] {
let mut tm_sink = TmSink::Static(TmSinkStatic::new(
shared_tm_pool_wrapper,
sync_tm_tcp_source,
tm_sink_rx,
tm_server_tx,
));
} else if #[cfg(feature = "heap_tmtc")] {
let mut tm_sink = TmSink::Heap(TmSinkDynamic::new(
sync_tm_tcp_source,
tm_sink_rx,
tm_server_tx,
));
}
}
let shared_switch_set = Arc::new(Mutex::default());
let (switch_request_tx, switch_request_rx) = mpsc::sync_channel(20);
let switch_helper = PowerSwitchHelper::new(switch_request_tx, shared_switch_set.clone());
let shared_mgm_set = Arc::default();
let mgm_mode_leaf_interface = MpscModeLeafInterface {
request_rx: mgm_handler_mode_rx,
reply_to_pus_tx: pus_mode_reply_tx.clone(),
reply_to_parent_tx: mgm_handler_mode_reply_to_parent_tx,
};
let mgm_mode_node =
ModeRequestHandlerMpscBounded::new(MGM_HANDLER_0.into(), mgm_handler_mode_rx);
let mgm_spi_interface = if let Some(sim_client) = opt_sim_client.as_mut() {
sim_client.add_reply_recipient(satrs_minisim::SimComponent::MgmLis3Mdl, mgm_sim_reply_tx);
SpiSimInterfaceWrapper::Sim(SpiSimInterface {
@@ -250,22 +309,22 @@ fn static_tmtc_pool_main() {
let mut mgm_handler = MgmHandlerLis3Mdl::new(
MGM_HANDLER_0,
"MGM_0",
mgm_mode_leaf_interface,
mgm_mode_node,
mgm_handler_composite_rx,
pus_hk_reply_tx.clone(),
switch_helper.clone(),
tm_sink_tx.clone(),
tm_sender,
mgm_spi_interface,
shared_mgm_set,
);
// Connect PUS service to device handler.
connect_mode_nodes(
&mut pus_stack.mode_srv,
mgm_handler_mode_tx,
&mut mgm_handler,
pus_mode_reply_tx.clone(),
);
let (pcdu_handler_mode_reply_to_parent_tx, _pcdu_handler_mode_reply_to_parent_rx) =
mpsc::sync_channel(10);
let pcdu_mode_leaf_interface = MpscModeLeafInterface {
request_rx: pcdu_handler_mode_rx,
reply_to_pus_tx: pus_mode_reply_tx,
reply_to_parent_tx: pcdu_handler_mode_reply_to_parent_tx,
};
let pcdu_serial_interface = if let Some(sim_client) = opt_sim_client.as_mut() {
sim_client.add_reply_recipient(satrs_minisim::SimComponent::Pcdu, pcdu_sim_reply_tx);
SerialSimInterfaceWrapper::Sim(SerialInterfaceToSim::new(
@@ -275,11 +334,12 @@ fn static_tmtc_pool_main() {
} else {
SerialSimInterfaceWrapper::Dummy(SerialInterfaceDummy::default())
};
let pcdu_mode_node =
ModeRequestHandlerMpscBounded::new(PCDU_HANDLER.into(), pcdu_handler_mode_rx);
let mut pcdu_handler = PcduHandler::new(
PCDU_HANDLER,
"PCDU",
pcdu_mode_leaf_interface,
pcdu_mode_node,
pcdu_handler_composite_rx,
pus_hk_reply_tx,
switch_request_rx,
@@ -287,11 +347,21 @@ fn static_tmtc_pool_main() {
pcdu_serial_interface,
shared_switch_set,
);
connect_mode_nodes(
&mut pus_stack.mode_srv,
pcdu_handler_mode_tx.clone(),
&mut pcdu_handler,
pus_mode_reply_tx,
);
// The PCDU is a critical component which should be in normal mode immediately.
pcdu_handler_mode_tx
.send(GenericMessage::new(
MessageMetadata::new(0, NO_SENDER),
ModeRequest::SetMode(ModeAndSubmode::new(DeviceMode::Normal as Mode, 0)),
ModeRequest::SetMode {
mode_and_submode: ModeAndSubmode::new(DeviceMode::Normal as Mode, 0),
forced: false,
},
))
.expect("sending initial mode request failed");
@@ -357,11 +427,13 @@ fn static_tmtc_pool_main() {
.spawn(move || loop {
// TODO: We should introduce something like a fixed timeslot helper to allow a more
// declarative API. It would also be very useful for the AOCS task.
pcdu_handler.periodic_operation(eps::pcdu::OpCode::RegularOp);
//
// TODO: The fixed timeslot handler exists. Use it.
pcdu_handler.periodic_operation(crate::eps::pcdu::OpCode::RegularOp);
thread::sleep(Duration::from_millis(50));
pcdu_handler.periodic_operation(eps::pcdu::OpCode::PollAndRecvReplies);
pcdu_handler.periodic_operation(crate::eps::pcdu::OpCode::PollAndRecvReplies);
thread::sleep(Duration::from_millis(50));
pcdu_handler.periodic_operation(eps::pcdu::OpCode::PollAndRecvReplies);
pcdu_handler.periodic_operation(crate::eps::pcdu::OpCode::PollAndRecvReplies);
thread::sleep(Duration::from_millis(300));
})
.unwrap();
@@ -397,322 +469,6 @@ fn static_tmtc_pool_main() {
.expect("Joining PUS handler thread failed");
}
#[allow(dead_code)]
fn dyn_tmtc_pool_main() {
let (tc_source_tx, tc_source_rx) = mpsc::channel();
let (tm_sink_tx, tm_sink_rx) = mpsc::channel();
let (tm_server_tx, tm_server_rx) = mpsc::channel();
let (sim_request_tx, sim_request_rx) = mpsc::channel();
let (mgm_sim_reply_tx, mgm_sim_reply_rx) = mpsc::channel();
let (pcdu_sim_reply_tx, pcdu_sim_reply_rx) = mpsc::channel();
let mut opt_sim_client = create_sim_client(sim_request_rx);
// Some requests are targetable. This map is used to retrieve sender handles based on a target ID.
let (mgm_handler_composite_tx, mgm_handler_composite_rx) =
mpsc::sync_channel::<GenericMessage<CompositeRequest>>(5);
let (pcdu_handler_composite_tx, pcdu_handler_composite_rx) =
mpsc::sync_channel::<GenericMessage<CompositeRequest>>(10);
let (mgm_handler_mode_tx, mgm_handler_mode_rx) =
mpsc::sync_channel::<GenericMessage<ModeRequest>>(5);
let (pcdu_handler_mode_tx, pcdu_handler_mode_rx) =
mpsc::sync_channel::<GenericMessage<ModeRequest>>(10);
// Some requests are targetable. This map is used to retrieve sender handles based on a target ID.
let mut request_map = GenericRequestRouter::default();
request_map
.composite_router_map
.insert(MGM_HANDLER_0.id(), mgm_handler_composite_tx);
request_map
.mode_router_map
.insert(MGM_HANDLER_0.id(), mgm_handler_mode_tx);
request_map
.composite_router_map
.insert(PCDU_HANDLER.id(), pcdu_handler_composite_tx);
request_map
.mode_router_map
.insert(PCDU_HANDLER.id(), pcdu_handler_mode_tx.clone());
// Create event handling components
// These sender handles are used to send event requests, for example to enable or disable
// certain events.
let (event_tx, event_rx) = mpsc::sync_channel(100);
let (event_request_tx, event_request_rx) = mpsc::channel::<EventRequestWithToken>();
// The event task is the core handler to perform the event routing and TM handling as specified
// in the sat-rs documentation.
let mut event_handler = EventHandler::new(tm_sink_tx.clone(), event_rx, event_request_rx);
let (pus_test_tx, pus_test_rx) = mpsc::channel();
let (pus_event_tx, pus_event_rx) = mpsc::channel();
let (pus_sched_tx, pus_sched_rx) = mpsc::channel();
let (pus_hk_tx, pus_hk_rx) = mpsc::channel();
let (pus_action_tx, pus_action_rx) = mpsc::channel();
let (pus_mode_tx, pus_mode_rx) = mpsc::channel();
let (_pus_action_reply_tx, pus_action_reply_rx) = mpsc::channel();
let (pus_hk_reply_tx, pus_hk_reply_rx) = mpsc::channel();
let (pus_mode_reply_tx, pus_mode_reply_rx) = mpsc::channel();
let pus_router = PusTcMpscRouter {
test_tc_sender: pus_test_tx,
event_tc_sender: pus_event_tx,
sched_tc_sender: pus_sched_tx,
hk_tc_sender: pus_hk_tx,
action_tc_sender: pus_action_tx,
mode_tc_sender: pus_mode_tx,
};
let pus_test_service =
create_test_service_dynamic(tm_sink_tx.clone(), event_tx.clone(), pus_test_rx);
let pus_scheduler_service = create_scheduler_service_dynamic(
tm_sink_tx.clone(),
tc_source_tx.clone(),
pus_sched_rx,
create_sched_tc_pool(),
);
let pus_event_service =
create_event_service_dynamic(tm_sink_tx.clone(), pus_event_rx, event_request_tx);
let pus_action_service = create_action_service_dynamic(
tm_sink_tx.clone(),
pus_action_rx,
request_map.clone(),
pus_action_reply_rx,
);
let pus_hk_service = create_hk_service_dynamic(
tm_sink_tx.clone(),
pus_hk_rx,
request_map.clone(),
pus_hk_reply_rx,
);
let pus_mode_service = create_mode_service_dynamic(
tm_sink_tx.clone(),
pus_mode_rx,
request_map,
pus_mode_reply_rx,
);
let mut pus_stack = PusStack::new(
pus_test_service,
pus_hk_service,
pus_event_service,
pus_action_service,
pus_scheduler_service,
pus_mode_service,
);
let mut tmtc_task = TcSourceTaskDynamic::new(
tc_source_rx,
PusTcDistributor::new(tm_sink_tx.clone(), pus_router),
);
let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
let udp_tc_server = UdpTcServer::new(UDP_SERVER.id(), sock_addr, 2048, tc_source_tx.clone())
.expect("creating UDP TMTC server failed");
let mut udp_tmtc_server = UdpTmtcServer {
udp_tc_server,
tm_handler: DynamicUdpTmHandler {
tm_rx: tm_server_rx,
},
};
let tcp_server_cfg = ServerConfig::new(
TCP_SERVER.id(),
sock_addr,
Duration::from_millis(400),
4096,
8192,
);
let sync_tm_tcp_source = SyncTcpTmSource::new(200);
let mut tcp_server = TcpTask::new(
tcp_server_cfg,
sync_tm_tcp_source.clone(),
tc_source_tx.clone(),
PACKET_ID_VALIDATOR.clone(),
)
.expect("tcp server creation failed");
let mut tm_funnel = TmSinkDynamic::new(sync_tm_tcp_source, tm_sink_rx, tm_server_tx);
let shared_switch_set = Arc::new(Mutex::default());
let (switch_request_tx, switch_request_rx) = mpsc::sync_channel(20);
let switch_helper = PowerSwitchHelper::new(switch_request_tx, shared_switch_set.clone());
let (mgm_handler_mode_reply_to_parent_tx, _mgm_handler_mode_reply_to_parent_rx) =
mpsc::sync_channel(5);
let shared_mgm_set = Arc::default();
let mode_leaf_interface = MpscModeLeafInterface {
request_rx: mgm_handler_mode_rx,
reply_to_pus_tx: pus_mode_reply_tx.clone(),
reply_to_parent_tx: mgm_handler_mode_reply_to_parent_tx,
};
let mgm_spi_interface = if let Some(sim_client) = opt_sim_client.as_mut() {
sim_client.add_reply_recipient(satrs_minisim::SimComponent::MgmLis3Mdl, mgm_sim_reply_tx);
SpiSimInterfaceWrapper::Sim(SpiSimInterface {
sim_request_tx: sim_request_tx.clone(),
sim_reply_rx: mgm_sim_reply_rx,
})
} else {
SpiSimInterfaceWrapper::Dummy(SpiDummyInterface::default())
};
let mut mgm_handler = MgmHandlerLis3Mdl::new(
MGM_HANDLER_0,
"MGM_0",
mode_leaf_interface,
mgm_handler_composite_rx,
pus_hk_reply_tx.clone(),
switch_helper.clone(),
tm_sink_tx.clone(),
mgm_spi_interface,
shared_mgm_set,
);
let (pcdu_handler_mode_reply_to_parent_tx, _pcdu_handler_mode_reply_to_parent_rx) =
mpsc::sync_channel(10);
let pcdu_mode_leaf_interface = MpscModeLeafInterface {
request_rx: pcdu_handler_mode_rx,
reply_to_pus_tx: pus_mode_reply_tx,
reply_to_parent_tx: pcdu_handler_mode_reply_to_parent_tx,
};
let pcdu_serial_interface = if let Some(sim_client) = opt_sim_client.as_mut() {
sim_client.add_reply_recipient(satrs_minisim::SimComponent::Pcdu, pcdu_sim_reply_tx);
SerialSimInterfaceWrapper::Sim(SerialInterfaceToSim::new(
sim_request_tx.clone(),
pcdu_sim_reply_rx,
))
} else {
SerialSimInterfaceWrapper::Dummy(SerialInterfaceDummy::default())
};
let mut pcdu_handler = PcduHandler::new(
PCDU_HANDLER,
"PCDU",
pcdu_mode_leaf_interface,
pcdu_handler_composite_rx,
pus_hk_reply_tx,
switch_request_rx,
tm_sink_tx,
pcdu_serial_interface,
shared_switch_set,
);
// The PCDU is a critical component which should be in normal mode immediately.
pcdu_handler_mode_tx
.send(GenericMessage::new(
MessageMetadata::new(0, NO_SENDER),
ModeRequest::SetMode(ModeAndSubmode::new(DeviceMode::Normal as Mode, 0)),
))
.expect("sending initial mode request failed");
info!("Starting TMTC and UDP task");
let jh_udp_tmtc = thread::Builder::new()
.name("sat-rs tmtc-udp".to_string())
.spawn(move || {
info!("Running UDP server on port {SERVER_PORT}");
loop {
udp_tmtc_server.periodic_operation();
tmtc_task.periodic_operation();
thread::sleep(Duration::from_millis(FREQ_MS_UDP_TMTC));
}
})
.unwrap();
info!("Starting TCP task");
let jh_tcp = thread::Builder::new()
.name("sat-rs tcp".to_string())
.spawn(move || {
info!("Running TCP server on port {SERVER_PORT}");
loop {
tcp_server.periodic_operation();
}
})
.unwrap();
info!("Starting TM funnel task");
let jh_tm_funnel = thread::Builder::new()
.name("sat-rs tm-sink".to_string())
.spawn(move || loop {
tm_funnel.operation();
})
.unwrap();
let mut opt_jh_sim_client = None;
if let Some(mut sim_client) = opt_sim_client {
info!("Starting UDP sim client task");
opt_jh_sim_client = Some(
thread::Builder::new()
.name("sat-rs sim adapter".to_string())
.spawn(move || loop {
if sim_client.operation() == HandlingStatus::Empty {
std::thread::sleep(Duration::from_millis(SIM_CLIENT_IDLE_DELAY_MS));
}
})
.unwrap(),
);
}
info!("Starting AOCS thread");
let jh_aocs = thread::Builder::new()
.name("sat-rs aocs".to_string())
.spawn(move || loop {
mgm_handler.periodic_operation();
thread::sleep(Duration::from_millis(FREQ_MS_AOCS));
})
.unwrap();
info!("Starting EPS thread");
let jh_eps = thread::Builder::new()
.name("sat-rs eps".to_string())
.spawn(move || loop {
// TODO: We should introduce something like a fixed timeslot helper to allow a more
// declarative API. It would also be very useful for the AOCS task.
pcdu_handler.periodic_operation(eps::pcdu::OpCode::RegularOp);
thread::sleep(Duration::from_millis(50));
pcdu_handler.periodic_operation(eps::pcdu::OpCode::PollAndRecvReplies);
thread::sleep(Duration::from_millis(50));
pcdu_handler.periodic_operation(eps::pcdu::OpCode::PollAndRecvReplies);
thread::sleep(Duration::from_millis(300));
})
.unwrap();
info!("Starting PUS handler thread");
let jh_pus_handler = thread::Builder::new()
.name("sat-rs pus".to_string())
.spawn(move || loop {
pus_stack.periodic_operation();
event_handler.periodic_operation();
thread::sleep(Duration::from_millis(FREQ_MS_PUS_STACK));
})
.unwrap();
jh_udp_tmtc
.join()
.expect("Joining UDP TMTC server thread failed");
jh_tcp
.join()
.expect("Joining TCP TMTC server thread failed");
jh_tm_funnel
.join()
.expect("Joining TM Funnel thread failed");
if let Some(jh_sim_client) = opt_jh_sim_client {
jh_sim_client
.join()
.expect("Joining SIM client thread failed");
}
jh_aocs.join().expect("Joining AOCS thread failed");
jh_eps.join().expect("Joining EPS thread failed");
jh_pus_handler
.join()
.expect("Joining PUS handler thread failed");
}
fn main() {
setup_logger().expect("setting up logging with fern failed");
println!("Running OBSW example");
#[cfg(not(feature = "dyn_tmtc"))]
static_tmtc_pool_main();
#[cfg(feature = "dyn_tmtc")]
dyn_tmtc_pool_main();
}
pub fn update_time(time_provider: &mut CdsTime, timestamp: &mut [u8]) {
time_provider
.update_from_now()

View File

@ -1,6 +1,5 @@
use log::warn;
use satrs::action::{ActionRequest, ActionRequestVariant};
use satrs::pool::SharedStaticMemoryPool;
use satrs::pus::action::{
ActionReplyPus, ActionReplyVariant, ActivePusActionRequestStd, DefaultActiveActionRequestMap,
};
@@ -10,21 +9,20 @@ use satrs::pus::verification::{
VerificationReportingProvider, VerificationToken,
};
use satrs::pus::{
ActiveRequestProvider, EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter,
EcssTcInVecConverter, EcssTmSender, EcssTmtcError, GenericConversionError, MpscTcReceiver,
MpscTmAsVecSender, PusPacketHandlingError, PusReplyHandler, PusServiceHelper,
PusTcToRequestConverter,
ActiveRequestProvider, EcssTcAndToken, EcssTcInMemConverter, EcssTmSender, EcssTmtcError,
GenericConversionError, MpscTcReceiver, PusPacketHandlingError, PusReplyHandler,
PusServiceHelper, PusTcToRequestConverter,
};
use satrs::request::{GenericMessage, UniqueApidTargetId};
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::{EcssEnumU16, PusPacket, PusServiceId};
use satrs::tmtc::{PacketAsVec, PacketSenderWithSharedPool};
use satrs_example::config::components::PUS_ACTION_SERVICE;
use satrs_example::config::tmtc_err;
use std::sync::mpsc;
use std::time::Duration;
use crate::requests::GenericRequestRouter;
use crate::tm_sender::TmSender;
use super::{
create_verification_reporter, generic_pus_request_timeout_handler, HandlingStatus,
@@ -207,20 +205,20 @@ impl PusTcToRequestConverter<ActivePusActionRequestStd, ActionRequest> for Actio
}
}
pub fn create_action_service_static(
tm_sender: PacketSenderWithSharedPool,
tc_pool: SharedStaticMemoryPool,
pub fn create_action_service(
tm_sender: TmSender,
tc_in_mem_converter: EcssTcInMemConverter,
pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
action_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<ActionReplyPus>>,
) -> ActionServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
) -> ActionServiceWrapper {
let action_request_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_ACTION_SERVICE.id(),
pus_action_rx,
tm_sender,
create_verification_reporter(PUS_ACTION_SERVICE.id(), PUS_ACTION_SERVICE.apid),
EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048),
tc_in_mem_converter,
),
ActionRequestConverter::default(),
// TODO: Implementation which does not use run-time allocation? Maybe something like
@@ -235,36 +233,11 @@ pub fn create_action_service_static(
}
}
pub fn create_action_service_dynamic(
tm_funnel_tx: mpsc::Sender<PacketAsVec>,
pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
action_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<ActionReplyPus>>,
) -> ActionServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
let action_request_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_ACTION_SERVICE.id(),
pus_action_rx,
tm_funnel_tx,
create_verification_reporter(PUS_ACTION_SERVICE.id(), PUS_ACTION_SERVICE.apid),
EcssTcInVecConverter::default(),
),
ActionRequestConverter::default(),
DefaultActiveActionRequestMap::default(),
ActionReplyHandler::default(),
action_router,
reply_receiver,
);
ActionServiceWrapper {
service: action_request_handler,
}
}
pub struct ActionServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> {
pub struct ActionServiceWrapper {
pub(crate) service: PusTargetedRequestService<
MpscTcReceiver,
TmSender,
TcInMemConverter,
EcssTcInMemConverter,
VerificationReporter,
ActionRequestConverter,
ActionReplyHandler,
@@ -275,9 +248,7 @@ pub struct ActionServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTc
>,
}
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> TargetedPusService
for ActionServiceWrapper<TmSender, TcInMemConverter>
{
impl TargetedPusService for ActionServiceWrapper {
const SERVICE_ID: u8 = PusServiceId::Action as u8;
const SERVICE_STR: &'static str = "action";
@@ -303,9 +274,10 @@ mod tests {
use satrs::pus::test_util::{
TEST_APID, TEST_COMPONENT_ID_0, TEST_COMPONENT_ID_1, TEST_UNIQUE_ID_0, TEST_UNIQUE_ID_1,
};
use satrs::pus::verification;
use satrs::pus::verification::test_util::TestVerificationReporter;
use satrs::pus::{verification, EcssTcInVecConverter};
use satrs::request::MessageMetadata;
use satrs::tmtc::PacketAsVec;
use satrs::ComponentId;
use satrs::{
res_code::ResultU16,

View File

@@ -1,34 +1,32 @@
use std::sync::mpsc;
use crate::pus::create_verification_reporter;
use satrs::pool::SharedStaticMemoryPool;
use crate::tm_sender::TmSender;
use satrs::pus::event_man::EventRequestWithToken;
use satrs::pus::event_srv::PusEventServiceHandler;
use satrs::pus::verification::VerificationReporter;
use satrs::pus::{
DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConverter,
EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTmSender, MpscTcReceiver,
MpscTmAsVecSender, PartialPusHandlingError, PusServiceHelper,
DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConverter, MpscTcReceiver,
PartialPusHandlingError, PusServiceHelper,
};
use satrs::spacepackets::ecss::PusServiceId;
use satrs::tmtc::{PacketAsVec, PacketSenderWithSharedPool};
use satrs_example::config::components::PUS_EVENT_MANAGEMENT;
use super::{DirectPusService, HandlingStatus};
pub fn create_event_service_static(
tm_sender: PacketSenderWithSharedPool,
tc_pool: SharedStaticMemoryPool,
pub fn create_event_service(
tm_sender: TmSender,
tm_in_pool_converter: EcssTcInMemConverter,
pus_event_rx: mpsc::Receiver<EcssTcAndToken>,
event_request_tx: mpsc::Sender<EventRequestWithToken>,
) -> EventServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
) -> EventServiceWrapper {
let pus_5_handler = PusEventServiceHandler::new(
PusServiceHelper::new(
PUS_EVENT_MANAGEMENT.id(),
pus_event_rx,
tm_sender,
create_verification_reporter(PUS_EVENT_MANAGEMENT.id(), PUS_EVENT_MANAGEMENT.apid),
EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048),
tm_in_pool_converter,
),
event_request_tx,
);
@@ -37,34 +35,16 @@ pub fn create_event_service_static(
}
}
pub fn create_event_service_dynamic(
tm_funnel_tx: mpsc::Sender<PacketAsVec>,
pus_event_rx: mpsc::Receiver<EcssTcAndToken>,
event_request_tx: mpsc::Sender<EventRequestWithToken>,
) -> EventServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
let pus_5_handler = PusEventServiceHandler::new(
PusServiceHelper::new(
PUS_EVENT_MANAGEMENT.id(),
pus_event_rx,
tm_funnel_tx,
create_verification_reporter(PUS_EVENT_MANAGEMENT.id(), PUS_EVENT_MANAGEMENT.apid),
EcssTcInVecConverter::default(),
),
event_request_tx,
);
EventServiceWrapper {
handler: pus_5_handler,
}
pub struct EventServiceWrapper {
pub handler: PusEventServiceHandler<
MpscTcReceiver,
TmSender,
EcssTcInMemConverter,
VerificationReporter,
>,
}
pub struct EventServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> {
pub handler:
PusEventServiceHandler<MpscTcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
}
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> DirectPusService
for EventServiceWrapper<TmSender, TcInMemConverter>
{
impl DirectPusService for EventServiceWrapper {
const SERVICE_ID: u8 = PusServiceId::Event as u8;
const SERVICE_STR: &'static str = "events";

View File

@@ -1,21 +1,18 @@
use derive_new::new;
use satrs::hk::{CollectionIntervalFactor, HkRequest, HkRequestVariant, UniqueId};
use satrs::pool::SharedStaticMemoryPool;
use satrs::pus::verification::{
FailParams, TcStateAccepted, TcStateStarted, VerificationReporter,
VerificationReportingProvider, VerificationToken,
};
use satrs::pus::{
ActivePusRequestStd, ActiveRequestProvider, DefaultActiveRequestMap, EcssTcAndToken,
EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTmSender,
EcssTmtcError, GenericConversionError, MpscTcReceiver, MpscTmAsVecSender,
EcssTcInMemConverter, EcssTmSender, EcssTmtcError, GenericConversionError, MpscTcReceiver,
PusPacketHandlingError, PusReplyHandler, PusServiceHelper, PusTcToRequestConverter,
};
use satrs::request::{GenericMessage, UniqueApidTargetId};
use satrs::res_code::ResultU16;
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::{hk, PusPacket, PusServiceId};
use satrs::tmtc::{PacketAsVec, PacketSenderWithSharedPool};
use satrs_example::config::components::PUS_HK_SERVICE;
use satrs_example::config::{hk_err, tmtc_err};
use std::sync::mpsc;
@ -23,6 +20,7 @@ use std::time::Duration;
use crate::pus::{create_verification_reporter, generic_pus_request_timeout_handler};
use crate::requests::GenericRequestRouter;
use crate::tm_sender::TmSender;
use super::{HandlingStatus, PusTargetedRequestService, TargetedPusService};
@ -242,20 +240,20 @@ impl PusTcToRequestConverter<ActivePusRequestStd, HkRequest> for HkRequestConver
}
}
pub fn create_hk_service_static(
tm_sender: PacketSenderWithSharedPool,
tc_pool: SharedStaticMemoryPool,
pub fn create_hk_service(
tm_sender: TmSender,
tc_in_mem_converter: EcssTcInMemConverter,
pus_hk_rx: mpsc::Receiver<EcssTcAndToken>,
request_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<HkReply>>,
) -> HkServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
) -> HkServiceWrapper {
let pus_3_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_HK_SERVICE.id(),
pus_hk_rx,
tm_sender,
create_verification_reporter(PUS_HK_SERVICE.id(), PUS_HK_SERVICE.apid),
EcssTcInSharedStoreConverter::new(tc_pool, 2048),
tc_in_mem_converter,
),
HkRequestConverter::default(),
DefaultActiveRequestMap::default(),
@ -268,36 +266,11 @@ pub fn create_hk_service_static(
}
}
pub fn create_hk_service_dynamic(
tm_funnel_tx: mpsc::Sender<PacketAsVec>,
pus_hk_rx: mpsc::Receiver<EcssTcAndToken>,
request_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<HkReply>>,
) -> HkServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
let pus_3_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_HK_SERVICE.id(),
pus_hk_rx,
tm_funnel_tx,
create_verification_reporter(PUS_HK_SERVICE.id(), PUS_HK_SERVICE.apid),
EcssTcInVecConverter::default(),
),
HkRequestConverter::default(),
DefaultActiveRequestMap::default(),
HkReplyHandler::default(),
request_router,
reply_receiver,
);
HkServiceWrapper {
service: pus_3_handler,
}
}
pub struct HkServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> {
pub struct HkServiceWrapper {
pub(crate) service: PusTargetedRequestService<
MpscTcReceiver,
TmSender,
TcInMemConverter,
EcssTcInMemConverter,
VerificationReporter,
HkRequestConverter,
HkReplyHandler,
@ -308,9 +281,7 @@ pub struct HkServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMe
>,
}
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> TargetedPusService
for HkServiceWrapper<TmSender, TcInMemConverter>
{
impl TargetedPusService for HkServiceWrapper {
const SERVICE_ID: u8 = PusServiceId::Housekeeping as u8;
const SERVICE_STR: &'static str = "housekeeping";


@ -1,4 +1,5 @@
use crate::requests::GenericRequestRouter;
use crate::tm_sender::TmSender;
use log::warn;
use satrs::pool::PoolAddr;
use satrs::pus::verification::{
@ -6,7 +7,7 @@ use satrs::pus::verification::{
VerificationReporterCfg, VerificationReportingProvider, VerificationToken,
};
use satrs::pus::{
ActiveRequestMapProvider, ActiveRequestProvider, EcssTcAndToken, EcssTcInMemConverter,
ActiveRequestMapProvider, ActiveRequestProvider, EcssTcAndToken, EcssTcInMemConversionProvider,
EcssTcReceiver, EcssTmSender, EcssTmtcError, GenericConversionError, GenericRoutingError,
HandlingStatus, PusPacketHandlingError, PusReplyHandler, PusRequestRouter, PusServiceHelper,
PusTcToRequestConverter, TcInMemory,
@ -21,7 +22,7 @@ use satrs_example::config::components::PUS_ROUTING_SERVICE;
use satrs_example::config::{tmtc_err, CustomPusServiceId};
use satrs_example::TimestampHelper;
use std::fmt::Debug;
use std::sync::mpsc::{self, Sender};
use std::sync::mpsc;
pub mod action;
pub mod event;
@ -40,16 +41,16 @@ pub fn create_verification_reporter(owner_id: ComponentId, apid: Apid) -> Verifi
/// Simple router structure which forwards PUS telecommands to dedicated handlers.
pub struct PusTcMpscRouter {
pub test_tc_sender: Sender<EcssTcAndToken>,
pub event_tc_sender: Sender<EcssTcAndToken>,
pub sched_tc_sender: Sender<EcssTcAndToken>,
pub hk_tc_sender: Sender<EcssTcAndToken>,
pub test_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
pub event_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
pub sched_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
pub hk_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
#[allow(dead_code)]
pub action_tc_sender: Sender<EcssTcAndToken>,
pub mode_tc_sender: Sender<EcssTcAndToken>,
pub action_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
pub mode_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
}
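A hedged wiring sketch for the new bounded router: with the switch from mpsc::Sender to mpsc::SyncSender, each service queue now has to be created via mpsc::sync_channel. The queue depth of 50 and all variable names are illustrative assumptions, not values taken from this commit; the receivers would be handed to the corresponding PUS service constructors.

let (test_tc_tx, _test_tc_rx) = mpsc::sync_channel::<EcssTcAndToken>(50);
let (event_tc_tx, _event_tc_rx) = mpsc::sync_channel::<EcssTcAndToken>(50);
let (sched_tc_tx, _sched_tc_rx) = mpsc::sync_channel::<EcssTcAndToken>(50);
let (hk_tc_tx, _hk_tc_rx) = mpsc::sync_channel::<EcssTcAndToken>(50);
let (action_tc_tx, _action_tc_rx) = mpsc::sync_channel::<EcssTcAndToken>(50);
let (mode_tc_tx, _mode_tc_rx) = mpsc::sync_channel::<EcssTcAndToken>(50);
let pus_router = PusTcMpscRouter {
    test_tc_sender: test_tc_tx,
    event_tc_sender: event_tc_tx,
    sched_tc_sender: sched_tc_tx,
    hk_tc_sender: hk_tc_tx,
    action_tc_sender: action_tc_tx,
    mode_tc_sender: mode_tc_tx,
};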
pub struct PusTcDistributor<TmSender: EcssTmSender> {
pub struct PusTcDistributor {
#[allow(dead_code)]
pub id: ComponentId,
pub tm_sender: TmSender,
@ -58,7 +59,7 @@ pub struct PusTcDistributor<TmSender: EcssTmSender> {
stamp_helper: TimestampHelper,
}
impl<TmSender: EcssTmSender> PusTcDistributor<TmSender> {
impl PusTcDistributor {
pub fn new(tm_sender: TmSender, pus_router: PusTcMpscRouter) -> Self {
Self {
id: PUS_ROUTING_SERVICE.raw(),
@ -269,7 +270,7 @@ pub trait DirectPusService {
pub struct PusTargetedRequestService<
TcReceiver: EcssTcReceiver,
TmSender: EcssTmSender,
TcInMemConverter: EcssTcInMemConverter,
TcInMemConverter: EcssTcInMemConversionProvider,
VerificationReporter: VerificationReportingProvider,
RequestConverter: PusTcToRequestConverter<ActiveRequestInfo, RequestType, Error = GenericConversionError>,
ReplyHandler: PusReplyHandler<ActiveRequestInfo, ReplyType, Error = EcssTmtcError>,
@ -291,7 +292,7 @@ pub struct PusTargetedRequestService<
impl<
TcReceiver: EcssTcReceiver,
TmSender: EcssTmSender,
TcInMemConverter: EcssTcInMemConverter,
TcInMemConverter: EcssTcInMemConversionProvider,
VerificationReporter: VerificationReportingProvider,
RequestConverter: PusTcToRequestConverter<ActiveRequestInfo, RequestType, Error = GenericConversionError>,
ReplyHandler: PusReplyHandler<ActiveRequestInfo, ReplyType, Error = EcssTmtcError>,


@ -1,15 +1,14 @@
use derive_new::new;
use satrs::tmtc::{PacketAsVec, PacketSenderWithSharedPool};
use satrs::mode_tree::{ModeNode, ModeParent};
use std::sync::mpsc;
use std::time::Duration;
use crate::requests::GenericRequestRouter;
use satrs::pool::SharedStaticMemoryPool;
use crate::tm_sender::TmSender;
use satrs::pus::verification::VerificationReporter;
use satrs::pus::{
DefaultActiveRequestMap, EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter,
EcssTcInVecConverter, MpscTcReceiver, MpscTmAsVecSender, PusPacketHandlingError,
PusServiceHelper,
DefaultActiveRequestMap, EcssTcAndToken, EcssTcInMemConverter, MpscTcReceiver,
PusPacketHandlingError, PusServiceHelper,
};
use satrs::request::GenericMessage;
use satrs::{
@ -110,6 +109,7 @@ impl PusReplyHandler<ActivePusRequestStd, ModeReply> for ModeReplyHandler {
),
)?;
}
ModeReply::ModeInfo(_mode_and_submode) => (),
};
Ok(true)
}
@ -190,7 +190,13 @@ impl PusTcToRequestConverter<ActivePusRequestStd, ModeRequest> for ModeRequestCo
}
let mode_and_submode = ModeAndSubmode::from_be_bytes(&tc.user_data()[4..])
.expect("mode and submode extraction failed");
Ok((active_request, ModeRequest::SetMode(mode_and_submode)))
Ok((
active_request,
ModeRequest::SetMode {
mode_and_submode,
forced: false,
},
))
}
Subservice::TcReadMode => Ok((active_request, ModeRequest::ReadMode)),
Subservice::TcAnnounceMode => Ok((active_request, ModeRequest::AnnounceMode)),
@ -202,20 +208,20 @@ impl PusTcToRequestConverter<ActivePusRequestStd, ModeRequest> for ModeRequestCo
}
}
pub fn create_mode_service_static(
tm_sender: PacketSenderWithSharedPool,
tc_pool: SharedStaticMemoryPool,
pub fn create_mode_service(
tm_sender: TmSender,
tc_in_mem_converter: EcssTcInMemConverter,
pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
mode_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<ModeReply>>,
) -> ModeServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
) -> ModeServiceWrapper {
let mode_request_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_MODE_SERVICE.id(),
pus_action_rx,
tm_sender,
create_verification_reporter(PUS_MODE_SERVICE.id(), PUS_MODE_SERVICE.apid),
EcssTcInSharedStoreConverter::new(tc_pool, 2048),
tc_in_mem_converter,
),
ModeRequestConverter::default(),
DefaultActiveRequestMap::default(),
@ -228,36 +234,11 @@ pub fn create_mode_service_static(
}
}
pub fn create_mode_service_dynamic(
tm_funnel_tx: mpsc::Sender<PacketAsVec>,
pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
mode_router: GenericRequestRouter,
reply_receiver: mpsc::Receiver<GenericMessage<ModeReply>>,
) -> ModeServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
let mode_request_handler = PusTargetedRequestService::new(
PusServiceHelper::new(
PUS_MODE_SERVICE.id(),
pus_action_rx,
tm_funnel_tx,
create_verification_reporter(PUS_MODE_SERVICE.id(), PUS_MODE_SERVICE.apid),
EcssTcInVecConverter::default(),
),
ModeRequestConverter::default(),
DefaultActiveRequestMap::default(),
ModeReplyHandler::new(PUS_MODE_SERVICE.id()),
mode_router,
reply_receiver,
);
ModeServiceWrapper {
service: mode_request_handler,
}
}
pub struct ModeServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> {
pub struct ModeServiceWrapper {
pub(crate) service: PusTargetedRequestService<
MpscTcReceiver,
TmSender,
TcInMemConverter,
EcssTcInMemConverter,
VerificationReporter,
ModeRequestConverter,
ModeReplyHandler,
@ -268,9 +249,24 @@ pub struct ModeServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcIn
>,
}
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> TargetedPusService
for ModeServiceWrapper<TmSender, TcInMemConverter>
{
impl ModeNode for ModeServiceWrapper {
fn id(&self) -> ComponentId {
self.service.service_helper.id()
}
}
impl ModeParent for ModeServiceWrapper {
type Sender = mpsc::SyncSender<GenericMessage<ModeRequest>>;
fn add_mode_child(&mut self, id: ComponentId, request_sender: Self::Sender) {
self.service
.request_router
.mode_router_map
.insert(id, request_sender);
}
}
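A minimal sketch of how a child component could be attached to the mode service through the new ModeParent implementation; mode_srv, child_component_id and the queue depth of 10 are placeholders for illustration.

let (mode_req_tx, _mode_req_rx) = mpsc::sync_channel::<GenericMessage<ModeRequest>>(10);
// Routes future mode requests for this component ID through the bounded channel.
mode_srv.add_mode_child(child_component_id, mode_req_tx);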
impl TargetedPusService for ModeServiceWrapper {
const SERVICE_ID: u8 = CustomPusServiceId::Mode as u8;
const SERVICE_STR: &'static str = "mode";
@ -346,7 +342,13 @@ mod tests {
let (_active_req, req) = testbench
.convert(token, &[], TEST_APID, TEST_UNIQUE_ID_0)
.expect("conversion has failed");
assert_eq!(req, ModeRequest::SetMode(mode_and_submode));
assert_eq!(
req,
ModeRequest::SetMode {
mode_and_submode,
forced: false
}
);
}
#[test]


@ -2,15 +2,15 @@ use std::sync::mpsc;
use std::time::Duration;
use crate::pus::create_verification_reporter;
use crate::tm_sender::TmSender;
use log::info;
use satrs::pool::{PoolProvider, StaticMemoryPool};
use satrs::pus::scheduler::{PusScheduler, TcInfo};
use satrs::pus::scheduler_srv::PusSchedServiceHandler;
use satrs::pus::verification::VerificationReporter;
use satrs::pus::{
DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConverter,
EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTmSender, MpscTcReceiver,
MpscTmAsVecSender, PartialPusHandlingError, PusServiceHelper,
DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConverter, MpscTcReceiver,
PartialPusHandlingError, PusServiceHelper,
};
use satrs::spacepackets::ecss::PusServiceId;
use satrs::tmtc::{PacketAsVec, PacketInPool, PacketSenderWithSharedPool};
@ -19,11 +19,11 @@ use satrs_example::config::components::PUS_SCHED_SERVICE;
use super::{DirectPusService, HandlingStatus};
pub trait TcReleaser {
pub trait TcReleaseProvider {
fn release(&mut self, sender_id: ComponentId, enabled: bool, info: &TcInfo, tc: &[u8]) -> bool;
}
impl TcReleaser for PacketSenderWithSharedPool {
impl TcReleaseProvider for PacketSenderWithSharedPool {
fn release(
&mut self,
sender_id: ComponentId,
@ -48,7 +48,7 @@ impl TcReleaser for PacketSenderWithSharedPool {
}
}
impl TcReleaser for mpsc::Sender<PacketAsVec> {
impl TcReleaseProvider for mpsc::SyncSender<PacketAsVec> {
fn release(
&mut self,
sender_id: ComponentId,
@ -65,23 +65,35 @@ impl TcReleaser for mpsc::Sender<PacketAsVec> {
}
}
pub struct SchedulingServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter>
{
#[allow(dead_code)]
pub enum TcReleaser {
Static(PacketSenderWithSharedPool),
Heap(mpsc::SyncSender<PacketAsVec>),
}
impl TcReleaseProvider for TcReleaser {
fn release(&mut self, sender_id: ComponentId, enabled: bool, info: &TcInfo, tc: &[u8]) -> bool {
match self {
TcReleaser::Static(sender) => sender.release(sender_id, enabled, info, tc),
TcReleaser::Heap(sender) => sender.release(sender_id, enabled, info, tc),
}
}
}
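Assuming the releaser backend is meant to be selected at compile time via the heap_tmtc feature, the selection could look like the following sketch; tc_source_tx (a SyncSender for PacketAsVec) and pool_tc_sender (a PacketSenderWithSharedPool) are placeholder handles created elsewhere.

#[cfg(feature = "heap_tmtc")]
let tc_releaser = TcReleaser::Heap(tc_source_tx.clone());
#[cfg(not(feature = "heap_tmtc"))]
let tc_releaser = TcReleaser::Static(pool_tc_sender.clone());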
pub struct SchedulingServiceWrapper {
pub pus_11_handler: PusSchedServiceHandler<
MpscTcReceiver,
TmSender,
TcInMemConverter,
EcssTcInMemConverter,
VerificationReporter,
PusScheduler,
>,
pub sched_tc_pool: StaticMemoryPool,
pub releaser_buf: [u8; 4096],
pub tc_releaser: Box<dyn TcReleaser + Send>,
pub tc_releaser: TcReleaser,
}
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> DirectPusService
for SchedulingServiceWrapper<TmSender, TcInMemConverter>
{
impl DirectPusService for SchedulingServiceWrapper {
const SERVICE_ID: u8 = PusServiceId::Scheduling as u8;
const SERVICE_STR: &'static str = "scheduling";
@ -134,9 +146,7 @@ impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> DirectPusSe
}
}
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter>
SchedulingServiceWrapper<TmSender, TcInMemConverter>
{
impl SchedulingServiceWrapper {
pub fn release_tcs(&mut self) {
let id = self.pus_11_handler.service_helper.id();
let releaser = |enabled: bool, info: &TcInfo, tc: &[u8]| -> bool {
@ -162,12 +172,13 @@ impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter>
}
}
pub fn create_scheduler_service_static(
tm_sender: PacketSenderWithSharedPool,
tc_releaser: PacketSenderWithSharedPool,
pub fn create_scheduler_service(
tm_sender: TmSender,
tc_in_mem_converter: EcssTcInMemConverter,
tc_releaser: TcReleaser,
pus_sched_rx: mpsc::Receiver<EcssTcAndToken>,
sched_tc_pool: StaticMemoryPool,
) -> SchedulingServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
) -> SchedulingServiceWrapper {
let scheduler = PusScheduler::new_with_current_init_time(Duration::from_secs(5))
.expect("Creating PUS Scheduler failed");
let pus_11_handler = PusSchedServiceHandler::new(
@ -176,7 +187,7 @@ pub fn create_scheduler_service_static(
pus_sched_rx,
tm_sender,
create_verification_reporter(PUS_SCHED_SERVICE.id(), PUS_SCHED_SERVICE.apid),
EcssTcInSharedStoreConverter::new(tc_releaser.shared_packet_store().0.clone(), 2048),
tc_in_mem_converter,
),
scheduler,
);
@ -184,34 +195,6 @@ pub fn create_scheduler_service_static(
pus_11_handler,
sched_tc_pool,
releaser_buf: [0; 4096],
tc_releaser: Box::new(tc_releaser),
}
}
pub fn create_scheduler_service_dynamic(
tm_funnel_tx: mpsc::Sender<PacketAsVec>,
tc_source_sender: mpsc::Sender<PacketAsVec>,
pus_sched_rx: mpsc::Receiver<EcssTcAndToken>,
sched_tc_pool: StaticMemoryPool,
) -> SchedulingServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
//let sched_srv_receiver =
//MpscTcReceiver::new(PUS_SCHED_SERVICE.raw(), "PUS_11_TC_RECV", pus_sched_rx);
let scheduler = PusScheduler::new_with_current_init_time(Duration::from_secs(5))
.expect("Creating PUS Scheduler failed");
let pus_11_handler = PusSchedServiceHandler::new(
PusServiceHelper::new(
PUS_SCHED_SERVICE.id(),
pus_sched_rx,
tm_funnel_tx,
create_verification_reporter(PUS_SCHED_SERVICE.id(), PUS_SCHED_SERVICE.apid),
EcssTcInVecConverter::default(),
),
scheduler,
);
SchedulingServiceWrapper {
pus_11_handler,
sched_tc_pool,
releaser_buf: [0; 4096],
tc_releaser: Box::new(tc_source_sender),
tc_releaser,
}
}


@ -1,9 +1,6 @@
use crate::pus::mode::ModeServiceWrapper;
use derive_new::new;
use satrs::{
pus::{EcssTcInMemConverter, EcssTmSender},
spacepackets::time::{cds, TimeWriter},
};
use satrs::spacepackets::time::{cds, TimeWriter};
use super::{
action::ActionServiceWrapper, event::EventServiceWrapper, hk::HkServiceWrapper,
@ -11,21 +8,17 @@ use super::{
HandlingStatus, TargetedPusService,
};
// TODO: For better extensibility, we could create two vectors: one for direct PUS services and
// one for targeted services.
#[derive(new)]
pub struct PusStack<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> {
test_srv: TestCustomServiceWrapper<TmSender, TcInMemConverter>,
hk_srv_wrapper: HkServiceWrapper<TmSender, TcInMemConverter>,
event_srv: EventServiceWrapper<TmSender, TcInMemConverter>,
action_srv_wrapper: ActionServiceWrapper<TmSender, TcInMemConverter>,
schedule_srv: SchedulingServiceWrapper<TmSender, TcInMemConverter>,
mode_srv: ModeServiceWrapper<TmSender, TcInMemConverter>,
pub struct PusStack {
pub test_srv: TestCustomServiceWrapper,
pub hk_srv_wrapper: HkServiceWrapper,
pub event_srv: EventServiceWrapper,
pub action_srv_wrapper: ActionServiceWrapper,
pub schedule_srv: SchedulingServiceWrapper,
pub mode_srv: ModeServiceWrapper,
}
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter>
PusStack<TmSender, TcInMemConverter>
{
impl PusStack {
pub fn periodic_operation(&mut self) {
// Release all telecommands which reached their release time before calling the service
// handlers.


@ -1,35 +1,34 @@
use crate::pus::create_verification_reporter;
use crate::tm_sender::TmSender;
use log::info;
use satrs::event_man::{EventMessage, EventMessageU32};
use satrs::pool::SharedStaticMemoryPool;
use satrs::pus::test::PusService17TestHandler;
use satrs::pus::verification::{FailParams, VerificationReporter, VerificationReportingProvider};
use satrs::pus::PartialPusHandlingError;
use satrs::pus::{
DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConverter, EcssTcInVecConverter,
EcssTmSender, MpscTcReceiver, MpscTmAsVecSender, PusServiceHelper,
DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConversionProvider,
EcssTcInMemConverter, MpscTcReceiver, PusServiceHelper,
};
use satrs::pus::{EcssTcInSharedStoreConverter, PartialPusHandlingError};
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::{PusPacket, PusServiceId};
use satrs::tmtc::{PacketAsVec, PacketSenderWithSharedPool};
use satrs_example::config::components::PUS_TEST_SERVICE;
use satrs_example::config::{tmtc_err, TEST_EVENT};
use std::sync::mpsc;
use super::{DirectPusService, HandlingStatus};
pub fn create_test_service_static(
tm_sender: PacketSenderWithSharedPool,
tc_pool: SharedStaticMemoryPool,
pub fn create_test_service(
tm_sender: TmSender,
tc_in_mem_converter: EcssTcInMemConverter,
event_sender: mpsc::SyncSender<EventMessageU32>,
pus_test_rx: mpsc::Receiver<EcssTcAndToken>,
) -> TestCustomServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
) -> TestCustomServiceWrapper {
let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new(
PUS_TEST_SERVICE.id(),
pus_test_rx,
tm_sender,
create_verification_reporter(PUS_TEST_SERVICE.id(), PUS_TEST_SERVICE.apid),
EcssTcInSharedStoreConverter::new(tc_pool, 2048),
tc_in_mem_converter,
));
TestCustomServiceWrapper {
handler: pus17_handler,
@ -37,34 +36,17 @@ pub fn create_test_service_static(
}
}
pub fn create_test_service_dynamic(
tm_funnel_tx: mpsc::Sender<PacketAsVec>,
event_sender: mpsc::SyncSender<EventMessageU32>,
pus_test_rx: mpsc::Receiver<EcssTcAndToken>,
) -> TestCustomServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new(
PUS_TEST_SERVICE.id(),
pus_test_rx,
tm_funnel_tx,
create_verification_reporter(PUS_TEST_SERVICE.id(), PUS_TEST_SERVICE.apid),
EcssTcInVecConverter::default(),
));
TestCustomServiceWrapper {
handler: pus17_handler,
event_tx: event_sender,
}
}
pub struct TestCustomServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter>
{
pub handler:
PusService17TestHandler<MpscTcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
pub struct TestCustomServiceWrapper {
pub handler: PusService17TestHandler<
MpscTcReceiver,
TmSender,
EcssTcInMemConverter,
VerificationReporter,
>,
pub event_tx: mpsc::SyncSender<EventMessageU32>,
}
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> DirectPusService
for TestCustomServiceWrapper<TmSender, TcInMemConverter>
{
impl DirectPusService for TestCustomServiceWrapper {
const SERVICE_ID: u8 = PusServiceId::Test as u8;
const SERVICE_STR: &'static str = "test";

satrs-example/src/spi.rs

@ -0,0 +1,6 @@
use core::fmt::Debug;
pub trait SpiInterface {
type Error: Debug;
fn transfer(&mut self, tx: &[u8], rx: &mut [u8]) -> Result<(), Self::Error>;
}
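A minimal sketch of a test double for the relocated trait: a loopback device which simply mirrors the TX buffer into the RX buffer.

pub struct SpiLoopback;

impl SpiInterface for SpiLoopback {
    type Error = core::convert::Infallible;

    fn transfer(&mut self, tx: &[u8], rx: &mut [u8]) -> Result<(), Self::Error> {
        // Copy as many bytes as both buffers can hold.
        let len = tx.len().min(rx.len());
        rx[..len].copy_from_slice(&tx[..len]);
        Ok(())
    }
}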


@ -0,0 +1,33 @@
use std::sync::mpsc;
use satrs::{
pus::EcssTmSender,
queue::GenericSendError,
spacepackets::ecss::WritablePusPacket,
tmtc::{PacketAsVec, PacketSenderWithSharedPool},
};
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub enum TmSender {
Static(PacketSenderWithSharedPool),
Heap(mpsc::SyncSender<PacketAsVec>),
}
impl EcssTmSender for TmSender {
fn send_tm(
&self,
sender_id: satrs::ComponentId,
tm: satrs::pus::PusTmVariant,
) -> Result<(), satrs::pus::EcssTmtcError> {
match self {
TmSender::Static(sync_sender) => sync_sender.send_tm(sender_id, tm),
TmSender::Heap(sync_sender) => match tm {
satrs::pus::PusTmVariant::InStore(_) => panic!("cannot send pool-stored TM with the heap TM sender"),
satrs::pus::PusTmVariant::Direct(pus_tm_creator) => sync_sender
.send(PacketAsVec::new(sender_id, pus_tm_creator.to_vec()?))
.map_err(|_| GenericSendError::RxDisconnected.into()),
},
}
}
}
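A usage sketch, assuming the heap backend: the sender enum is built once from a bounded channel and can then be cloned into every service constructor expecting an EcssTmSender. The channel depth of 100 is an assumed value.

let (tm_funnel_tx, _tm_funnel_rx) = mpsc::sync_channel::<PacketAsVec>(100);
let tm_sender = TmSender::Heap(tm_funnel_tx);
// tm_sender.clone() can now be passed wherever an EcssTmSender is expected.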


@ -1,12 +1,10 @@
use satrs::{
pool::PoolProvider,
pus::HandlingStatus,
tmtc::{PacketAsVec, PacketInPool, PacketSenderWithSharedPool, SharedPacketPool},
tmtc::{PacketAsVec, PacketInPool, SharedPacketPool},
};
use std::sync::mpsc::{self, TryRecvError};
use satrs::pus::MpscTmAsVecSender;
use crate::pus::PusTcDistributor;
// TC source components where static pools are the backing memory of the received telecommands.
@ -14,14 +12,15 @@ pub struct TcSourceTaskStatic {
shared_tc_pool: SharedPacketPool,
tc_receiver: mpsc::Receiver<PacketInPool>,
tc_buf: [u8; 4096],
pus_distributor: PusTcDistributor<PacketSenderWithSharedPool>,
pus_distributor: PusTcDistributor,
}
#[allow(dead_code)]
impl TcSourceTaskStatic {
pub fn new(
shared_tc_pool: SharedPacketPool,
tc_receiver: mpsc::Receiver<PacketInPool>,
pus_receiver: PusTcDistributor<PacketSenderWithSharedPool>,
pus_receiver: PusTcDistributor,
) -> Self {
Self {
shared_tc_pool,
@ -67,14 +66,12 @@ impl TcSourceTaskStatic {
// TC source components where the heap is the backing memory of the received telecommands.
pub struct TcSourceTaskDynamic {
pub tc_receiver: mpsc::Receiver<PacketAsVec>,
pus_distributor: PusTcDistributor<MpscTmAsVecSender>,
pus_distributor: PusTcDistributor,
}
#[allow(dead_code)]
impl TcSourceTaskDynamic {
pub fn new(
tc_receiver: mpsc::Receiver<PacketAsVec>,
pus_receiver: PusTcDistributor<MpscTmAsVecSender>,
) -> Self {
pub fn new(tc_receiver: mpsc::Receiver<PacketAsVec>, pus_receiver: PusTcDistributor) -> Self {
Self {
tc_receiver,
pus_distributor: pus_receiver,
@ -105,3 +102,18 @@ impl TcSourceTaskDynamic {
}
}
}
#[allow(dead_code)]
pub enum TcSourceTask {
Static(TcSourceTaskStatic),
Heap(TcSourceTaskDynamic),
}
impl TcSourceTask {
pub fn periodic_operation(&mut self) {
match self {
TcSourceTask::Static(task) => task.periodic_operation(),
TcSourceTask::Heap(task) => task.periodic_operation(),
}
}
}
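A compile-time selection sketch, assuming the heap_tmtc feature is meant to pick the TC source backend; tc_rx, shared_tc_pool and pus_distributor are placeholder values created elsewhere (note that the receiver carries PacketAsVec for the heap variant and PacketInPool for the static one).

#[cfg(feature = "heap_tmtc")]
let mut tc_source = TcSourceTask::Heap(TcSourceTaskDynamic::new(tc_rx, pus_distributor));
#[cfg(not(feature = "heap_tmtc"))]
let mut tc_source = TcSourceTask::Static(TcSourceTaskStatic::new(
    shared_tc_pool,
    tc_rx,
    pus_distributor,
));
// Called from the polling loop of the TC source thread.
tc_source.periodic_operation();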


@ -89,6 +89,7 @@ pub struct TmSinkStatic {
tm_server_tx: mpsc::SyncSender<PacketInPool>,
}
#[allow(dead_code)]
impl TmSinkStatic {
pub fn new(
shared_tm_store: SharedPacketPool,
@ -132,14 +133,15 @@ impl TmSinkStatic {
pub struct TmSinkDynamic {
common: TmFunnelCommon,
tm_funnel_rx: mpsc::Receiver<PacketAsVec>,
tm_server_tx: mpsc::Sender<PacketAsVec>,
tm_server_tx: mpsc::SyncSender<PacketAsVec>,
}
#[allow(dead_code)]
impl TmSinkDynamic {
pub fn new(
sync_tm_tcp_source: SyncTcpTmSource,
tm_funnel_rx: mpsc::Receiver<PacketAsVec>,
tm_server_tx: mpsc::Sender<PacketAsVec>,
tm_server_tx: mpsc::SyncSender<PacketAsVec>,
) -> Self {
Self {
common: TmFunnelCommon::new(sync_tm_tcp_source),
@ -162,3 +164,18 @@ impl TmSinkDynamic {
}
}
}
#[allow(dead_code)]
pub enum TmSink {
Static(TmSinkStatic),
Heap(TmSinkDynamic),
}
impl TmSink {
pub fn operation(&mut self) {
match self {
TmSink::Static(static_sink) => static_sink.operation(),
TmSink::Heap(dynamic_sink) => dynamic_sink.operation(),
}
}
}
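The TM sink follows the same pattern. A sketch for the heap variant, with sync_tm_tcp_source, tm_funnel_rx and tm_server_tx as placeholder handles (the static variant is wired analogously from a SharedPacketPool):

#[cfg(feature = "heap_tmtc")]
let mut tm_sink = TmSink::Heap(TmSinkDynamic::new(
    sync_tm_tcp_source,
    tm_funnel_rx,
    tm_server_tx,
));
// A dedicated thread would then drive the sink, e.g.:
// std::thread::spawn(move || loop { tm_sink.operation(); });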