Compare commits

satrs-v0.3...018c2593f8 (39 commits)

Commits:
018c2593f8
df28ab68f4
a196dc7f35
60091279fa
23841a14dc
68939d3699
b024343e17
f2648fb3b6
9325707fe8
04663cb5ae
127d22d445
0870471886
365f8f9e7a
9130e68bce
32bc2826d9
d107ab5ed3
96e88659ab
0d930b5832
c952d813d4
b3b1569b3d
5d7423a19e
975e1a5323
c8bed19e42
ce92ab9b2f
7f674dd5bf
a6aa20d09b
3a02fcf77a
95d232dc02
df24f50e8e
a42aefff87
d4339f3ea3
b07b8d6347
0afcc35513
a2c2e35067
4fc7972bdd
04b96579bd
c973339ee5
656aafccff
3569fce95e
.github/workflows/ci.yml (6 changed lines, vendored)

@@ -11,9 +11,7 @@ jobs:
 steps:
 - uses: actions/checkout@v4
 - uses: dtolnay/rust-toolchain@stable
-- run: cargo check
+- run: cargo check --release
-# Check example with static pool configuration
-- run: cargo check -p satrs-example --no-default-features

 test:
 name: Run Tests
@@ -39,7 +37,7 @@ jobs:
 - uses: dtolnay/rust-toolchain@stable
 with:
 targets: "armv7-unknown-linux-gnueabihf, thumbv7em-none-eabihf"
-- run: cargo check -p satrs --target=${{matrix.target}} --no-default-features
+- run: cargo check -p satrs --release --target=${{matrix.target}} --no-default-features

 fmt:
 name: Check formatting
embedded-examples/stm32h7-nucleo-rtic/Cargo.lock (10 changed lines, generated)

@@ -631,7 +631,6 @@ dependencies = [
 "satrs-shared",
 "smallvec",
 "spacepackets",
-"static_cell",
 "thiserror",
 ]

@@ -721,15 +720,6 @@ version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"

-[[package]]
-name = "static_cell"
-version = "2.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d89b0684884a883431282db1e4343f34afc2ff6996fe1f4a1664519b66e14c1e"
-dependencies = [
-"portable-atomic",
-]
-
 [[package]]
 name = "stm32h7"
 version = "0.15.1"
@@ -3,6 +3,7 @@
 extern crate alloc;

 use rtic::app;
+use rtic_monotonics::fugit::TimerInstantU32;
 use rtic_monotonics::systick::prelude::*;
 use satrs::pool::{PoolAddr, PoolProvider, StaticHeaplessMemoryPool};
 use satrs::static_subpool;
@@ -73,7 +74,9 @@ impl Net {
 let mut iface = Interface::new(
 config,
 &mut ethdev,
-smoltcp::time::Instant::from_millis(Mono::now().duration_since_epoch().to_millis()),
+smoltcp::time::Instant::from_millis(
+(Mono::now() - TimerInstantU32::<1000>::from_ticks(0)).to_millis(),
+),
 );
 // Create sockets
 let dhcp_socket = dhcpv4::Socket::new();
@@ -92,7 +95,7 @@ impl Net {
 /// Polls on the ethernet interface. You should refer to the smoltcp
 /// documentation for poll() to understand how to call poll efficiently
 pub fn poll<'a>(&mut self, sockets: &'a mut SocketSet) -> bool {
-let uptime = Mono::now().duration_since_epoch();
+let uptime = Mono::now() - TimerInstantU32::<1000>::from_ticks(0);
 let timestamp = smoltcp::time::Instant::from_millis(uptime.to_millis());

 self.iface.poll(timestamp, &mut self.ethdev, sockets)
@@ -20,7 +20,6 @@ thiserror = "2"
 lazy_static = "1"
 strum = { version = "0.26", features = ["derive"] }
 derive-new = "0.7"
-cfg-if = "1"
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"

@@ -36,8 +35,8 @@ version = "0.1.1"
 path = "../satrs-mib"

 [features]
-heap_tmtc = []
+dyn_tmtc = []
-default = ["heap_tmtc"]
+default = ["dyn_tmtc"]

 [dev-dependencies]
 env_logger = "0.11"
@@ -14,7 +14,7 @@ You can run the application using `cargo run`.

 # Features

-The example has the `heap_tmtc` feature which is enabled by default. With this feature enabled,
+The example has the `dyn_tmtc` feature which is enabled by default. With this feature enabled,
 TMTC packets are exchanged using the heap as the backing memory instead of pre-allocated static
 stores.

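The feature flag renamed above is consumed through conditional compilation in the example crate. The following is a minimal sketch of that pattern, assuming a crate feature named `dyn_tmtc` as in the Cargo.toml change; the store type aliases are purely illustrative and not taken from the satrs-example sources.

// Minimal sketch of feature-gated TMTC storage selection, assuming a crate
// feature named `dyn_tmtc`. The store types are placeholders for illustration.
cfg_if::cfg_if! {
    if #[cfg(feature = "dyn_tmtc")] {
        // Heap-backed packet storage.
        pub type TmTcStore = Vec<Vec<u8>>;
    } else {
        // Statically sized, pre-allocated packet storage.
        pub type TmTcStore = [[u8; 1024]; 8];
    }
}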
@@ -39,10 +39,7 @@ class EventU32:


 class AcsId(enum.IntEnum):
-SUBSYSTEM = 1
+MGM_0 = 0
-MGM_ASSEMBLY = 2
-MGM_0 = 3
-MGM_1 = 4


 class AcsHkIds(enum.IntEnum):
@@ -1 +0,0 @@
-// TODO: Write the assembly

@@ -1 +0,0 @@
-// TODO: Write dummy controller
@@ -1,8 +1,7 @@
 use derive_new::new;
 use satrs::hk::{HkRequest, HkRequestVariant};
-use satrs::mode_tree::{ModeChild, ModeNode};
 use satrs::power::{PowerSwitchInfo, PowerSwitcherCommandSender};
-use satrs_example::config::pus::PUS_MODE_SERVICE;
+use satrs::queue::{GenericSendError, GenericTargetedMessagingError};
 use satrs_example::{DeviceMode, TimestampHelper};
 use satrs_minisim::acs::lis3mdl::{
 MgmLis3MdlReply, MgmLis3RawValues, FIELD_LSB_PER_GAUSS_4_SENS, GAUSS_TO_MICROTESLA_FACTOR,
@@ -16,18 +15,15 @@ use std::sync::{Arc, Mutex};
 use std::time::Duration;

 use satrs::mode::{
-ModeAndSubmode, ModeError, ModeProvider, ModeReply, ModeRequestHandler,
+ModeAndSubmode, ModeError, ModeProvider, ModeReply, ModeRequest, ModeRequestHandler,
-ModeRequestHandlerMpscBounded,
 };
 use satrs::pus::{EcssTmSender, PusTmVariant};
 use satrs::request::{GenericMessage, MessageMetadata, UniqueApidTargetId};
-use satrs_example::config::components::NO_SENDER;
+use satrs_example::config::components::{NO_SENDER, PUS_MODE_SERVICE};

 use crate::hk::PusHkHelper;
 use crate::pus::hk::{HkReply, HkReplyVariant};
 use crate::requests::CompositeRequest;
-use crate::spi::SpiInterface;
-use crate::tmtc::sender::TmTcSender;

 use serde::{Deserialize, Serialize};

@@ -52,6 +48,11 @@ pub enum TransitionState {
 Done,
 }

+pub trait SpiInterface {
+type Error: Debug;
+fn transfer(&mut self, tx: &[u8], rx: &mut [u8]) -> Result<(), Self::Error>;
+}
+
 #[derive(Default)]
 pub struct SpiDummyInterface {
 pub dummy_values: MgmLis3RawValues,
@@ -128,6 +129,13 @@ pub struct MgmData {
 pub z: f32,
 }

+pub struct MpscModeLeafInterface {
+pub request_rx: mpsc::Receiver<GenericMessage<ModeRequest>>,
+pub reply_to_pus_tx: mpsc::Sender<GenericMessage<ModeReply>>,
+#[allow(dead_code)]
+pub reply_to_parent_tx: mpsc::SyncSender<GenericMessage<ModeReply>>,
+}
+
 #[derive(Default)]
 pub struct BufWrapper {
 tx_buf: [u8; 32],
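The MpscModeLeafInterface added in the hunk above bundles one receiving end and two sending ends of standard library channels. The following is a minimal, self-contained sketch of how such an interface is typically wired up; the ModeRequest/ModeReply placeholders and the main function are illustrative and not taken from the example crate.

// Sketch of the leaf-node channel wiring. The field names mirror the struct
// added in this diff; the message types are simplified placeholders.
use std::sync::mpsc;

struct ModeRequest;
struct ModeReply;

#[allow(dead_code)]
struct MpscModeLeafInterface {
    request_rx: mpsc::Receiver<ModeRequest>,
    reply_to_pus_tx: mpsc::Sender<ModeReply>,
    reply_to_parent_tx: mpsc::SyncSender<ModeReply>,
}

fn main() {
    // The PUS mode service and the parent node keep the opposite channel ends;
    // the device handler owns the interface struct.
    let (request_tx, request_rx) = mpsc::channel();
    let (reply_to_pus_tx, _reply_rx_at_pus) = mpsc::channel();
    let (reply_to_parent_tx, _reply_rx_at_parent) = mpsc::sync_channel(5);
    let interface = MpscModeLeafInterface {
        request_rx,
        reply_to_pus_tx,
        reply_to_parent_tx,
    };
    request_tx.send(ModeRequest).unwrap();
    assert!(interface.request_rx.try_recv().is_ok());
}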
@@ -158,15 +166,16 @@ impl Default for ModeHelpers {
 #[allow(clippy::too_many_arguments)]
 pub struct MgmHandlerLis3Mdl<
 ComInterface: SpiInterface,
+TmSender: EcssTmSender,
 SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
 > {
 id: UniqueApidTargetId,
 dev_str: &'static str,
-mode_node: ModeRequestHandlerMpscBounded,
+mode_interface: MpscModeLeafInterface,
 composite_request_rx: mpsc::Receiver<GenericMessage<CompositeRequest>>,
-hk_reply_tx: mpsc::SyncSender<GenericMessage<HkReply>>,
+hk_reply_tx: mpsc::Sender<GenericMessage<HkReply>>,
 switch_helper: SwitchHelper,
-tm_sender: TmTcSender,
+tm_sender: TmSender,
 pub com_interface: ComInterface,
 shared_mgm_set: Arc<Mutex<MgmData>>,
 #[new(value = "PusHkHelper::new(id)")]
@@ -181,8 +190,9 @@ pub struct MgmHandlerLis3Mdl<

 impl<
 ComInterface: SpiInterface,
+TmSender: EcssTmSender,
 SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
-> MgmHandlerLis3Mdl<ComInterface, SwitchHelper>
+> MgmHandlerLis3Mdl<ComInterface, TmSender, SwitchHelper>
 {
 pub fn periodic_operation(&mut self) {
 self.stamp_helper.update_from_now();
@@ -265,9 +275,8 @@ impl<
 pub fn handle_mode_requests(&mut self) {
 loop {
 // TODO: Only allow one set mode request per cycle?
-match self.mode_node.try_recv_mode_request() {
+match self.mode_interface.request_rx.try_recv() {
-Ok(opt_msg) => {
+Ok(msg) => {
-if let Some(msg) = opt_msg {
 let result = self.handle_mode_request(msg);
 // TODO: Trigger event?
 if result.is_err() {
@@ -277,16 +286,14 @@ impl<
 result.err().unwrap()
 );
 }
+}
+Err(e) => {
+if e != mpsc::TryRecvError::Empty {
+log::warn!("{}: failed to receive mode request: {:?}", self.dev_str, e);
 } else {
 break;
 }
 }
-Err(e) => match e {
-satrs::queue::GenericReceiveError::Empty => break,
-satrs::queue::GenericReceiveError::TxDisconnected(e) => {
-log::warn!("{}: failed to receive mode request: {:?}", self.dev_str, e);
-}
-},
 }
 }
 }
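The handler now drains its request queue with std's non-blocking try_recv instead of the bounded satrs receiver. A minimal sketch of that drain pattern, independent of the satrs types, is shown below; the request type and the error reporting are placeholders.

// Sketch of the non-blocking drain loop used above: keep receiving until the
// queue is empty, and treat any other error (e.g. a disconnected sender) as
// something worth reporting before giving up.
use std::sync::mpsc;

fn drain_requests(rx: &mpsc::Receiver<u32>) {
    loop {
        match rx.try_recv() {
            Ok(request) => {
                // Handle one request per iteration.
                println!("handling request {request}");
            }
            Err(mpsc::TryRecvError::Empty) => break,
            Err(e) => {
                eprintln!("failed to receive request: {e:?}");
                break;
            }
        }
    }
}

fn main() {
    let (tx, rx) = mpsc::channel();
    tx.send(1).unwrap();
    tx.send(2).unwrap();
    drain_requests(&rx);
}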
@@ -358,8 +365,9 @@ impl<

 impl<
 ComInterface: SpiInterface,
+TmSender: EcssTmSender,
 SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
-> ModeProvider for MgmHandlerLis3Mdl<ComInterface, SwitchHelper>
+> ModeProvider for MgmHandlerLis3Mdl<ComInterface, TmSender, SwitchHelper>
 {
 fn mode_and_submode(&self) -> ModeAndSubmode {
 self.mode_helpers.current
@@ -368,8 +376,9 @@ impl<

 impl<
 ComInterface: SpiInterface,
+TmSender: EcssTmSender,
 SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
-> ModeRequestHandler for MgmHandlerLis3Mdl<ComInterface, SwitchHelper>
+> ModeRequestHandler for MgmHandlerLis3Mdl<ComInterface, TmSender, SwitchHelper>
 {
 type Error = ModeError;

@@ -440,9 +449,10 @@ impl<
 requestor.sender_id()
 );
 }
-self.mode_node
+self.mode_interface
-.send_mode_reply(requestor, reply)
+.reply_to_pus_tx
-.map_err(ModeError::Send)?;
+.send(GenericMessage::new(requestor, reply))
+.map_err(|_| GenericTargetedMessagingError::Send(GenericSendError::RxDisconnected))?;
 Ok(())
 }

@@ -455,44 +465,17 @@ impl<
 }
 }

-impl<
-ComInterface: SpiInterface,
-SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
-> ModeNode for MgmHandlerLis3Mdl<ComInterface, SwitchHelper>
-{
-fn id(&self) -> satrs::ComponentId {
-self.id.into()
-}
-}
-
-impl<
-ComInterface: SpiInterface,
-SwitchHelper: PowerSwitchInfo<PcduSwitch> + PowerSwitcherCommandSender<PcduSwitch>,
-> ModeChild for MgmHandlerLis3Mdl<ComInterface, SwitchHelper>
-{
-type Sender = mpsc::SyncSender<GenericMessage<ModeReply>>;
-
-fn add_mode_parent(&mut self, id: satrs::ComponentId, reply_sender: Self::Sender) {
-self.mode_node.add_message_target(id, reply_sender);
-}
-}
-
 #[cfg(test)]
 mod tests {
-use std::{
-collections::HashMap,
-sync::{mpsc, Arc},
-};
+use std::sync::{mpsc, Arc};

 use satrs::{
 mode::{ModeReply, ModeRequest},
-mode_tree::ModeParent,
 power::SwitchStateBinary,
 request::{GenericMessage, UniqueApidTargetId},
 tmtc::PacketAsVec,
-ComponentId,
 };
-use satrs_example::config::{acs::MGM_ASSEMBLY, components::Apid};
+use satrs_example::config::components::Apid;
 use satrs_minisim::acs::lis3mdl::MgmLis3RawValues;

 use crate::{eps::TestSwitchHelper, pus::hk::HkReply, requests::CompositeRequest};
@@ -520,88 +503,49 @@ mod tests {
 }
 }

-#[allow(dead_code)]
 pub struct MgmTestbench {
-pub mode_request_tx: mpsc::SyncSender<GenericMessage<ModeRequest>>,
+pub mode_request_tx: mpsc::Sender<GenericMessage<ModeRequest>>,
 pub mode_reply_rx_to_pus: mpsc::Receiver<GenericMessage<ModeReply>>,
 pub mode_reply_rx_to_parent: mpsc::Receiver<GenericMessage<ModeReply>>,
 pub composite_request_tx: mpsc::Sender<GenericMessage<CompositeRequest>>,
 pub hk_reply_rx: mpsc::Receiver<GenericMessage<HkReply>>,
 pub tm_rx: mpsc::Receiver<PacketAsVec>,
-pub handler: MgmHandlerLis3Mdl<TestSpiInterface, TestSwitchHelper>,
+pub handler:
+MgmHandlerLis3Mdl<TestSpiInterface, mpsc::Sender<PacketAsVec>, TestSwitchHelper>,
 }

-#[derive(Default)]
-pub struct MgmAssemblyMock(
-pub HashMap<ComponentId, mpsc::SyncSender<GenericMessage<ModeRequest>>>,
-);
-
-impl ModeNode for MgmAssemblyMock {
-fn id(&self) -> satrs::ComponentId {
-PUS_MODE_SERVICE.into()
-}
-}
-
-impl ModeParent for MgmAssemblyMock {
-type Sender = mpsc::SyncSender<GenericMessage<ModeRequest>>;
-
-fn add_mode_child(&mut self, id: satrs::ComponentId, request_sender: Self::Sender) {
-self.0.insert(id, request_sender);
-}
-}
-
-#[derive(Default)]
-pub struct PusMock {
-pub request_sender_map: HashMap<ComponentId, mpsc::SyncSender<GenericMessage<ModeRequest>>>,
-}
-
-impl ModeNode for PusMock {
-fn id(&self) -> satrs::ComponentId {
-PUS_MODE_SERVICE.into()
-}
-}
-
-impl ModeParent for PusMock {
-type Sender = mpsc::SyncSender<GenericMessage<ModeRequest>>;
-
-fn add_mode_child(&mut self, id: satrs::ComponentId, request_sender: Self::Sender) {
-self.request_sender_map.insert(id, request_sender);
-}
-}
-
 impl MgmTestbench {
 pub fn new() -> Self {
-let (request_tx, request_rx) = mpsc::sync_channel(5);
+let (request_tx, request_rx) = mpsc::channel();
-let (reply_tx_to_pus, reply_rx_to_pus) = mpsc::sync_channel(5);
+let (reply_tx_to_pus, reply_rx_to_pus) = mpsc::channel();
 let (reply_tx_to_parent, reply_rx_to_parent) = mpsc::sync_channel(5);
-let id = UniqueApidTargetId::new(Apid::Acs as u16, 1);
-let mode_node = ModeRequestHandlerMpscBounded::new(id.into(), request_rx);
+let mode_interface = MpscModeLeafInterface {
+request_rx,
+reply_to_pus_tx: reply_tx_to_pus,
+reply_to_parent_tx: reply_tx_to_parent,
+};
 let (composite_request_tx, composite_request_rx) = mpsc::channel();
-let (hk_reply_tx, hk_reply_rx) = mpsc::sync_channel(10);
+let (hk_reply_tx, hk_reply_rx) = mpsc::channel();
-let (tm_tx, tm_rx) = mpsc::sync_channel(10);
+let (tm_tx, tm_rx) = mpsc::channel::<PacketAsVec>();
-let tm_sender = TmTcSender::Heap(tm_tx);
 let shared_mgm_set = Arc::default();
-let mut handler = MgmHandlerLis3Mdl::new(
-id,
-"TEST_MGM",
-mode_node,
-composite_request_rx,
-hk_reply_tx,
-TestSwitchHelper::default(),
-tm_sender,
-TestSpiInterface::default(),
-shared_mgm_set,
-);
-handler.add_mode_parent(PUS_MODE_SERVICE.into(), reply_tx_to_pus);
-handler.add_mode_parent(MGM_ASSEMBLY.into(), reply_tx_to_parent);
 Self {
 mode_request_tx: request_tx,
 mode_reply_rx_to_pus: reply_rx_to_pus,
 mode_reply_rx_to_parent: reply_rx_to_parent,
 composite_request_tx,
-handler,
 tm_rx,
 hk_reply_rx,
+handler: MgmHandlerLis3Mdl::new(
+UniqueApidTargetId::new(Apid::Acs as u16, 1),
+"TEST_MGM",
+mode_interface,
+composite_request_rx,
+hk_reply_tx,
+TestSwitchHelper::default(),
+tm_tx,
+TestSpiInterface::default(),
+shared_mgm_set,
+),
 }
 }
 }
@@ -1,4 +1 @@
-pub mod assembly;
-pub mod ctrl;
 pub mod mgm;
-pub mod subsystem;

@@ -1 +0,0 @@
-// TODO: Write subsystem
@@ -135,33 +135,6 @@ pub mod components {
 Eps = 6,
 }

-#[derive(Copy, Clone, PartialEq, Eq)]
-pub enum EpsId {
-Pcdu = 0,
-Subsystem = 1,
-}
-
-#[derive(Copy, Clone, PartialEq, Eq)]
-pub enum TmtcId {
-UdpServer = 0,
-TcpServer = 1,
-}
-
-pub const EPS_SUBSYSTEM: UniqueApidTargetId =
-UniqueApidTargetId::new(Apid::Eps as u16, EpsId::Subsystem as u32);
-pub const PCDU_HANDLER: UniqueApidTargetId =
-UniqueApidTargetId::new(Apid::Eps as u16, EpsId::Pcdu as u32);
-pub const UDP_SERVER: UniqueApidTargetId =
-UniqueApidTargetId::new(Apid::Tmtc as u16, TmtcId::UdpServer as u32);
-pub const TCP_SERVER: UniqueApidTargetId =
-UniqueApidTargetId::new(Apid::Tmtc as u16, TmtcId::TcpServer as u32);
-pub const NO_SENDER: ComponentId = ComponentId::MAX;
-}
-
-pub mod pus {
-use super::components::Apid;
-use satrs::request::UniqueApidTargetId;
-
 // Component IDs for components with the PUS APID.
 #[derive(Copy, Clone, PartialEq, Eq)]
 pub enum PusId {
@@ -172,6 +145,23 @@ pub mod pus {
 PusMode = 4,
 PusHk = 5,
 }

+#[derive(Copy, Clone, PartialEq, Eq)]
+pub enum AcsId {
+Mgm0 = 0,
+}
+
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub enum EpsId {
+Pcdu = 0,
+}
+
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub enum TmtcId {
+UdpServer = 0,
+TcpServer = 1,
+}
+
 pub const PUS_ACTION_SERVICE: UniqueApidTargetId =
 UniqueApidTargetId::new(Apid::GenericPus as u16, PusId::PusAction as u32);
 pub const PUS_EVENT_MANAGEMENT: UniqueApidTargetId =
@@ -186,26 +176,15 @@ pub mod pus {
 UniqueApidTargetId::new(Apid::GenericPus as u16, PusId::PusHk as u32);
 pub const PUS_SCHED_SERVICE: UniqueApidTargetId =
 UniqueApidTargetId::new(Apid::Sched as u16, 0);
-}
-
-pub mod acs {
-use super::components::Apid;
-use satrs::request::UniqueApidTargetId;
-
-#[derive(Copy, Clone, PartialEq, Eq)]
-pub enum AcsId {
-Subsystem = 1,
-Assembly = 2,
-Mgm0 = 3,
-Mgm1 = 4,
-}
-
-pub const MGM_ASSEMBLY: UniqueApidTargetId =
-UniqueApidTargetId::new(Apid::Acs as u16, AcsId::Assembly as u32);
 pub const MGM_HANDLER_0: UniqueApidTargetId =
 UniqueApidTargetId::new(Apid::Acs as u16, AcsId::Mgm0 as u32);
-pub const MGM_HANDLER_1: UniqueApidTargetId =
+pub const PCDU_HANDLER: UniqueApidTargetId =
-UniqueApidTargetId::new(Apid::Acs as u16, AcsId::Mgm0 as u32);
+UniqueApidTargetId::new(Apid::Eps as u16, EpsId::Pcdu as u32);
+pub const UDP_SERVER: UniqueApidTargetId =
+UniqueApidTargetId::new(Apid::Tmtc as u16, TmtcId::UdpServer as u32);
+pub const TCP_SERVER: UniqueApidTargetId =
+UniqueApidTargetId::new(Apid::Tmtc as u16, TmtcId::TcpServer as u32);
+pub const NO_SENDER: ComponentId = ComponentId::MAX;
 }

 pub mod pool {
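The constants above compose a component ID from an APID and a per-APID unique ID. A minimal sketch of that addressing scheme in plain Rust is shown below; the 16/32-bit packing into a single u64 is an assumption made purely for illustration and is not the encoding used by the satrs UniqueApidTargetId type.

// Illustrative sketch of APID + unique-ID addressing. The bit layout is an
// assumed example, not the satrs encoding.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct ApidTargetId {
    apid: u16,
    unique_id: u32,
}

impl ApidTargetId {
    const fn new(apid: u16, unique_id: u32) -> Self {
        Self { apid, unique_id }
    }
    // Pack both fields into one routable component ID.
    const fn raw(&self) -> u64 {
        ((self.apid as u64) << 32) | self.unique_id as u64
    }
}

const APID_ACS: u16 = 1;
const MGM_HANDLER_0: ApidTargetId = ApidTargetId::new(APID_ACS, 0);

fn main() {
    println!("MGM handler 0 component ID: {:#x}", MGM_HANDLER_0.raw());
}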
@@ -8,22 +8,15 @@ use derive_new::new;
 use num_enum::{IntoPrimitive, TryFromPrimitive};
 use satrs::{
 hk::{HkRequest, HkRequestVariant},
-mode::{
+mode::{ModeAndSubmode, ModeError, ModeProvider, ModeReply, ModeRequestHandler},
-ModeAndSubmode, ModeError, ModeProvider, ModeReply, ModeRequestHandler,
-ModeRequestHandlerMpscBounded,
-},
-mode_tree::{ModeChild, ModeNode},
 power::SwitchRequest,
 pus::{EcssTmSender, PusTmVariant},
-queue::GenericSendError,
+queue::{GenericSendError, GenericTargetedMessagingError},
 request::{GenericMessage, MessageMetadata, UniqueApidTargetId},
 spacepackets::ByteConversionError,
 };
 use satrs_example::{
-config::{
+config::components::{NO_SENDER, PUS_MODE_SERVICE},
-components::{NO_SENDER, PCDU_HANDLER},
-pus::PUS_MODE_SERVICE,
-},
 DeviceMode, TimestampHelper,
 };
 use satrs_minisim::{
@@ -35,10 +28,10 @@ use satrs_minisim::{
 use serde::{Deserialize, Serialize};

 use crate::{
+acs::mgm::MpscModeLeafInterface,
 hk::PusHkHelper,
 pus::hk::{HkReply, HkReplyVariant},
 requests::CompositeRequest,
-tmtc::sender::TmTcSender,
 };

 pub trait SerialInterface {
@@ -207,14 +200,14 @@ pub type SharedSwitchSet = Arc<Mutex<SwitchSet>>;
 /// Example PCDU device handler.
 #[derive(new)]
 #[allow(clippy::too_many_arguments)]
-pub struct PcduHandler<ComInterface: SerialInterface> {
+pub struct PcduHandler<ComInterface: SerialInterface, TmSender: EcssTmSender> {
 id: UniqueApidTargetId,
 dev_str: &'static str,
-mode_node: ModeRequestHandlerMpscBounded,
+mode_interface: MpscModeLeafInterface,
 composite_request_rx: mpsc::Receiver<GenericMessage<CompositeRequest>>,
-hk_reply_tx: mpsc::SyncSender<GenericMessage<HkReply>>,
+hk_reply_tx: mpsc::Sender<GenericMessage<HkReply>>,
 switch_request_rx: mpsc::Receiver<GenericMessage<SwitchRequest>>,
-tm_sender: TmTcSender,
+tm_sender: TmSender,
 pub com_interface: ComInterface,
 shared_switch_map: Arc<Mutex<SwitchSet>>,
 #[new(value = "PusHkHelper::new(id)")]
@@ -227,7 +220,7 @@ pub struct PcduHandler<ComInterface: SerialInterface> {
 tm_buf: [u8; 256],
 }

-impl<ComInterface: SerialInterface> PcduHandler<ComInterface> {
+impl<ComInterface: SerialInterface, TmSender: EcssTmSender> PcduHandler<ComInterface, TmSender> {
 pub fn periodic_operation(&mut self, op_code: OpCode) {
 match op_code {
 OpCode::RegularOp => {
@@ -331,9 +324,8 @@ impl<ComInterface: SerialInterface> PcduHandler<ComInterface> {
 pub fn handle_mode_requests(&mut self) {
 loop {
 // TODO: Only allow one set mode request per cycle?
-match self.mode_node.try_recv_mode_request() {
+match self.mode_interface.request_rx.try_recv() {
-Ok(opt_msg) => {
+Ok(msg) => {
-if let Some(msg) = opt_msg {
 let result = self.handle_mode_request(msg);
 // TODO: Trigger event?
 if result.is_err() {
@@ -343,18 +335,14 @@ impl<ComInterface: SerialInterface> PcduHandler<ComInterface> {
 result.err().unwrap()
 );
 }
+}
+Err(e) => {
+if e != mpsc::TryRecvError::Empty {
+log::warn!("{}: failed to receive mode request: {:?}", self.dev_str, e);
 } else {
 break;
 }
 }
-Err(e) => match e {
-satrs::queue::GenericReceiveError::Empty => {
-break;
-}
-satrs::queue::GenericReceiveError::TxDisconnected(_) => {
-log::warn!("{}: failed to receive mode request: {:?}", self.dev_str, e);
-}
-},
 }
 }
 }
@@ -408,13 +396,17 @@ impl<ComInterface: SerialInterface> PcduHandler<ComInterface> {
 }
 }

-impl<ComInterface: SerialInterface> ModeProvider for PcduHandler<ComInterface> {
+impl<ComInterface: SerialInterface, TmSender: EcssTmSender> ModeProvider
+for PcduHandler<ComInterface, TmSender>
+{
 fn mode_and_submode(&self) -> ModeAndSubmode {
 self.mode_and_submode
 }
 }

-impl<ComInterface: SerialInterface> ModeRequestHandler for PcduHandler<ComInterface> {
+impl<ComInterface: SerialInterface, TmSender: EcssTmSender> ModeRequestHandler
+for PcduHandler<ComInterface, TmSender>
+{
 type Error = ModeError;
 fn start_transition(
 &mut self,
@@ -475,9 +467,10 @@ impl<ComInterface: SerialInterface> ModeRequestHandler for PcduHandler<ComInterf
 requestor.sender_id()
 );
 }
-self.mode_node
+self.mode_interface
-.send_mode_reply(requestor, reply)
+.reply_to_pus_tx
-.map_err(|_| GenericSendError::RxDisconnected)?;
+.send(GenericMessage::new(requestor, reply))
+.map_err(|_| GenericTargetedMessagingError::Send(GenericSendError::RxDisconnected))?;
 Ok(())
 }

@@ -490,20 +483,6 @@ impl<ComInterface: SerialInterface> ModeRequestHandler for PcduHandler<ComInterf
 }
 }

-impl<ComInterface: SerialInterface> ModeNode for PcduHandler<ComInterface> {
-fn id(&self) -> satrs::ComponentId {
-PCDU_HANDLER.into()
-}
-}
-
-impl<ComInterface: SerialInterface> ModeChild for PcduHandler<ComInterface> {
-type Sender = mpsc::SyncSender<GenericMessage<ModeReply>>;
-
-fn add_mode_parent(&mut self, id: satrs::ComponentId, reply_sender: Self::Sender) {
-self.mode_node.add_message_target(id, reply_sender);
-}
-}
-
 #[cfg(test)]
 mod tests {
 use std::sync::mpsc;
@@ -511,10 +490,7 @@ mod tests {
 use satrs::{
 mode::ModeRequest, power::SwitchStateBinary, request::GenericMessage, tmtc::PacketAsVec,
 };
-use satrs_example::config::{
+use satrs_example::config::components::{Apid, MGM_HANDLER_0};
-acs::MGM_HANDLER_0,
-components::{Apid, EPS_SUBSYSTEM, PCDU_HANDLER},
-};
 use satrs_minisim::eps::SwitchMapBinary;

 use super::*;
@@ -555,41 +531,31 @@ mod tests {
 }

 pub struct PcduTestbench {
-pub mode_request_tx: mpsc::SyncSender<GenericMessage<ModeRequest>>,
+pub mode_request_tx: mpsc::Sender<GenericMessage<ModeRequest>>,
 pub mode_reply_rx_to_pus: mpsc::Receiver<GenericMessage<ModeReply>>,
 pub mode_reply_rx_to_parent: mpsc::Receiver<GenericMessage<ModeReply>>,
 pub composite_request_tx: mpsc::Sender<GenericMessage<CompositeRequest>>,
 pub hk_reply_rx: mpsc::Receiver<GenericMessage<HkReply>>,
 pub tm_rx: mpsc::Receiver<PacketAsVec>,
 pub switch_request_tx: mpsc::Sender<GenericMessage<SwitchRequest>>,
-pub handler: PcduHandler<SerialInterfaceTest>,
+pub handler: PcduHandler<SerialInterfaceTest, mpsc::Sender<PacketAsVec>>,
 }

 impl PcduTestbench {
 pub fn new() -> Self {
-let (mode_request_tx, mode_request_rx) = mpsc::sync_channel(5);
+let (mode_request_tx, mode_request_rx) = mpsc::channel();
-let (mode_reply_tx_to_pus, mode_reply_rx_to_pus) = mpsc::sync_channel(5);
+let (mode_reply_tx_to_pus, mode_reply_rx_to_pus) = mpsc::channel();
 let (mode_reply_tx_to_parent, mode_reply_rx_to_parent) = mpsc::sync_channel(5);
-let mode_node =
+let mode_interface = MpscModeLeafInterface {
-ModeRequestHandlerMpscBounded::new(PCDU_HANDLER.into(), mode_request_rx);
+request_rx: mode_request_rx,
+reply_to_pus_tx: mode_reply_tx_to_pus,
+reply_to_parent_tx: mode_reply_tx_to_parent,
+};
 let (composite_request_tx, composite_request_rx) = mpsc::channel();
-let (hk_reply_tx, hk_reply_rx) = mpsc::sync_channel(10);
+let (hk_reply_tx, hk_reply_rx) = mpsc::channel();
-let (tm_tx, tm_rx) = mpsc::sync_channel::<PacketAsVec>(5);
+let (tm_tx, tm_rx) = mpsc::channel::<PacketAsVec>();
 let (switch_request_tx, switch_reqest_rx) = mpsc::channel();
 let shared_switch_map = Arc::new(Mutex::new(SwitchSet::default()));
-let mut handler = PcduHandler::new(
-UniqueApidTargetId::new(Apid::Eps as u16, 0),
-"TEST_PCDU",
-mode_node,
-composite_request_rx,
-hk_reply_tx,
-switch_reqest_rx,
-TmTcSender::Heap(tm_tx.clone()),
-SerialInterfaceTest::default(),
-shared_switch_map,
-);
-handler.add_mode_parent(EPS_SUBSYSTEM.into(), mode_reply_tx_to_parent);
-handler.add_mode_parent(PUS_MODE_SERVICE.into(), mode_reply_tx_to_pus);
 Self {
 mode_request_tx,
 mode_reply_rx_to_pus,
@@ -598,7 +564,17 @@ mod tests {
 hk_reply_rx,
 tm_rx,
 switch_request_tx,
-handler,
+handler: PcduHandler::new(
+UniqueApidTargetId::new(Apid::Eps as u16, 0),
+"TEST_PCDU",
+mode_interface,
+composite_request_rx,
+hk_reply_tx,
+switch_reqest_rx,
+tm_tx,
+SerialInterfaceTest::default(),
+shared_switch_map,
+),
 }
 }
 }
@@ -16,7 +16,7 @@ use satrs::{
 },
 spacepackets::time::cds::CdsTime,
 };
-use satrs_example::config::pus::PUS_EVENT_MANAGEMENT;
+use satrs_example::config::components::PUS_EVENT_MANAGEMENT;

 use crate::update_time;

@@ -1,20 +1,19 @@
 use std::time::Duration;
 use std::{
 collections::{HashSet, VecDeque},
+fmt::Debug,
+marker::PhantomData,
 sync::{Arc, Mutex},
 };

 use log::{info, warn};
-use satrs::tmtc::StoreAndSendError;
 use satrs::{
 encoding::ccsds::{SpValidity, SpacePacketValidator},
 hal::std::tcp_server::{HandledConnectionHandler, ServerConfig, TcpSpacepacketsServer},
 spacepackets::{CcsdsPacket, PacketId},
-tmtc::PacketSource,
+tmtc::{PacketSenderRaw, PacketSource},
 };

-use crate::tmtc::sender::TmTcSender;
-
 #[derive(Default)]
 pub struct ConnectionFinishedHandler {}

@@ -112,23 +111,31 @@ pub type TcpServer<ReceivesTc, SendError> = TcpSpacepacketsServer<
 SendError,
 >;

-pub struct TcpTask(pub TcpServer<TmTcSender, StoreAndSendError>);
+pub struct TcpTask<TcSender: PacketSenderRaw<Error = SendError>, SendError: Debug + 'static>(
+pub TcpServer<TcSender, SendError>,
+PhantomData<SendError>,
+);

-impl TcpTask {
+impl<TcSender: PacketSenderRaw<Error = SendError>, SendError: Debug + 'static>
+TcpTask<TcSender, SendError>
+{
 pub fn new(
 cfg: ServerConfig,
 tm_source: SyncTcpTmSource,
-tc_sender: TmTcSender,
+tc_sender: TcSender,
 valid_ids: HashSet<PacketId>,
 ) -> Result<Self, std::io::Error> {
-Ok(Self(TcpSpacepacketsServer::new(
+Ok(Self(
+TcpSpacepacketsServer::new(
 cfg,
 tm_source,
 tc_sender,
 SimplePacketValidator { valid_ids },
 ConnectionFinishedHandler::default(),
 None,
-)?))
+)?,
+PhantomData,
+))
 }

 pub fn periodic_operation(&mut self) {
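The reworked TcpTask above keeps the otherwise unused SendError type parameter alive with a PhantomData field. A minimal, self-contained sketch of that pattern follows; the sender trait and types are placeholders rather than the satrs API.

// Sketch of carrying an unused generic error type via PhantomData, as done for
// TcpTask above. Sender and its error type are illustrative placeholders.
use std::fmt::Debug;
use std::marker::PhantomData;

trait PacketSender {
    type Error: Debug;
    fn send_packet(&self, raw: &[u8]) -> Result<(), Self::Error>;
}

// The wrapper is generic over the sender's error type, but stores no value of
// that type directly, so PhantomData keeps the parameter used.
struct Task<Sender: PacketSender<Error = E>, E: Debug>(Sender, PhantomData<E>);

impl<Sender: PacketSender<Error = E>, E: Debug> Task<Sender, E> {
    fn new(sender: Sender) -> Self {
        Self(sender, PhantomData)
    }
    fn run_once(&self, packet: &[u8]) {
        if let Err(e) = self.0.send_packet(packet) {
            eprintln!("send failed: {e:?}");
        }
    }
}

struct StdoutSender;
impl PacketSender for StdoutSender {
    type Error = ();
    fn send_packet(&self, raw: &[u8]) -> Result<(), Self::Error> {
        println!("sending {} bytes", raw.len());
        Ok(())
    }
}

fn main() {
    Task::new(StdoutSender).run_once(&[0x1a, 0xcf, 0xfc, 0x1d]);
}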
@@ -1,16 +1,15 @@
+use core::fmt::Debug;
 use std::net::{SocketAddr, UdpSocket};
 use std::sync::mpsc;

 use log::{info, warn};
 use satrs::pus::HandlingStatus;
-use satrs::tmtc::{PacketAsVec, PacketInPool, StoreAndSendError};
+use satrs::tmtc::{PacketAsVec, PacketInPool, PacketSenderRaw};
 use satrs::{
 hal::std::udp_server::{ReceiveResult, UdpTcServer},
 pool::{PoolProviderWithGuards, SharedStaticMemoryPool},
 };

-use crate::tmtc::sender::TmTcSender;
-
 pub trait UdpTmHandler {
 fn send_tm_to_udp_client(&mut self, socket: &UdpSocket, recv_addr: &SocketAddr);
 }
@@ -66,12 +65,21 @@ impl UdpTmHandler for DynamicUdpTmHandler {
 }
 }

-pub struct UdpTmtcServer<TmHandler: UdpTmHandler> {
+pub struct UdpTmtcServer<
-pub udp_tc_server: UdpTcServer<TmTcSender, StoreAndSendError>,
+TcSender: PacketSenderRaw<Error = SendError>,
+TmHandler: UdpTmHandler,
+SendError,
+> {
+pub udp_tc_server: UdpTcServer<TcSender, SendError>,
 pub tm_handler: TmHandler,
 }

-impl<TmHandler: UdpTmHandler> UdpTmtcServer<TmHandler> {
+impl<
+TcSender: PacketSenderRaw<Error = SendError>,
+TmHandler: UdpTmHandler,
+SendError: Debug + 'static,
+> UdpTmtcServer<TcSender, TmHandler, SendError>
+{
 pub fn periodic_operation(&mut self) {
 loop {
 if self.poll_tc_server() == HandlingStatus::Empty {
@@ -107,6 +115,7 @@ impl<TmHandler: UdpTmHandler> UdpTmtcServer<TmHandler> {
 mod tests {
 use std::net::Ipv4Addr;
 use std::{
+cell::RefCell,
 collections::VecDeque,
 net::IpAddr,
 sync::{Arc, Mutex},
@@ -117,16 +126,30 @@ mod tests {
 ecss::{tc::PusTcCreator, WritablePusPacket},
 SpHeader,
 },
+tmtc::PacketSenderRaw,
 ComponentId,
 };
 use satrs_example::config::{components, OBSW_SERVER_ADDR};

-use crate::tmtc::sender::{MockSender, TmTcSender};
-
 use super::*;

 const UDP_SERVER_ID: ComponentId = 0x05;

+#[derive(Default, Debug)]
+pub struct TestSender {
+tc_vec: RefCell<VecDeque<PacketAsVec>>,
+}
+
+impl PacketSenderRaw for TestSender {
+type Error = ();
+
+fn send_packet(&self, sender_id: ComponentId, tc_raw: &[u8]) -> Result<(), Self::Error> {
+let mut mut_queue = self.tc_vec.borrow_mut();
+mut_queue.push_back(PacketAsVec::new(sender_id, tc_raw.to_vec()));
+Ok(())
+}
+}
+
 #[derive(Default, Debug, Clone)]
 pub struct TestTmHandler {
 addrs_to_send_to: Arc<Mutex<VecDeque<SocketAddr>>>,
@@ -141,7 +164,8 @@ mod tests {
 #[test]
 fn test_basic() {
 let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), 0);
-let test_receiver = TmTcSender::Mock(MockSender::default());
+let test_receiver = TestSender::default();
+// let tc_queue = test_receiver.tc_vec.clone();
 let udp_tc_server =
 UdpTcServer::new(UDP_SERVER_ID, sock_addr, 2048, test_receiver).unwrap();
 let tm_handler = TestTmHandler::default();
@@ -151,13 +175,7 @@ mod tests {
 tm_handler,
 };
 udp_dyn_server.periodic_operation();
-let queue = udp_dyn_server
+let queue = udp_dyn_server.udp_tc_server.tc_sender.tc_vec.borrow();
-.udp_tc_server
-.tc_sender
-.get_mock_sender()
-.unwrap()
-.0
-.borrow();
 assert!(queue.is_empty());
 assert!(tm_handler_calls.lock().unwrap().is_empty());
 }
@@ -165,7 +183,8 @@ mod tests {
 #[test]
 fn test_transactions() {
 let sock_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0);
-let test_receiver = TmTcSender::Mock(MockSender::default());
+let test_receiver = TestSender::default();
+// let tc_queue = test_receiver.tc_vec.clone();
 let udp_tc_server =
 UdpTcServer::new(UDP_SERVER_ID, sock_addr, 2048, test_receiver).unwrap();
 let server_addr = udp_tc_server.socket.local_addr().unwrap();
@@ -185,13 +204,7 @@ mod tests {
 client.send_to(&ping_tc, server_addr).unwrap();
 udp_dyn_server.periodic_operation();
 {
-let mut queue = udp_dyn_server
+let mut queue = udp_dyn_server.udp_tc_server.tc_sender.tc_vec.borrow_mut();
-.udp_tc_server
-.tc_sender
-.get_mock_sender()
-.unwrap()
-.0
-.borrow_mut();
 assert!(!queue.is_empty());
 let packet_with_sender = queue.pop_front().unwrap();
 assert_eq!(packet_with_sender.packet, ping_tc);
@@ -206,13 +219,7 @@ mod tests {
 assert_eq!(received_addr, client_addr);
 }
 udp_dyn_server.periodic_operation();
-let queue = udp_dyn_server
+let queue = udp_dyn_server.udp_tc_server.tc_sender.tc_vec.borrow();
-.udp_tc_server
-.tc_sender
-.get_mock_sender()
-.unwrap()
-.0
-.borrow();
 assert!(queue.is_empty());
 drop(queue);
 // Still tries to send to the same client.
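The TestSender introduced in the UDP server tests above uses interior mutability so that a send method taking &self can still record packets for later inspection. A minimal, standalone sketch of that mock pattern follows; the trait and packet type are simplified placeholders, not the satrs definitions.

// Sketch of a RefCell-backed test mock for a sender trait whose send method
// only takes &self, mirroring the TestSender used in the UDP server tests.
use std::cell::RefCell;
use std::collections::VecDeque;

trait PacketSenderRaw {
    type Error;
    fn send_packet(&self, raw: &[u8]) -> Result<(), Self::Error>;
}

#[derive(Default)]
struct TestSender {
    // Interior mutability: the queue can be mutated through a shared reference.
    sent: RefCell<VecDeque<Vec<u8>>>,
}

impl PacketSenderRaw for TestSender {
    type Error = ();
    fn send_packet(&self, raw: &[u8]) -> Result<(), Self::Error> {
        self.sent.borrow_mut().push_back(raw.to_vec());
        Ok(())
    }
}

fn main() {
    let sender = TestSender::default();
    sender.send_packet(&[1, 2, 3]).unwrap();
    // Inspect what the code under test "sent".
    assert_eq!(sender.sent.borrow().len(), 1);
}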
@@ -1,73 +1,3 @@
-use std::{
-net::{IpAddr, SocketAddr},
-sync::{mpsc, Arc, Mutex},
-thread,
-time::Duration,
-};
-
-use acs::mgm::{MgmHandlerLis3Mdl, SpiDummyInterface, SpiSimInterface, SpiSimInterfaceWrapper};
-use eps::{
-pcdu::{PcduHandler, SerialInterfaceDummy, SerialInterfaceToSim, SerialSimInterfaceWrapper},
-PowerSwitchHelper,
-};
-use events::EventHandler;
-use interface::{
-sim_client_udp::create_sim_client,
-tcp::{SyncTcpTmSource, TcpTask},
-udp::UdpTmtcServer,
-};
-use log::info;
-use logger::setup_logger;
-use pus::{
-action::create_action_service,
-event::create_event_service,
-hk::create_hk_service,
-mode::create_mode_service,
-scheduler::{create_scheduler_service, TcReleaser},
-stack::PusStack,
-test::create_test_service,
-PusTcDistributor, PusTcMpscRouter,
-};
-use requests::GenericRequestRouter;
-use satrs::{
-hal::std::{tcp_server::ServerConfig, udp_server::UdpTcServer},
-mode::{Mode, ModeAndSubmode, ModeRequest, ModeRequestHandlerMpscBounded},
-mode_tree::connect_mode_nodes,
-pus::{event_man::EventRequestWithToken, EcssTcInMemConverter, HandlingStatus},
-request::{GenericMessage, MessageMetadata},
-spacepackets::time::{cds::CdsTime, TimeWriter},
-};
-use satrs_example::{
-config::{
-acs::{MGM_HANDLER_0, MGM_HANDLER_1},
-components::{NO_SENDER, PCDU_HANDLER, TCP_SERVER, UDP_SERVER},
-pool::create_sched_tc_pool,
-tasks::{FREQ_MS_AOCS, FREQ_MS_PUS_STACK, FREQ_MS_UDP_TMTC, SIM_CLIENT_IDLE_DELAY_MS},
-OBSW_SERVER_ADDR, PACKET_ID_VALIDATOR, SERVER_PORT,
-},
-DeviceMode,
-};
-use tmtc::sender::TmTcSender;
-use tmtc::{tc_source::TcSourceTask, tm_sink::TmSink};
-
-cfg_if::cfg_if! {
-if #[cfg(feature = "heap_tmtc")] {
-use interface::udp::DynamicUdpTmHandler;
-use satrs::pus::EcssTcInVecConverter;
-use tmtc::{tc_source::TcSourceTaskDynamic, tm_sink::TmSinkDynamic};
-} else {
-use std::sync::RwLock;
-use interface::udp::StaticUdpTmHandler;
-use satrs::pus::EcssTcInSharedPoolConverter;
-use satrs::tmtc::{PacketSenderWithSharedPool, SharedPacketPool};
-use satrs_example::config::pool::create_static_pools;
-use tmtc::{
-tc_source::TcSourceTaskStatic,
-tm_sink::TmSinkStatic,
-};
-}
-}
-
 mod acs;
 mod eps;
 mod events;
@@ -76,74 +6,106 @@ mod interface;
 mod logger;
 mod pus;
 mod requests;
-mod spi;
 mod tmtc;

-fn main() {
+use crate::eps::pcdu::{
-setup_logger().expect("setting up logging with fern failed");
+PcduHandler, SerialInterfaceDummy, SerialInterfaceToSim, SerialSimInterfaceWrapper,
-println!("Running OBSW example");
+};
+use crate::eps::PowerSwitchHelper;
+use crate::events::EventHandler;
+use crate::interface::udp::DynamicUdpTmHandler;
+use crate::pus::stack::PusStack;
+use crate::tmtc::tc_source::{TcSourceTaskDynamic, TcSourceTaskStatic};
+use crate::tmtc::tm_sink::{TmSinkDynamic, TmSinkStatic};
+use log::info;
+use pus::test::create_test_service_dynamic;
+use satrs::hal::std::tcp_server::ServerConfig;
+use satrs::hal::std::udp_server::UdpTcServer;
+use satrs::pus::HandlingStatus;
+use satrs::request::{GenericMessage, MessageMetadata};
+use satrs::tmtc::{PacketSenderWithSharedPool, SharedPacketPool};
+use satrs_example::config::pool::{create_sched_tc_pool, create_static_pools};
+use satrs_example::config::tasks::{
+FREQ_MS_AOCS, FREQ_MS_PUS_STACK, FREQ_MS_UDP_TMTC, SIM_CLIENT_IDLE_DELAY_MS,
+};
+use satrs_example::config::{OBSW_SERVER_ADDR, PACKET_ID_VALIDATOR, SERVER_PORT};
+use satrs_example::DeviceMode;

-cfg_if::cfg_if! {
+use crate::acs::mgm::{
-if #[cfg(not(feature = "heap_tmtc"))] {
+MgmHandlerLis3Mdl, MpscModeLeafInterface, SpiDummyInterface, SpiSimInterface,
+SpiSimInterfaceWrapper,
+};
+use crate::interface::sim_client_udp::create_sim_client;
+use crate::interface::tcp::{SyncTcpTmSource, TcpTask};
+use crate::interface::udp::{StaticUdpTmHandler, UdpTmtcServer};
+use crate::logger::setup_logger;
+use crate::pus::action::{create_action_service_dynamic, create_action_service_static};
+use crate::pus::event::{create_event_service_dynamic, create_event_service_static};
+use crate::pus::hk::{create_hk_service_dynamic, create_hk_service_static};
+use crate::pus::mode::{create_mode_service_dynamic, create_mode_service_static};
+use crate::pus::scheduler::{create_scheduler_service_dynamic, create_scheduler_service_static};
+use crate::pus::test::create_test_service_static;
+use crate::pus::{PusTcDistributor, PusTcMpscRouter};
+use crate::requests::{CompositeRequest, GenericRequestRouter};
+use satrs::mode::{Mode, ModeAndSubmode, ModeRequest};
+use satrs::pus::event_man::EventRequestWithToken;
+use satrs::spacepackets::{time::cds::CdsTime, time::TimeWriter};
+use satrs_example::config::components::{
+MGM_HANDLER_0, NO_SENDER, PCDU_HANDLER, TCP_SERVER, UDP_SERVER,
+};
+use std::net::{IpAddr, SocketAddr};
+use std::sync::{mpsc, Mutex};
+use std::sync::{Arc, RwLock};
+use std::thread;
+use std::time::Duration;

+#[allow(dead_code)]
+fn static_tmtc_pool_main() {
 let (tm_pool, tc_pool) = create_static_pools();
 let shared_tm_pool = Arc::new(RwLock::new(tm_pool));
 let shared_tc_pool = Arc::new(RwLock::new(tc_pool));
 let shared_tm_pool_wrapper = SharedPacketPool::new(&shared_tm_pool);
 let shared_tc_pool_wrapper = SharedPacketPool::new(&shared_tc_pool);
-}
-}

 let (tc_source_tx, tc_source_rx) = mpsc::sync_channel(50);
 let (tm_sink_tx, tm_sink_rx) = mpsc::sync_channel(50);
 let (tm_server_tx, tm_server_rx) = mpsc::sync_channel(50);

-cfg_if::cfg_if! {
+let tm_sink_tx_sender =
-if #[cfg(not(feature = "heap_tmtc"))] {
+PacketSenderWithSharedPool::new(tm_sink_tx.clone(), shared_tm_pool_wrapper.clone());
-let tm_sender = TmTcSender::Static(
-PacketSenderWithSharedPool::new(tm_sink_tx.clone(), shared_tm_pool_wrapper.clone())
-);
-} else if #[cfg(feature = "heap_tmtc")] {
-let tm_sender = TmTcSender::Heap(tm_sink_tx.clone());
-}
-}

 let (sim_request_tx, sim_request_rx) = mpsc::channel();
-let (mgm_0_sim_reply_tx, mgm_0_sim_reply_rx) = mpsc::channel();
+let (mgm_sim_reply_tx, mgm_sim_reply_rx) = mpsc::channel();
-let (mgm_1_sim_reply_tx, mgm_1_sim_reply_rx) = mpsc::channel();
 let (pcdu_sim_reply_tx, pcdu_sim_reply_rx) = mpsc::channel();
 let mut opt_sim_client = create_sim_client(sim_request_rx);

-let (mgm_0_handler_composite_tx, mgm_0_handler_composite_rx) = mpsc::sync_channel(10);
+let (mgm_handler_composite_tx, mgm_handler_composite_rx) =
-let (mgm_1_handler_composite_tx, mgm_1_handler_composite_rx) = mpsc::sync_channel(10);
+mpsc::sync_channel::<GenericMessage<CompositeRequest>>(10);
-let (pcdu_handler_composite_tx, pcdu_handler_composite_rx) = mpsc::sync_channel(30);
+let (pcdu_handler_composite_tx, pcdu_handler_composite_rx) =
-let (mgm_0_handler_mode_tx, mgm_0_handler_mode_rx) = mpsc::sync_channel(5);
+mpsc::sync_channel::<GenericMessage<CompositeRequest>>(30);
-let (mgm_1_handler_mode_tx, mgm_1_handler_mode_rx) = mpsc::sync_channel(5);
-let (pcdu_handler_mode_tx, pcdu_handler_mode_rx) = mpsc::sync_channel(5);
+let (mgm_handler_mode_tx, mgm_handler_mode_rx) =
+mpsc::sync_channel::<GenericMessage<ModeRequest>>(5);
+let (pcdu_handler_mode_tx, pcdu_handler_mode_rx) =
+mpsc::sync_channel::<GenericMessage<ModeRequest>>(5);

 // Some request are targetable. This map is used to retrieve sender handles based on a target ID.
 let mut request_map = GenericRequestRouter::default();
 request_map
 .composite_router_map
-.insert(MGM_HANDLER_0.id(), mgm_0_handler_composite_tx);
+.insert(MGM_HANDLER_0.id(), mgm_handler_composite_tx);
 request_map
-.composite_router_map
+.mode_router_map
-.insert(MGM_HANDLER_0.id(), mgm_1_handler_composite_tx);
+.insert(MGM_HANDLER_0.id(), mgm_handler_mode_tx);
 request_map
 .composite_router_map
 .insert(PCDU_HANDLER.id(), pcdu_handler_composite_tx);
+request_map
+.mode_router_map
+.insert(PCDU_HANDLER.id(), pcdu_handler_mode_tx.clone());

 // This helper structure is used by all telecommand providers which need to send telecommands
 // to the TC source.
|
||||||
cfg_if::cfg_if! {
|
let tc_source = PacketSenderWithSharedPool::new(tc_source_tx, shared_tc_pool_wrapper.clone());
|
||||||
if #[cfg(not(feature = "heap_tmtc"))] {
|
|
||||||
let tc_sender_with_shared_pool =
|
|
||||||
PacketSenderWithSharedPool::new(tc_source_tx, shared_tc_pool_wrapper.clone());
|
|
||||||
let tc_in_mem_converter =
|
|
||||||
EcssTcInMemConverter::Static(EcssTcInSharedPoolConverter::new(shared_tc_pool, 4096));
|
|
||||||
} else if #[cfg(feature = "heap_tmtc")] {
|
|
||||||
let tc_in_mem_converter = EcssTcInMemConverter::Heap(EcssTcInVecConverter::default());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create event handling components
|
// Create event handling components
|
||||||
// These sender handles are used to send event requests, for example to enable or disable
|
// These sender handles are used to send event requests, for example to enable or disable
|
||||||
@@ -155,24 +117,17 @@ fn main() {
    // in the sat-rs documentation.
    let mut event_handler = EventHandler::new(tm_sink_tx.clone(), event_rx, event_request_rx);

-    let (pus_test_tx, pus_test_rx) = mpsc::sync_channel(20);
-    let (pus_event_tx, pus_event_rx) = mpsc::sync_channel(10);
-    let (pus_sched_tx, pus_sched_rx) = mpsc::sync_channel(50);
-    let (pus_hk_tx, pus_hk_rx) = mpsc::sync_channel(50);
-    let (pus_action_tx, pus_action_rx) = mpsc::sync_channel(50);
-    let (pus_mode_tx, pus_mode_rx) = mpsc::sync_channel(50);
+    let (pus_test_tx, pus_test_rx) = mpsc::channel();
+    let (pus_event_tx, pus_event_rx) = mpsc::channel();
+    let (pus_sched_tx, pus_sched_rx) = mpsc::channel();
+    let (pus_hk_tx, pus_hk_rx) = mpsc::channel();
+    let (pus_action_tx, pus_action_rx) = mpsc::channel();
+    let (pus_mode_tx, pus_mode_rx) = mpsc::channel();

    let (_pus_action_reply_tx, pus_action_reply_rx) = mpsc::channel();
-    let (pus_hk_reply_tx, pus_hk_reply_rx) = mpsc::sync_channel(50);
-    let (pus_mode_reply_tx, pus_mode_reply_rx) = mpsc::sync_channel(30);
+    let (pus_hk_reply_tx, pus_hk_reply_rx) = mpsc::channel();
+    let (pus_mode_reply_tx, pus_mode_reply_rx) = mpsc::channel();

-    cfg_if::cfg_if! {
-        if #[cfg(not(feature = "heap_tmtc"))] {
-            let tc_releaser = TcReleaser::Static(tc_sender_with_shared_pool.clone());
-        } else if #[cfg(feature = "heap_tmtc")] {
-            let tc_releaser = TcReleaser::Heap(tc_source_tx.clone());
-        }
-    }
    let pus_router = PusTcMpscRouter {
        test_tc_sender: pus_test_tx,
        event_tc_sender: pus_event_tx,
@@ -181,42 +136,41 @@ fn main() {
        action_tc_sender: pus_action_tx,
        mode_tc_sender: pus_mode_tx,
    };
-    let pus_test_service = create_test_service(
-        tm_sender.clone(),
-        tc_in_mem_converter.clone(),
+    let pus_test_service = create_test_service_static(
+        tm_sink_tx_sender.clone(),
+        shared_tc_pool.clone(),
        event_tx.clone(),
        pus_test_rx,
    );
-    let pus_scheduler_service = create_scheduler_service(
-        tm_sender.clone(),
-        tc_in_mem_converter.clone(),
-        tc_releaser,
+    let pus_scheduler_service = create_scheduler_service_static(
+        tm_sink_tx_sender.clone(),
+        tc_source.clone(),
        pus_sched_rx,
        create_sched_tc_pool(),
    );
-    let pus_event_service = create_event_service(
-        tm_sender.clone(),
-        tc_in_mem_converter.clone(),
+    let pus_event_service = create_event_service_static(
+        tm_sink_tx_sender.clone(),
+        shared_tc_pool.clone(),
        pus_event_rx,
        event_request_tx,
    );
-    let pus_action_service = create_action_service(
-        tm_sender.clone(),
-        tc_in_mem_converter.clone(),
+    let pus_action_service = create_action_service_static(
+        tm_sink_tx_sender.clone(),
+        shared_tc_pool.clone(),
        pus_action_rx,
        request_map.clone(),
        pus_action_reply_rx,
    );
-    let pus_hk_service = create_hk_service(
-        tm_sender.clone(),
-        tc_in_mem_converter.clone(),
+    let pus_hk_service = create_hk_service_static(
+        tm_sink_tx_sender.clone(),
+        shared_tc_pool.clone(),
        pus_hk_rx,
        request_map.clone(),
        pus_hk_reply_rx,
    );
-    let pus_mode_service = create_mode_service(
-        tm_sender.clone(),
-        tc_in_mem_converter.clone(),
+    let pus_mode_service = create_mode_service_static(
+        tm_sink_tx_sender.clone(),
+        shared_tc_pool.clone(),
        pus_mode_rx,
        request_map,
        pus_mode_reply_rx,
@@ -230,36 +184,21 @@ fn main() {
        pus_mode_service,
    );

-    cfg_if::cfg_if! {
-        if #[cfg(not(feature = "heap_tmtc"))] {
-            let mut tmtc_task = TcSourceTask::Static(TcSourceTaskStatic::new(
+    let mut tmtc_task = TcSourceTaskStatic::new(
        shared_tc_pool_wrapper.clone(),
        tc_source_rx,
-                PusTcDistributor::new(tm_sender.clone(), pus_router),
-            ));
-            let tc_sender = TmTcSender::Static(tc_sender_with_shared_pool);
-            let udp_tm_handler = StaticUdpTmHandler {
-                tm_rx: tm_server_rx,
-                tm_store: shared_tm_pool.clone(),
-            };
-        } else if #[cfg(feature = "heap_tmtc")] {
-            let mut tmtc_task = TcSourceTask::Heap(TcSourceTaskDynamic::new(
-                tc_source_rx,
-                PusTcDistributor::new(tm_sender.clone(), pus_router),
-            ));
-            let tc_sender = TmTcSender::Heap(tc_source_tx.clone());
-            let udp_tm_handler = DynamicUdpTmHandler {
-                tm_rx: tm_server_rx,
-            };
-        }
-    }
+        PusTcDistributor::new(tm_sink_tx_sender, pus_router),
+    );

    let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
-    let udp_tc_server = UdpTcServer::new(UDP_SERVER.id(), sock_addr, 2048, tc_sender.clone())
+    let udp_tc_server = UdpTcServer::new(UDP_SERVER.id(), sock_addr, 2048, tc_source.clone())
        .expect("creating UDP TMTC server failed");
    let mut udp_tmtc_server = UdpTmtcServer {
        udp_tc_server,
-        tm_handler: udp_tm_handler,
+        tm_handler: StaticUdpTmHandler {
+            tm_rx: tm_server_rx,
+            tm_store: shared_tm_pool.clone(),
+        },
    };

    let tcp_server_cfg = ServerConfig::new(
@@ -273,96 +212,60 @@ fn main() {
    let mut tcp_server = TcpTask::new(
        tcp_server_cfg,
        sync_tm_tcp_source.clone(),
-        tc_sender,
+        tc_source.clone(),
        PACKET_ID_VALIDATOR.clone(),
    )
    .expect("tcp server creation failed");

-    cfg_if::cfg_if! {
-        if #[cfg(not(feature = "heap_tmtc"))] {
-            let mut tm_sink = TmSink::Static(TmSinkStatic::new(
+    let mut tm_sink = TmSinkStatic::new(
        shared_tm_pool_wrapper,
        sync_tm_tcp_source,
        tm_sink_rx,
        tm_server_tx,
-            ));
-        } else if #[cfg(feature = "heap_tmtc")] {
-            let mut tm_sink = TmSink::Heap(TmSinkDynamic::new(
-                sync_tm_tcp_source,
-                tm_sink_rx,
-                tm_server_tx,
-            ));
-        }
-    }
+    );
+    let (mgm_handler_mode_reply_to_parent_tx, _mgm_handler_mode_reply_to_parent_rx) =
+        mpsc::sync_channel(5);

    let shared_switch_set = Arc::new(Mutex::default());
    let (switch_request_tx, switch_request_rx) = mpsc::sync_channel(20);
    let switch_helper = PowerSwitchHelper::new(switch_request_tx, shared_switch_set.clone());

-    let shared_mgm_0_set = Arc::default();
-    let shared_mgm_1_set = Arc::default();
-    let mgm_0_mode_node =
-        ModeRequestHandlerMpscBounded::new(MGM_HANDLER_0.into(), mgm_0_handler_mode_rx);
-    let mgm_1_mode_node =
-        ModeRequestHandlerMpscBounded::new(MGM_HANDLER_1.into(), mgm_1_handler_mode_rx);
-    let (mgm_0_spi_interface, mgm_1_spi_interface) =
-        if let Some(sim_client) = opt_sim_client.as_mut() {
-            sim_client
-                .add_reply_recipient(satrs_minisim::SimComponent::Mgm0Lis3Mdl, mgm_0_sim_reply_tx);
-            sim_client
-                .add_reply_recipient(satrs_minisim::SimComponent::Mgm1Lis3Mdl, mgm_1_sim_reply_tx);
-            (
-                SpiSimInterfaceWrapper::Sim(SpiSimInterface {
-                    sim_request_tx: sim_request_tx.clone(),
-                    sim_reply_rx: mgm_0_sim_reply_rx,
-                }),
-                SpiSimInterfaceWrapper::Sim(SpiSimInterface {
-                    sim_request_tx: sim_request_tx.clone(),
-                    sim_reply_rx: mgm_1_sim_reply_rx,
-                }),
-            )
-        } else {
-            (
-                SpiSimInterfaceWrapper::Dummy(SpiDummyInterface::default()),
-                SpiSimInterfaceWrapper::Dummy(SpiDummyInterface::default()),
-            )
+    let shared_mgm_set = Arc::default();
+    let mgm_mode_leaf_interface = MpscModeLeafInterface {
+        request_rx: mgm_handler_mode_rx,
+        reply_to_pus_tx: pus_mode_reply_tx.clone(),
+        reply_to_parent_tx: mgm_handler_mode_reply_to_parent_tx,
    };
-    let mut mgm_0_handler = MgmHandlerLis3Mdl::new(
+    let mgm_spi_interface = if let Some(sim_client) = opt_sim_client.as_mut() {
+        sim_client.add_reply_recipient(satrs_minisim::SimComponent::MgmLis3Mdl, mgm_sim_reply_tx);
+        SpiSimInterfaceWrapper::Sim(SpiSimInterface {
+            sim_request_tx: sim_request_tx.clone(),
+            sim_reply_rx: mgm_sim_reply_rx,
+        })
+    } else {
+        SpiSimInterfaceWrapper::Dummy(SpiDummyInterface::default())
+    };
+    let mut mgm_handler = MgmHandlerLis3Mdl::new(
        MGM_HANDLER_0,
        "MGM_0",
-        mgm_0_mode_node,
-        mgm_0_handler_composite_rx,
+        mgm_mode_leaf_interface,
+        mgm_handler_composite_rx,
        pus_hk_reply_tx.clone(),
        switch_helper.clone(),
-        tm_sender.clone(),
-        mgm_0_spi_interface,
-        shared_mgm_0_set,
-    );
-    let mut mgm_1_handler = MgmHandlerLis3Mdl::new(
-        MGM_HANDLER_1,
-        "MGM_1",
-        mgm_1_mode_node,
-        mgm_1_handler_composite_rx,
-        pus_hk_reply_tx.clone(),
-        switch_helper.clone(),
-        tm_sender.clone(),
-        mgm_1_spi_interface,
-        shared_mgm_1_set,
-    );
-    // Connect PUS service to device handlers.
-    connect_mode_nodes(
-        &mut pus_stack.mode_srv,
-        mgm_0_handler_mode_tx,
-        &mut mgm_0_handler,
-        pus_mode_reply_tx.clone(),
-    );
-    connect_mode_nodes(
-        &mut pus_stack.mode_srv,
-        mgm_1_handler_mode_tx,
-        &mut mgm_1_handler,
-        pus_mode_reply_tx.clone(),
+        tm_sink_tx.clone(),
+        mgm_spi_interface,
+        shared_mgm_set,
    );

+    let (pcdu_handler_mode_reply_to_parent_tx, _pcdu_handler_mode_reply_to_parent_rx) =
+        mpsc::sync_channel(10);
+    let pcdu_mode_leaf_interface = MpscModeLeafInterface {
+        request_rx: pcdu_handler_mode_rx,
+        reply_to_pus_tx: pus_mode_reply_tx,
+        reply_to_parent_tx: pcdu_handler_mode_reply_to_parent_tx,
+    };
    let pcdu_serial_interface = if let Some(sim_client) = opt_sim_client.as_mut() {
        sim_client.add_reply_recipient(satrs_minisim::SimComponent::Pcdu, pcdu_sim_reply_tx);
        SerialSimInterfaceWrapper::Sim(SerialInterfaceToSim::new(
@@ -372,26 +275,18 @@ fn main() {
    } else {
        SerialSimInterfaceWrapper::Dummy(SerialInterfaceDummy::default())
    };
-    let pcdu_mode_node =
-        ModeRequestHandlerMpscBounded::new(PCDU_HANDLER.into(), pcdu_handler_mode_rx);
    let mut pcdu_handler = PcduHandler::new(
        PCDU_HANDLER,
        "PCDU",
-        pcdu_mode_node,
+        pcdu_mode_leaf_interface,
        pcdu_handler_composite_rx,
        pus_hk_reply_tx,
        switch_request_rx,
-        tm_sender.clone(),
+        tm_sink_tx,
        pcdu_serial_interface,
        shared_switch_set,
    );
-    connect_mode_nodes(
-        &mut pus_stack.mode_srv,
-        pcdu_handler_mode_tx.clone(),
-        &mut pcdu_handler,
-        pus_mode_reply_tx,
-    );

    // The PCDU is a critical component which should be in normal mode immediately.
    pcdu_handler_mode_tx
        .send(GenericMessage::new(
@@ -454,8 +349,7 @@ fn main() {
    let jh_aocs = thread::Builder::new()
        .name("sat-rs aocs".to_string())
        .spawn(move || loop {
-            mgm_0_handler.periodic_operation();
-            mgm_1_handler.periodic_operation();
+            mgm_handler.periodic_operation();
            thread::sleep(Duration::from_millis(FREQ_MS_AOCS));
        })
        .unwrap();
@@ -466,13 +360,11 @@ fn main() {
        .spawn(move || loop {
            // TODO: We should introduce something like a fixed timeslot helper to allow a more
            // declarative API. It would also be very useful for the AOCS task.
-            //
-            // TODO: The fixed timeslot handler exists.. use it.
-            pcdu_handler.periodic_operation(crate::eps::pcdu::OpCode::RegularOp);
+            pcdu_handler.periodic_operation(eps::pcdu::OpCode::RegularOp);
            thread::sleep(Duration::from_millis(50));
-            pcdu_handler.periodic_operation(crate::eps::pcdu::OpCode::PollAndRecvReplies);
+            pcdu_handler.periodic_operation(eps::pcdu::OpCode::PollAndRecvReplies);
            thread::sleep(Duration::from_millis(50));
-            pcdu_handler.periodic_operation(crate::eps::pcdu::OpCode::PollAndRecvReplies);
+            pcdu_handler.periodic_operation(eps::pcdu::OpCode::PollAndRecvReplies);
            thread::sleep(Duration::from_millis(300));
        })
        .unwrap();
@@ -508,6 +400,325 @@ fn main() {
        .expect("Joining PUS handler thread failed");
}

+#[allow(dead_code)]
+fn dyn_tmtc_pool_main() {
+    let (tc_source_tx, tc_source_rx) = mpsc::channel();
+    let (tm_sink_tx, tm_sink_rx) = mpsc::channel();
+    let (tm_server_tx, tm_server_rx) = mpsc::channel();
+
+    let (sim_request_tx, sim_request_rx) = mpsc::channel();
+    let (mgm_sim_reply_tx, mgm_sim_reply_rx) = mpsc::channel();
+    let (pcdu_sim_reply_tx, pcdu_sim_reply_rx) = mpsc::channel();
+    let mut opt_sim_client = create_sim_client(sim_request_rx);
+
+    // Some request are targetable. This map is used to retrieve sender handles based on a target ID.
+    let (mgm_handler_composite_tx, mgm_handler_composite_rx) =
+        mpsc::sync_channel::<GenericMessage<CompositeRequest>>(5);
+    let (pcdu_handler_composite_tx, pcdu_handler_composite_rx) =
+        mpsc::sync_channel::<GenericMessage<CompositeRequest>>(10);
+    let (mgm_handler_mode_tx, mgm_handler_mode_rx) =
+        mpsc::sync_channel::<GenericMessage<ModeRequest>>(5);
+    let (pcdu_handler_mode_tx, pcdu_handler_mode_rx) =
+        mpsc::sync_channel::<GenericMessage<ModeRequest>>(10);
+
+    // Some request are targetable. This map is used to retrieve sender handles based on a target ID.
+    let mut request_map = GenericRequestRouter::default();
+    request_map
+        .composite_router_map
+        .insert(MGM_HANDLER_0.id(), mgm_handler_composite_tx);
+    request_map
+        .mode_router_map
+        .insert(MGM_HANDLER_0.id(), mgm_handler_mode_tx);
+    request_map
+        .composite_router_map
+        .insert(PCDU_HANDLER.id(), pcdu_handler_composite_tx);
+    request_map
+        .mode_router_map
+        .insert(PCDU_HANDLER.id(), pcdu_handler_mode_tx.clone());
+
+    // Create event handling components
+    // These sender handles are used to send event requests, for example to enable or disable
+    // certain events.
+    let (event_tx, event_rx) = mpsc::sync_channel(100);
+    let (event_request_tx, event_request_rx) = mpsc::channel::<EventRequestWithToken>();
+    // The event task is the core handler to perform the event routing and TM handling as specified
+    // in the sat-rs documentation.
+    let mut event_handler = EventHandler::new(tm_sink_tx.clone(), event_rx, event_request_rx);
+
+    let (pus_test_tx, pus_test_rx) = mpsc::channel();
+    let (pus_event_tx, pus_event_rx) = mpsc::channel();
+    let (pus_sched_tx, pus_sched_rx) = mpsc::channel();
+    let (pus_hk_tx, pus_hk_rx) = mpsc::channel();
+    let (pus_action_tx, pus_action_rx) = mpsc::channel();
+    let (pus_mode_tx, pus_mode_rx) = mpsc::channel();
+
+    let (_pus_action_reply_tx, pus_action_reply_rx) = mpsc::channel();
+    let (pus_hk_reply_tx, pus_hk_reply_rx) = mpsc::channel();
+    let (pus_mode_reply_tx, pus_mode_reply_rx) = mpsc::channel();
+
+    let pus_router = PusTcMpscRouter {
+        test_tc_sender: pus_test_tx,
+        event_tc_sender: pus_event_tx,
+        sched_tc_sender: pus_sched_tx,
+        hk_tc_sender: pus_hk_tx,
+        action_tc_sender: pus_action_tx,
+        mode_tc_sender: pus_mode_tx,
+    };
+
+    let pus_test_service =
+        create_test_service_dynamic(tm_sink_tx.clone(), event_tx.clone(), pus_test_rx);
+    let pus_scheduler_service = create_scheduler_service_dynamic(
+        tm_sink_tx.clone(),
+        tc_source_tx.clone(),
+        pus_sched_rx,
+        create_sched_tc_pool(),
+    );
+
+    let pus_event_service =
+        create_event_service_dynamic(tm_sink_tx.clone(), pus_event_rx, event_request_tx);
+    let pus_action_service = create_action_service_dynamic(
+        tm_sink_tx.clone(),
+        pus_action_rx,
+        request_map.clone(),
+        pus_action_reply_rx,
+    );
+    let pus_hk_service = create_hk_service_dynamic(
+        tm_sink_tx.clone(),
+        pus_hk_rx,
+        request_map.clone(),
+        pus_hk_reply_rx,
+    );
+    let pus_mode_service = create_mode_service_dynamic(
+        tm_sink_tx.clone(),
+        pus_mode_rx,
+        request_map,
+        pus_mode_reply_rx,
+    );
+    let mut pus_stack = PusStack::new(
+        pus_test_service,
+        pus_hk_service,
+        pus_event_service,
+        pus_action_service,
+        pus_scheduler_service,
+        pus_mode_service,
+    );
+
+    let mut tmtc_task = TcSourceTaskDynamic::new(
+        tc_source_rx,
+        PusTcDistributor::new(tm_sink_tx.clone(), pus_router),
+    );
+
+    let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
+    let udp_tc_server = UdpTcServer::new(UDP_SERVER.id(), sock_addr, 2048, tc_source_tx.clone())
+        .expect("creating UDP TMTC server failed");
+    let mut udp_tmtc_server = UdpTmtcServer {
+        udp_tc_server,
+        tm_handler: DynamicUdpTmHandler {
+            tm_rx: tm_server_rx,
+        },
+    };
+
+    let tcp_server_cfg = ServerConfig::new(
+        TCP_SERVER.id(),
+        sock_addr,
+        Duration::from_millis(400),
+        4096,
+        8192,
+    );
+    let sync_tm_tcp_source = SyncTcpTmSource::new(200);
+    let mut tcp_server = TcpTask::new(
+        tcp_server_cfg,
+        sync_tm_tcp_source.clone(),
+        tc_source_tx.clone(),
+        PACKET_ID_VALIDATOR.clone(),
+    )
+    .expect("tcp server creation failed");
+
+    let mut tm_funnel = TmSinkDynamic::new(sync_tm_tcp_source, tm_sink_rx, tm_server_tx);
+
+    let shared_switch_set = Arc::new(Mutex::default());
+    let (switch_request_tx, switch_request_rx) = mpsc::sync_channel(20);
+    let switch_helper = PowerSwitchHelper::new(switch_request_tx, shared_switch_set.clone());
+
+    let (mgm_handler_mode_reply_to_parent_tx, _mgm_handler_mode_reply_to_parent_rx) =
+        mpsc::sync_channel(5);
+    let shared_mgm_set = Arc::default();
+    let mode_leaf_interface = MpscModeLeafInterface {
+        request_rx: mgm_handler_mode_rx,
+        reply_to_pus_tx: pus_mode_reply_tx.clone(),
+        reply_to_parent_tx: mgm_handler_mode_reply_to_parent_tx,
+    };
+
+    let mgm_spi_interface = if let Some(sim_client) = opt_sim_client.as_mut() {
+        sim_client.add_reply_recipient(satrs_minisim::SimComponent::MgmLis3Mdl, mgm_sim_reply_tx);
+        SpiSimInterfaceWrapper::Sim(SpiSimInterface {
+            sim_request_tx: sim_request_tx.clone(),
+            sim_reply_rx: mgm_sim_reply_rx,
+        })
+    } else {
+        SpiSimInterfaceWrapper::Dummy(SpiDummyInterface::default())
+    };
+    let mut mgm_handler = MgmHandlerLis3Mdl::new(
+        MGM_HANDLER_0,
+        "MGM_0",
+        mode_leaf_interface,
+        mgm_handler_composite_rx,
+        pus_hk_reply_tx.clone(),
+        switch_helper.clone(),
+        tm_sink_tx.clone(),
+        mgm_spi_interface,
+        shared_mgm_set,
+    );
+
+    let (pcdu_handler_mode_reply_to_parent_tx, _pcdu_handler_mode_reply_to_parent_rx) =
+        mpsc::sync_channel(10);
+    let pcdu_mode_leaf_interface = MpscModeLeafInterface {
+        request_rx: pcdu_handler_mode_rx,
+        reply_to_pus_tx: pus_mode_reply_tx,
+        reply_to_parent_tx: pcdu_handler_mode_reply_to_parent_tx,
+    };
+    let pcdu_serial_interface = if let Some(sim_client) = opt_sim_client.as_mut() {
+        sim_client.add_reply_recipient(satrs_minisim::SimComponent::Pcdu, pcdu_sim_reply_tx);
+        SerialSimInterfaceWrapper::Sim(SerialInterfaceToSim::new(
+            sim_request_tx.clone(),
+            pcdu_sim_reply_rx,
+        ))
+    } else {
+        SerialSimInterfaceWrapper::Dummy(SerialInterfaceDummy::default())
+    };
+    let mut pcdu_handler = PcduHandler::new(
+        PCDU_HANDLER,
+        "PCDU",
+        pcdu_mode_leaf_interface,
+        pcdu_handler_composite_rx,
+        pus_hk_reply_tx,
+        switch_request_rx,
+        tm_sink_tx,
+        pcdu_serial_interface,
+        shared_switch_set,
+    );
+    // The PCDU is a critical component which should be in normal mode immediately.
+    pcdu_handler_mode_tx
+        .send(GenericMessage::new(
+            MessageMetadata::new(0, NO_SENDER),
+            ModeRequest::SetMode {
+                mode_and_submode: ModeAndSubmode::new(DeviceMode::Normal as Mode, 0),
+                forced: false,
+            },
+        ))
+        .expect("sending initial mode request failed");
+
+    info!("Starting TMTC and UDP task");
+    let jh_udp_tmtc = thread::Builder::new()
+        .name("sat-rs tmtc-udp".to_string())
+        .spawn(move || {
+            info!("Running UDP server on port {SERVER_PORT}");
+            loop {
+                udp_tmtc_server.periodic_operation();
+                tmtc_task.periodic_operation();
+                thread::sleep(Duration::from_millis(FREQ_MS_UDP_TMTC));
+            }
+        })
+        .unwrap();
+
+    info!("Starting TCP task");
+    let jh_tcp = thread::Builder::new()
+        .name("sat-rs tcp".to_string())
+        .spawn(move || {
+            info!("Running TCP server on port {SERVER_PORT}");
+            loop {
+                tcp_server.periodic_operation();
+            }
+        })
+        .unwrap();
+
+    info!("Starting TM funnel task");
+    let jh_tm_funnel = thread::Builder::new()
+        .name("sat-rs tm-sink".to_string())
+        .spawn(move || loop {
+            tm_funnel.operation();
+        })
+        .unwrap();
+
+    let mut opt_jh_sim_client = None;
+    if let Some(mut sim_client) = opt_sim_client {
+        info!("Starting UDP sim client task");
+        opt_jh_sim_client = Some(
+            thread::Builder::new()
+                .name("sat-rs sim adapter".to_string())
+                .spawn(move || loop {
+                    if sim_client.operation() == HandlingStatus::Empty {
+                        std::thread::sleep(Duration::from_millis(SIM_CLIENT_IDLE_DELAY_MS));
+                    }
+                })
+                .unwrap(),
+        );
+    }
+
+    info!("Starting AOCS thread");
+    let jh_aocs = thread::Builder::new()
+        .name("sat-rs aocs".to_string())
+        .spawn(move || loop {
+            mgm_handler.periodic_operation();
+            thread::sleep(Duration::from_millis(FREQ_MS_AOCS));
+        })
+        .unwrap();
+
+    info!("Starting EPS thread");
+    let jh_eps = thread::Builder::new()
+        .name("sat-rs eps".to_string())
+        .spawn(move || loop {
+            // TODO: We should introduce something like a fixed timeslot helper to allow a more
+            // declarative API. It would also be very useful for the AOCS task.
+            pcdu_handler.periodic_operation(eps::pcdu::OpCode::RegularOp);
+            thread::sleep(Duration::from_millis(50));
+            pcdu_handler.periodic_operation(eps::pcdu::OpCode::PollAndRecvReplies);
+            thread::sleep(Duration::from_millis(50));
+            pcdu_handler.periodic_operation(eps::pcdu::OpCode::PollAndRecvReplies);
+            thread::sleep(Duration::from_millis(300));
+        })
+        .unwrap();
+
+    info!("Starting PUS handler thread");
+    let jh_pus_handler = thread::Builder::new()
+        .name("sat-rs pus".to_string())
+        .spawn(move || loop {
+            pus_stack.periodic_operation();
+            event_handler.periodic_operation();
+            thread::sleep(Duration::from_millis(FREQ_MS_PUS_STACK));
+        })
+        .unwrap();
+
+    jh_udp_tmtc
+        .join()
+        .expect("Joining UDP TMTC server thread failed");
+    jh_tcp
+        .join()
+        .expect("Joining TCP TMTC server thread failed");
+    jh_tm_funnel
+        .join()
+        .expect("Joining TM Funnel thread failed");
+    if let Some(jh_sim_client) = opt_jh_sim_client {
+        jh_sim_client
+            .join()
+            .expect("Joining SIM client thread failed");
+    }
+    jh_aocs.join().expect("Joining AOCS thread failed");
+    jh_eps.join().expect("Joining EPS thread failed");
+    jh_pus_handler
+        .join()
+        .expect("Joining PUS handler thread failed");
+}
+
+fn main() {
+    setup_logger().expect("setting up logging with fern failed");
+    println!("Running OBSW example");
+    #[cfg(not(feature = "dyn_tmtc"))]
+    static_tmtc_pool_main();
+    #[cfg(feature = "dyn_tmtc")]
+    dyn_tmtc_pool_main();
+}

pub fn update_time(time_provider: &mut CdsTime, timestamp: &mut [u8]) {
    time_provider
        .update_from_now()
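Aside (not part of the diff above): the main.rs changes swap between a single, feature-gated TMTC path and separate static/dynamic entry points. The following is a minimal, self-contained sketch of the feature-gating pattern itself, with made-up names (`TmSender`, and the `heap_tmtc` feature flag is only assumed here); it is not the project's actual API.

```rust
use std::sync::mpsc;

/// One sender enum hides whether telemetry is routed through a bounded,
/// pool-style channel or an unbounded, heap-allocating channel.
pub enum TmSender {
    Static(mpsc::SyncSender<usize>), // placeholder for "pool address" style sending
    Heap(mpsc::Sender<Vec<u8>>),     // owned byte vectors
}

impl TmSender {
    pub fn send_tm(&self, packet: &[u8]) {
        match self {
            // A real pool-backed sender would first copy the packet into a
            // shared pool and forward only the resulting store address.
            TmSender::Static(tx) => {
                let _ = tx.send(packet.len());
            }
            TmSender::Heap(tx) => {
                let _ = tx.send(packet.to_vec());
            }
        }
    }
}

fn main() {
    // The backend is chosen at compile time, mirroring the hypothetical
    // `heap_tmtc` switch seen in the diff.
    #[cfg(not(feature = "heap_tmtc"))]
    let sender = {
        let (tx, _rx) = mpsc::sync_channel(50);
        TmSender::Static(tx)
    };
    #[cfg(feature = "heap_tmtc")]
    let sender = {
        let (tx, _rx) = mpsc::channel();
        TmSender::Heap(tx)
    };
    sender.send_tm(&[0x17, 0x2A]);
}
```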
@@ -1,5 +1,6 @@
use log::warn;
use satrs::action::{ActionRequest, ActionRequestVariant};
+use satrs::pool::SharedStaticMemoryPool;
use satrs::pus::action::{
    ActionReplyPus, ActionReplyVariant, ActivePusActionRequestStd, DefaultActiveActionRequestMap,
};
@@ -9,20 +10,21 @@ use satrs::pus::verification::{
    VerificationReportingProvider, VerificationToken,
};
use satrs::pus::{
-    ActiveRequestProvider, EcssTcAndToken, EcssTcInMemConverter, EcssTmSender, EcssTmtcError,
-    GenericConversionError, MpscTcReceiver, PusPacketHandlingError, PusReplyHandler,
-    PusServiceHelper, PusTcToRequestConverter,
+    ActiveRequestProvider, EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter,
+    EcssTcInVecConverter, EcssTmSender, EcssTmtcError, GenericConversionError, MpscTcReceiver,
+    MpscTmAsVecSender, PusPacketHandlingError, PusReplyHandler, PusServiceHelper,
+    PusTcToRequestConverter,
};
use satrs::request::{GenericMessage, UniqueApidTargetId};
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::{EcssEnumU16, PusPacket, PusServiceId};
-use satrs_example::config::pus::PUS_ACTION_SERVICE;
+use satrs::tmtc::{PacketAsVec, PacketSenderWithSharedPool};
+use satrs_example::config::components::PUS_ACTION_SERVICE;
use satrs_example::config::tmtc_err;
use std::sync::mpsc;
use std::time::Duration;

use crate::requests::GenericRequestRouter;
-use crate::tmtc::sender::TmTcSender;

use super::{
    create_verification_reporter, generic_pus_request_timeout_handler, HandlingStatus,
@@ -205,20 +207,20 @@ impl PusTcToRequestConverter<ActivePusActionRequestStd, ActionRequest> for Actio
    }
}

-pub fn create_action_service(
-    tm_sender: TmTcSender,
-    tc_in_mem_converter: EcssTcInMemConverter,
+pub fn create_action_service_static(
+    tm_sender: PacketSenderWithSharedPool,
+    tc_pool: SharedStaticMemoryPool,
    pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
    action_router: GenericRequestRouter,
    reply_receiver: mpsc::Receiver<GenericMessage<ActionReplyPus>>,
-) -> ActionServiceWrapper {
+) -> ActionServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
    let action_request_handler = PusTargetedRequestService::new(
        PusServiceHelper::new(
            PUS_ACTION_SERVICE.id(),
            pus_action_rx,
            tm_sender,
            create_verification_reporter(PUS_ACTION_SERVICE.id(), PUS_ACTION_SERVICE.apid),
-            tc_in_mem_converter,
+            EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048),
        ),
        ActionRequestConverter::default(),
        // TODO: Implementation which does not use run-time allocation? Maybe something like
@@ -233,9 +235,36 @@ pub fn create_action_service(
    }
}

-pub struct ActionServiceWrapper {
+pub fn create_action_service_dynamic(
+    tm_funnel_tx: mpsc::Sender<PacketAsVec>,
+    pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
+    action_router: GenericRequestRouter,
+    reply_receiver: mpsc::Receiver<GenericMessage<ActionReplyPus>>,
+) -> ActionServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
+    let action_request_handler = PusTargetedRequestService::new(
+        PusServiceHelper::new(
+            PUS_ACTION_SERVICE.id(),
+            pus_action_rx,
+            tm_funnel_tx,
+            create_verification_reporter(PUS_ACTION_SERVICE.id(), PUS_ACTION_SERVICE.apid),
+            EcssTcInVecConverter::default(),
+        ),
+        ActionRequestConverter::default(),
+        DefaultActiveActionRequestMap::default(),
+        ActionReplyHandler::default(),
+        action_router,
+        reply_receiver,
+    );
+    ActionServiceWrapper {
+        service: action_request_handler,
+    }
+}
+
+pub struct ActionServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> {
    pub(crate) service: PusTargetedRequestService<
        MpscTcReceiver,
+        TmSender,
+        TcInMemConverter,
        VerificationReporter,
        ActionRequestConverter,
        ActionReplyHandler,
@@ -246,7 +275,9 @@ pub struct ActionServiceWrapper {
    >,
}

-impl TargetedPusService for ActionServiceWrapper {
+impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> TargetedPusService
+    for ActionServiceWrapper<TmSender, TcInMemConverter>
+{
    const SERVICE_ID: u8 = PusServiceId::Action as u8;
    const SERVICE_STR: &'static str = "action";

@@ -272,10 +303,9 @@ mod tests {
    use satrs::pus::test_util::{
        TEST_APID, TEST_COMPONENT_ID_0, TEST_COMPONENT_ID_1, TEST_UNIQUE_ID_0, TEST_UNIQUE_ID_1,
    };
+    use satrs::pus::verification;
    use satrs::pus::verification::test_util::TestVerificationReporter;
-    use satrs::pus::{verification, EcssTcInVecConverter};
    use satrs::request::MessageMetadata;
-    use satrs::tmtc::PacketAsVec;
    use satrs::ComponentId;
    use satrs::{
        res_code::ResultU16,
@@ -308,7 +338,7 @@ mod tests {
    {
        pub fn new_for_action(owner_id: ComponentId, target_id: ComponentId) -> Self {
            let _ = env_logger::builder().is_test(true).try_init();
-            let (tm_funnel_tx, tm_funnel_rx) = mpsc::sync_channel(5);
+            let (tm_funnel_tx, tm_funnel_rx) = mpsc::channel();
            let (pus_action_tx, pus_action_rx) = mpsc::channel();
            let (action_reply_tx, action_reply_rx) = mpsc::channel();
            let (action_req_tx, action_req_rx) = mpsc::sync_channel(10);
@@ -322,9 +352,9 @@ mod tests {
                PusServiceHelper::new(
                    owner_id,
                    pus_action_rx,
-                    TmTcSender::Heap(tm_funnel_tx.clone()),
+                    tm_funnel_tx.clone(),
                    verif_reporter,
-                    EcssTcInMemConverter::Heap(EcssTcInVecConverter::default()),
+                    EcssTcInVecConverter::default(),
                ),
                ActionRequestConverter::default(),
                DefaultActiveActionRequestMap::default(),
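Aside (illustration only): the wrapper change above replaces a wrapper with a hard-coded sender by one that is generic over its sender and converter types, so both builds reuse the same service code. The sketch below shows that pattern with invented trait and type names (`TmSender`, `VecSender`, `ServiceWrapper`); it does not reproduce the satrs traits.

```rust
use std::sync::mpsc;

pub trait TmSender {
    fn send(&self, tm: &[u8]);
}

/// One possible backend: telemetry as owned byte vectors over a std channel.
pub struct VecSender(pub mpsc::Sender<Vec<u8>>);

impl TmSender for VecSender {
    fn send(&self, tm: &[u8]) {
        let _ = self.0.send(tm.to_vec());
    }
}

/// The wrapper no longer names a concrete sender, only the trait bound.
pub struct ServiceWrapper<Sender: TmSender> {
    pub sender: Sender,
}

impl<Sender: TmSender> ServiceWrapper<Sender> {
    pub fn poll(&self) {
        // A real service would convert pending telecommands here and emit
        // verification telemetry through the generic sender.
        self.sender.send(&[0x08, 0x01]);
    }
}

fn main() {
    let (tx, rx) = mpsc::channel();
    let service = ServiceWrapper { sender: VecSender(tx) };
    service.poll();
    assert_eq!(rx.recv().unwrap(), vec![0x08, 0x01]);
}
```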
@@ -1,32 +1,34 @@
use std::sync::mpsc;

use crate::pus::create_verification_reporter;
-use crate::tmtc::sender::TmTcSender;
+use satrs::pool::SharedStaticMemoryPool;
use satrs::pus::event_man::EventRequestWithToken;
use satrs::pus::event_srv::PusEventServiceHandler;
use satrs::pus::verification::VerificationReporter;
use satrs::pus::{
-    DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConverter, MpscTcReceiver,
-    PartialPusHandlingError, PusServiceHelper,
+    DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConverter,
+    EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTmSender, MpscTcReceiver,
+    MpscTmAsVecSender, PartialPusHandlingError, PusServiceHelper,
};
use satrs::spacepackets::ecss::PusServiceId;
-use satrs_example::config::pus::PUS_EVENT_MANAGEMENT;
+use satrs::tmtc::{PacketAsVec, PacketSenderWithSharedPool};
+use satrs_example::config::components::PUS_EVENT_MANAGEMENT;

use super::{DirectPusService, HandlingStatus};

-pub fn create_event_service(
-    tm_sender: TmTcSender,
-    tm_in_pool_converter: EcssTcInMemConverter,
+pub fn create_event_service_static(
+    tm_sender: PacketSenderWithSharedPool,
+    tc_pool: SharedStaticMemoryPool,
    pus_event_rx: mpsc::Receiver<EcssTcAndToken>,
    event_request_tx: mpsc::Sender<EventRequestWithToken>,
-) -> EventServiceWrapper {
+) -> EventServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
    let pus_5_handler = PusEventServiceHandler::new(
        PusServiceHelper::new(
            PUS_EVENT_MANAGEMENT.id(),
            pus_event_rx,
            tm_sender,
            create_verification_reporter(PUS_EVENT_MANAGEMENT.id(), PUS_EVENT_MANAGEMENT.apid),
-            tm_in_pool_converter,
+            EcssTcInSharedStoreConverter::new(tc_pool.clone(), 2048),
        ),
        event_request_tx,
    );
@@ -35,16 +37,34 @@ pub fn create_event_service(
    }
}

-pub struct EventServiceWrapper {
-    pub handler: PusEventServiceHandler<
-        MpscTcReceiver,
-        TmTcSender,
-        EcssTcInMemConverter,
-        VerificationReporter,
-    >,
+pub fn create_event_service_dynamic(
+    tm_funnel_tx: mpsc::Sender<PacketAsVec>,
+    pus_event_rx: mpsc::Receiver<EcssTcAndToken>,
+    event_request_tx: mpsc::Sender<EventRequestWithToken>,
+) -> EventServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
+    let pus_5_handler = PusEventServiceHandler::new(
+        PusServiceHelper::new(
+            PUS_EVENT_MANAGEMENT.id(),
+            pus_event_rx,
+            tm_funnel_tx,
+            create_verification_reporter(PUS_EVENT_MANAGEMENT.id(), PUS_EVENT_MANAGEMENT.apid),
+            EcssTcInVecConverter::default(),
+        ),
+        event_request_tx,
+    );
+    EventServiceWrapper {
+        handler: pus_5_handler,
+    }
}

-impl DirectPusService for EventServiceWrapper {
+pub struct EventServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> {
+    pub handler:
+        PusEventServiceHandler<MpscTcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
+}
+
+impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> DirectPusService
+    for EventServiceWrapper<TmSender, TcInMemConverter>
+{
    const SERVICE_ID: u8 = PusServiceId::Event as u8;

    const SERVICE_STR: &'static str = "events";
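Aside (plain std behaviour, not repository code): several hunks in this compare switch between `mpsc::sync_channel(n)` and `mpsc::channel()`. The difference is bounded versus unbounded queueing, illustrated below.

```rust
use std::sync::mpsc;

fn main() {
    // Unbounded channel: send() only fails once the receiver is dropped, so a
    // slow consumer lets the queue (and memory use) grow without limit.
    let (tx, rx) = mpsc::channel::<u32>();
    for i in 0..1000 {
        tx.send(i).unwrap();
    }
    assert_eq!(rx.try_iter().count(), 1000);

    // Bounded channel: sync_channel(2) holds at most two queued messages; a
    // third try_send() fails instead of blocking, which provides backpressure.
    let (tx, rx) = mpsc::sync_channel::<u32>(2);
    tx.try_send(1).unwrap();
    tx.try_send(2).unwrap();
    assert!(tx.try_send(3).is_err());
    drop(rx);
}
```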
@@ -1,26 +1,28 @@
use derive_new::new;
use satrs::hk::{CollectionIntervalFactor, HkRequest, HkRequestVariant, UniqueId};
+use satrs::pool::SharedStaticMemoryPool;
use satrs::pus::verification::{
    FailParams, TcStateAccepted, TcStateStarted, VerificationReporter,
    VerificationReportingProvider, VerificationToken,
};
use satrs::pus::{
    ActivePusRequestStd, ActiveRequestProvider, DefaultActiveRequestMap, EcssTcAndToken,
-    EcssTcInMemConverter, EcssTmSender, EcssTmtcError, GenericConversionError, MpscTcReceiver,
+    EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTmSender,
+    EcssTmtcError, GenericConversionError, MpscTcReceiver, MpscTmAsVecSender,
    PusPacketHandlingError, PusReplyHandler, PusServiceHelper, PusTcToRequestConverter,
};
use satrs::request::{GenericMessage, UniqueApidTargetId};
use satrs::res_code::ResultU16;
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::{hk, PusPacket, PusServiceId};
-use satrs_example::config::pus::PUS_HK_SERVICE;
+use satrs::tmtc::{PacketAsVec, PacketSenderWithSharedPool};
+use satrs_example::config::components::PUS_HK_SERVICE;
use satrs_example::config::{hk_err, tmtc_err};
use std::sync::mpsc;
use std::time::Duration;

use crate::pus::{create_verification_reporter, generic_pus_request_timeout_handler};
use crate::requests::GenericRequestRouter;
-use crate::tmtc::sender::TmTcSender;

use super::{HandlingStatus, PusTargetedRequestService, TargetedPusService};

@@ -240,20 +242,20 @@ impl PusTcToRequestConverter<ActivePusRequestStd, HkRequest> for HkRequestConver
    }
}

-pub fn create_hk_service(
-    tm_sender: TmTcSender,
-    tc_in_mem_converter: EcssTcInMemConverter,
+pub fn create_hk_service_static(
+    tm_sender: PacketSenderWithSharedPool,
+    tc_pool: SharedStaticMemoryPool,
    pus_hk_rx: mpsc::Receiver<EcssTcAndToken>,
    request_router: GenericRequestRouter,
    reply_receiver: mpsc::Receiver<GenericMessage<HkReply>>,
-) -> HkServiceWrapper {
+) -> HkServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
    let pus_3_handler = PusTargetedRequestService::new(
        PusServiceHelper::new(
            PUS_HK_SERVICE.id(),
            pus_hk_rx,
            tm_sender,
            create_verification_reporter(PUS_HK_SERVICE.id(), PUS_HK_SERVICE.apid),
-            tc_in_mem_converter,
+            EcssTcInSharedStoreConverter::new(tc_pool, 2048),
        ),
        HkRequestConverter::default(),
        DefaultActiveRequestMap::default(),
@@ -266,9 +268,36 @@ pub fn create_hk_service(
    }
}

-pub struct HkServiceWrapper {
+pub fn create_hk_service_dynamic(
+    tm_funnel_tx: mpsc::Sender<PacketAsVec>,
+    pus_hk_rx: mpsc::Receiver<EcssTcAndToken>,
+    request_router: GenericRequestRouter,
+    reply_receiver: mpsc::Receiver<GenericMessage<HkReply>>,
+) -> HkServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
+    let pus_3_handler = PusTargetedRequestService::new(
+        PusServiceHelper::new(
+            PUS_HK_SERVICE.id(),
+            pus_hk_rx,
+            tm_funnel_tx,
+            create_verification_reporter(PUS_HK_SERVICE.id(), PUS_HK_SERVICE.apid),
+            EcssTcInVecConverter::default(),
+        ),
+        HkRequestConverter::default(),
+        DefaultActiveRequestMap::default(),
+        HkReplyHandler::default(),
+        request_router,
+        reply_receiver,
+    );
+    HkServiceWrapper {
+        service: pus_3_handler,
+    }
+}
+
+pub struct HkServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> {
    pub(crate) service: PusTargetedRequestService<
        MpscTcReceiver,
+        TmSender,
+        TcInMemConverter,
        VerificationReporter,
        HkRequestConverter,
        HkReplyHandler,
@@ -279,7 +308,9 @@ pub struct HkServiceWrapper {
    >,
}

-impl TargetedPusService for HkServiceWrapper {
+impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> TargetedPusService
+    for HkServiceWrapper<TmSender, TcInMemConverter>
+{
    const SERVICE_ID: u8 = PusServiceId::Housekeeping as u8;
    const SERVICE_STR: &'static str = "housekeeping";

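Aside (illustration with hypothetical names, not repository code): both main functions above fill a request-router map so that telecommands can be forwarded to per-target sender handles. The minimal idea is a map from a target ID to an mpsc sender, as sketched here.

```rust
use std::collections::HashMap;
use std::sync::mpsc;

type TargetId = u64;

#[derive(Debug)]
struct Request {
    op_code: u32,
}

#[derive(Default)]
struct RequestRouter {
    map: HashMap<TargetId, mpsc::SyncSender<Request>>,
}

impl RequestRouter {
    /// Look up the sender registered for the target and forward the request.
    fn route(&self, target: TargetId, request: Request) -> Result<(), String> {
        match self.map.get(&target) {
            Some(tx) => tx.send(request).map_err(|e| e.to_string()),
            None => Err(format!("no handler registered for target {target}")),
        }
    }
}

fn main() {
    let (tx, rx) = mpsc::sync_channel(8);
    let mut router = RequestRouter::default();
    router.map.insert(0x2001, tx);
    router.route(0x2001, Request { op_code: 1 }).unwrap();
    assert_eq!(rx.recv().unwrap().op_code, 1);
    assert!(router.route(0xdead, Request { op_code: 2 }).is_err());
}
```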
@@ -1,5 +1,4 @@
use crate::requests::GenericRequestRouter;
-use crate::tmtc::sender::TmTcSender;
use log::warn;
use satrs::pool::PoolAddr;
use satrs::pus::verification::{
@@ -7,10 +6,10 @@ use satrs::pus::verification::{
    VerificationReporterCfg, VerificationReportingProvider, VerificationToken,
};
use satrs::pus::{
-    ActiveRequestMapProvider, ActiveRequestProvider, EcssTcAndToken, EcssTcInMemConversionProvider,
-    EcssTcInMemConverter, EcssTcReceiver, EcssTmSender, EcssTmtcError, GenericConversionError,
-    GenericRoutingError, HandlingStatus, PusPacketHandlingError, PusReplyHandler, PusRequestRouter,
-    PusServiceHelper, PusTcToRequestConverter, TcInMemory,
+    ActiveRequestMapProvider, ActiveRequestProvider, EcssTcAndToken, EcssTcInMemConverter,
+    EcssTcReceiver, EcssTmSender, EcssTmtcError, GenericConversionError, GenericRoutingError,
+    HandlingStatus, PusPacketHandlingError, PusReplyHandler, PusRequestRouter, PusServiceHelper,
+    PusTcToRequestConverter, TcInMemory,
};
use satrs::queue::{GenericReceiveError, GenericSendError};
use satrs::request::{Apid, GenericMessage, MessageMetadata};
@@ -18,11 +17,11 @@ use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::{PusPacket, PusServiceId};
use satrs::tmtc::{PacketAsVec, PacketInPool};
use satrs::ComponentId;
-use satrs_example::config::pus::PUS_ROUTING_SERVICE;
+use satrs_example::config::components::PUS_ROUTING_SERVICE;
use satrs_example::config::{tmtc_err, CustomPusServiceId};
use satrs_example::TimestampHelper;
use std::fmt::Debug;
-use std::sync::mpsc;
+use std::sync::mpsc::{self, Sender};

pub mod action;
pub mod event;
@@ -41,26 +40,26 @@ pub fn create_verification_reporter(owner_id: ComponentId, apid: Apid) -> Verifi

/// Simple router structure which forwards PUS telecommands to dedicated handlers.
pub struct PusTcMpscRouter {
-    pub test_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
-    pub event_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
-    pub sched_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
-    pub hk_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
+    pub test_tc_sender: Sender<EcssTcAndToken>,
+    pub event_tc_sender: Sender<EcssTcAndToken>,
+    pub sched_tc_sender: Sender<EcssTcAndToken>,
+    pub hk_tc_sender: Sender<EcssTcAndToken>,
    #[allow(dead_code)]
-    pub action_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
-    pub mode_tc_sender: mpsc::SyncSender<EcssTcAndToken>,
+    pub action_tc_sender: Sender<EcssTcAndToken>,
+    pub mode_tc_sender: Sender<EcssTcAndToken>,
}

-pub struct PusTcDistributor {
+pub struct PusTcDistributor<TmSender: EcssTmSender> {
    #[allow(dead_code)]
    pub id: ComponentId,
-    pub tm_sender: TmTcSender,
+    pub tm_sender: TmSender,
    pub verif_reporter: VerificationReporter,
    pub pus_router: PusTcMpscRouter,
    stamp_helper: TimestampHelper,
}

-impl PusTcDistributor {
-    pub fn new(tm_sender: TmTcSender, pus_router: PusTcMpscRouter) -> Self {
+impl<TmSender: EcssTmSender> PusTcDistributor<TmSender> {
+    pub fn new(tm_sender: TmSender, pus_router: PusTcMpscRouter) -> Self {
        Self {
            id: PUS_ROUTING_SERVICE.raw(),
            tm_sender,
@@ -269,6 +268,8 @@ pub trait DirectPusService {
/// 3. [Self::check_for_request_timeouts] which checks for request timeouts, covering step 7.
pub struct PusTargetedRequestService<
    TcReceiver: EcssTcReceiver,
+    TmSender: EcssTmSender,
+    TcInMemConverter: EcssTcInMemConverter,
    VerificationReporter: VerificationReportingProvider,
    RequestConverter: PusTcToRequestConverter<ActiveRequestInfo, RequestType, Error = GenericConversionError>,
    ReplyHandler: PusReplyHandler<ActiveRequestInfo, ReplyType, Error = EcssTmtcError>,
@@ -278,7 +279,7 @@ pub struct PusTargetedRequestService<
    ReplyType,
> {
    pub service_helper:
-        PusServiceHelper<TcReceiver, TmTcSender, EcssTcInMemConverter, VerificationReporter>,
+        PusServiceHelper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
    pub request_router: GenericRequestRouter,
    pub request_converter: RequestConverter,
    pub active_request_map: ActiveRequestMap,
@@ -289,6 +290,8 @@ pub struct PusTargetedRequestService<

impl<
    TcReceiver: EcssTcReceiver,
+    TmSender: EcssTmSender,
+    TcInMemConverter: EcssTcInMemConverter,
    VerificationReporter: VerificationReportingProvider,
    RequestConverter: PusTcToRequestConverter<ActiveRequestInfo, RequestType, Error = GenericConversionError>,
    ReplyHandler: PusReplyHandler<ActiveRequestInfo, ReplyType, Error = EcssTmtcError>,
@@ -299,6 +302,8 @@ impl<
>
    PusTargetedRequestService<
        TcReceiver,
+        TmSender,
+        TcInMemConverter,
        VerificationReporter,
        RequestConverter,
        ReplyHandler,
@@ -313,8 +318,8 @@ where
    pub fn new(
        service_helper: PusServiceHelper<
|
service_helper: PusServiceHelper<
|
||||||
TcReceiver,
|
TcReceiver,
|
||||||
TmTcSender,
|
TmSender,
|
||||||
EcssTcInMemConverter,
|
TcInMemConverter,
|
||||||
VerificationReporter,
|
VerificationReporter,
|
||||||
>,
|
>,
|
||||||
request_converter: RequestConverter,
|
request_converter: RequestConverter,
|
||||||
@ -540,7 +545,7 @@ pub(crate) mod tests {
|
|||||||
use satrs::{
|
use satrs::{
|
||||||
pus::{
|
pus::{
|
||||||
verification::test_util::TestVerificationReporter, ActivePusRequestStd,
|
verification::test_util::TestVerificationReporter, ActivePusRequestStd,
|
||||||
ActiveRequestMapProvider, MpscTcReceiver,
|
ActiveRequestMapProvider, EcssTcInVecConverter, MpscTcReceiver,
|
||||||
},
|
},
|
||||||
request::UniqueApidTargetId,
|
request::UniqueApidTargetId,
|
||||||
spacepackets::{
|
spacepackets::{
|
||||||
@ -761,6 +766,8 @@ pub(crate) mod tests {
|
|||||||
> {
|
> {
|
||||||
pub service: PusTargetedRequestService<
|
pub service: PusTargetedRequestService<
|
||||||
MpscTcReceiver,
|
MpscTcReceiver,
|
||||||
|
MpscTmAsVecSender,
|
||||||
|
EcssTcInVecConverter,
|
||||||
TestVerificationReporter,
|
TestVerificationReporter,
|
||||||
RequestConverter,
|
RequestConverter,
|
||||||
ReplyHandler,
|
ReplyHandler,
|
||||||
|
@ -1,15 +1,15 @@
|
|||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use satrs::mode_tree::{ModeNode, ModeParent};
|
use satrs::tmtc::{PacketAsVec, PacketSenderWithSharedPool};
|
||||||
use satrs_example::config::pus::PUS_MODE_SERVICE;
|
|
||||||
use std::sync::mpsc;
|
use std::sync::mpsc;
|
||||||
use std::time::Duration;
|
use std::time::Duration;
|
||||||
|
|
||||||
use crate::requests::GenericRequestRouter;
|
use crate::requests::GenericRequestRouter;
|
||||||
use crate::tmtc::sender::TmTcSender;
|
use satrs::pool::SharedStaticMemoryPool;
|
||||||
use satrs::pus::verification::VerificationReporter;
|
use satrs::pus::verification::VerificationReporter;
|
||||||
use satrs::pus::{
|
use satrs::pus::{
|
||||||
DefaultActiveRequestMap, EcssTcAndToken, EcssTcInMemConverter, MpscTcReceiver,
|
DefaultActiveRequestMap, EcssTcAndToken, EcssTcInMemConverter, EcssTcInSharedStoreConverter,
|
||||||
PusPacketHandlingError, PusServiceHelper,
|
EcssTcInVecConverter, MpscTcReceiver, MpscTmAsVecSender, PusPacketHandlingError,
|
||||||
|
PusServiceHelper,
|
||||||
};
|
};
|
||||||
use satrs::request::GenericMessage;
|
use satrs::request::GenericMessage;
|
||||||
use satrs::{
|
use satrs::{
|
||||||
@ -34,6 +34,7 @@ use satrs::{
|
|||||||
},
|
},
|
||||||
ComponentId,
|
ComponentId,
|
||||||
};
|
};
|
||||||
|
use satrs_example::config::components::PUS_MODE_SERVICE;
|
||||||
use satrs_example::config::{mode_err, tmtc_err, CustomPusServiceId};
|
use satrs_example::config::{mode_err, tmtc_err, CustomPusServiceId};
|
||||||
|
|
||||||
use super::{
|
use super::{
|
||||||
@ -208,20 +209,20 @@ impl PusTcToRequestConverter<ActivePusRequestStd, ModeRequest> for ModeRequestCo
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn create_mode_service(
|
pub fn create_mode_service_static(
|
||||||
tm_sender: TmTcSender,
|
tm_sender: PacketSenderWithSharedPool,
|
||||||
tc_in_mem_converter: EcssTcInMemConverter,
|
tc_pool: SharedStaticMemoryPool,
|
||||||
pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
|
pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
|
||||||
mode_router: GenericRequestRouter,
|
mode_router: GenericRequestRouter,
|
||||||
reply_receiver: mpsc::Receiver<GenericMessage<ModeReply>>,
|
reply_receiver: mpsc::Receiver<GenericMessage<ModeReply>>,
|
||||||
) -> ModeServiceWrapper {
|
) -> ModeServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
|
||||||
let mode_request_handler = PusTargetedRequestService::new(
|
let mode_request_handler = PusTargetedRequestService::new(
|
||||||
PusServiceHelper::new(
|
PusServiceHelper::new(
|
||||||
PUS_MODE_SERVICE.id(),
|
PUS_MODE_SERVICE.id(),
|
||||||
pus_action_rx,
|
pus_action_rx,
|
||||||
tm_sender,
|
tm_sender,
|
||||||
create_verification_reporter(PUS_MODE_SERVICE.id(), PUS_MODE_SERVICE.apid),
|
create_verification_reporter(PUS_MODE_SERVICE.id(), PUS_MODE_SERVICE.apid),
|
||||||
tc_in_mem_converter,
|
EcssTcInSharedStoreConverter::new(tc_pool, 2048),
|
||||||
),
|
),
|
||||||
ModeRequestConverter::default(),
|
ModeRequestConverter::default(),
|
||||||
DefaultActiveRequestMap::default(),
|
DefaultActiveRequestMap::default(),
|
||||||
@ -234,9 +235,36 @@ pub fn create_mode_service(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct ModeServiceWrapper {
|
pub fn create_mode_service_dynamic(
|
||||||
|
tm_funnel_tx: mpsc::Sender<PacketAsVec>,
|
||||||
|
pus_action_rx: mpsc::Receiver<EcssTcAndToken>,
|
||||||
|
mode_router: GenericRequestRouter,
|
||||||
|
reply_receiver: mpsc::Receiver<GenericMessage<ModeReply>>,
|
||||||
|
) -> ModeServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
|
||||||
|
let mode_request_handler = PusTargetedRequestService::new(
|
||||||
|
PusServiceHelper::new(
|
||||||
|
PUS_MODE_SERVICE.id(),
|
||||||
|
pus_action_rx,
|
||||||
|
tm_funnel_tx,
|
||||||
|
create_verification_reporter(PUS_MODE_SERVICE.id(), PUS_MODE_SERVICE.apid),
|
||||||
|
EcssTcInVecConverter::default(),
|
||||||
|
),
|
||||||
|
ModeRequestConverter::default(),
|
||||||
|
DefaultActiveRequestMap::default(),
|
||||||
|
ModeReplyHandler::new(PUS_MODE_SERVICE.id()),
|
||||||
|
mode_router,
|
||||||
|
reply_receiver,
|
||||||
|
);
|
||||||
|
ModeServiceWrapper {
|
||||||
|
service: mode_request_handler,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct ModeServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> {
|
||||||
pub(crate) service: PusTargetedRequestService<
|
pub(crate) service: PusTargetedRequestService<
|
||||||
MpscTcReceiver,
|
MpscTcReceiver,
|
||||||
|
TmSender,
|
||||||
|
TcInMemConverter,
|
||||||
VerificationReporter,
|
VerificationReporter,
|
||||||
ModeRequestConverter,
|
ModeRequestConverter,
|
||||||
ModeReplyHandler,
|
ModeReplyHandler,
|
||||||
@ -247,24 +275,9 @@ pub struct ModeServiceWrapper {
|
|||||||
>,
|
>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ModeNode for ModeServiceWrapper {
|
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> TargetedPusService
|
||||||
fn id(&self) -> ComponentId {
|
for ModeServiceWrapper<TmSender, TcInMemConverter>
|
||||||
self.service.service_helper.id()
|
{
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ModeParent for ModeServiceWrapper {
|
|
||||||
type Sender = mpsc::SyncSender<GenericMessage<ModeRequest>>;
|
|
||||||
|
|
||||||
fn add_mode_child(&mut self, id: ComponentId, request_sender: Self::Sender) {
|
|
||||||
self.service
|
|
||||||
.request_router
|
|
||||||
.mode_router_map
|
|
||||||
.insert(id, request_sender);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TargetedPusService for ModeServiceWrapper {
|
|
||||||
const SERVICE_ID: u8 = CustomPusServiceId::Mode as u8;
|
const SERVICE_ID: u8 = CustomPusServiceId::Mode as u8;
|
||||||
const SERVICE_STR: &'static str = "mode";
|
const SERVICE_STR: &'static str = "mode";
|
||||||
|
|
||||||
|
@ -2,28 +2,28 @@ use std::sync::mpsc;
|
|||||||
use std::time::Duration;
|
use std::time::Duration;
|
||||||
|
|
||||||
use crate::pus::create_verification_reporter;
|
use crate::pus::create_verification_reporter;
|
||||||
use crate::tmtc::sender::TmTcSender;
|
|
||||||
use log::info;
|
use log::info;
|
||||||
use satrs::pool::{PoolProvider, StaticMemoryPool};
|
use satrs::pool::{PoolProvider, StaticMemoryPool};
|
||||||
use satrs::pus::scheduler::{PusScheduler, TcInfo};
|
use satrs::pus::scheduler::{PusScheduler, TcInfo};
|
||||||
use satrs::pus::scheduler_srv::PusSchedServiceHandler;
|
use satrs::pus::scheduler_srv::PusSchedServiceHandler;
|
||||||
use satrs::pus::verification::VerificationReporter;
|
use satrs::pus::verification::VerificationReporter;
|
||||||
use satrs::pus::{
|
use satrs::pus::{
|
||||||
DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConverter, MpscTcReceiver,
|
DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConverter,
|
||||||
PartialPusHandlingError, PusServiceHelper,
|
EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTmSender, MpscTcReceiver,
|
||||||
|
MpscTmAsVecSender, PartialPusHandlingError, PusServiceHelper,
|
||||||
};
|
};
|
||||||
use satrs::spacepackets::ecss::PusServiceId;
|
use satrs::spacepackets::ecss::PusServiceId;
|
||||||
use satrs::tmtc::{PacketAsVec, PacketInPool, PacketSenderWithSharedPool};
|
use satrs::tmtc::{PacketAsVec, PacketInPool, PacketSenderWithSharedPool};
|
||||||
use satrs::ComponentId;
|
use satrs::ComponentId;
|
||||||
use satrs_example::config::pus::PUS_SCHED_SERVICE;
|
use satrs_example::config::components::PUS_SCHED_SERVICE;
|
||||||
|
|
||||||
use super::{DirectPusService, HandlingStatus};
|
use super::{DirectPusService, HandlingStatus};
|
||||||
|
|
||||||
pub trait TcReleaseProvider {
|
pub trait TcReleaser {
|
||||||
fn release(&mut self, sender_id: ComponentId, enabled: bool, info: &TcInfo, tc: &[u8]) -> bool;
|
fn release(&mut self, sender_id: ComponentId, enabled: bool, info: &TcInfo, tc: &[u8]) -> bool;
|
||||||
}
|
}
|
||||||
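
The single `release` method is all the scheduler needs from its backend, so alternative sinks are easy to provide. A minimal sketch of a standalone implementor that just buffers released TCs in memory; the trait shape is copied from the lines above, while the `ComponentId`/`TcInfo` stand-ins and the buffering struct are purely illustrative:

```rust
// Sketch only: `ComponentId` and `TcInfo` stand in for the satrs types of the same name.
type ComponentId = u64;

#[derive(Debug, Clone)]
struct TcInfo {
    release_time: u64,
}

trait TcReleaser {
    fn release(&mut self, sender_id: ComponentId, enabled: bool, info: &TcInfo, tc: &[u8]) -> bool;
}

/// Buffers released telecommands instead of forwarding them to a pool or channel.
#[derive(Default)]
struct BufferingReleaser {
    released: Vec<(ComponentId, Vec<u8>)>,
}

impl TcReleaser for BufferingReleaser {
    fn release(&mut self, sender_id: ComponentId, enabled: bool, info: &TcInfo, tc: &[u8]) -> bool {
        if enabled {
            println!("releasing TC scheduled for {}", info.release_time);
            self.released.push((sender_id, tc.to_vec()));
        }
        // Mirror the pattern used by the implementors below: report whether the TC was forwarded.
        enabled
    }
}

fn main() {
    let mut releaser = BufferingReleaser::default();
    assert!(releaser.release(1, true, &TcInfo { release_time: 42 }, &[0x17, 0x2a]));
    assert_eq!(releaser.released.len(), 1);
}
```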
|
|
||||||
impl TcReleaseProvider for PacketSenderWithSharedPool {
|
impl TcReleaser for PacketSenderWithSharedPool {
|
||||||
fn release(
|
fn release(
|
||||||
&mut self,
|
&mut self,
|
||||||
sender_id: ComponentId,
|
sender_id: ComponentId,
|
||||||
@ -48,7 +48,7 @@ impl TcReleaseProvider for PacketSenderWithSharedPool {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TcReleaseProvider for mpsc::SyncSender<PacketAsVec> {
|
impl TcReleaser for mpsc::Sender<PacketAsVec> {
|
||||||
fn release(
|
fn release(
|
||||||
&mut self,
|
&mut self,
|
||||||
sender_id: ComponentId,
|
sender_id: ComponentId,
|
||||||
@ -65,35 +65,23 @@ impl TcReleaseProvider for mpsc::SyncSender<PacketAsVec> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
pub struct SchedulingServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter>
|
||||||
pub enum TcReleaser {
|
{
|
||||||
Static(PacketSenderWithSharedPool),
|
|
||||||
Heap(mpsc::SyncSender<PacketAsVec>),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TcReleaseProvider for TcReleaser {
|
|
||||||
fn release(&mut self, sender_id: ComponentId, enabled: bool, info: &TcInfo, tc: &[u8]) -> bool {
|
|
||||||
match self {
|
|
||||||
TcReleaser::Static(sender) => sender.release(sender_id, enabled, info, tc),
|
|
||||||
TcReleaser::Heap(sender) => sender.release(sender_id, enabled, info, tc),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct SchedulingServiceWrapper {
|
|
||||||
pub pus_11_handler: PusSchedServiceHandler<
|
pub pus_11_handler: PusSchedServiceHandler<
|
||||||
MpscTcReceiver,
|
MpscTcReceiver,
|
||||||
TmTcSender,
|
TmSender,
|
||||||
EcssTcInMemConverter,
|
TcInMemConverter,
|
||||||
VerificationReporter,
|
VerificationReporter,
|
||||||
PusScheduler,
|
PusScheduler,
|
||||||
>,
|
>,
|
||||||
pub sched_tc_pool: StaticMemoryPool,
|
pub sched_tc_pool: StaticMemoryPool,
|
||||||
pub releaser_buf: [u8; 4096],
|
pub releaser_buf: [u8; 4096],
|
||||||
pub tc_releaser: TcReleaser,
|
pub tc_releaser: Box<dyn TcReleaser + Send>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DirectPusService for SchedulingServiceWrapper {
|
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> DirectPusService
|
||||||
|
for SchedulingServiceWrapper<TmSender, TcInMemConverter>
|
||||||
|
{
|
||||||
const SERVICE_ID: u8 = PusServiceId::Verification as u8;
|
const SERVICE_ID: u8 = PusServiceId::Verification as u8;
|
||||||
|
|
||||||
const SERVICE_STR: &'static str = "verification";
|
const SERVICE_STR: &'static str = "verification";
|
||||||
@ -146,7 +134,9 @@ impl DirectPusService for SchedulingServiceWrapper {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SchedulingServiceWrapper {
|
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter>
|
||||||
|
SchedulingServiceWrapper<TmSender, TcInMemConverter>
|
||||||
|
{
|
||||||
pub fn release_tcs(&mut self) {
|
pub fn release_tcs(&mut self) {
|
||||||
let id = self.pus_11_handler.service_helper.id();
|
let id = self.pus_11_handler.service_helper.id();
|
||||||
let releaser = |enabled: bool, info: &TcInfo, tc: &[u8]| -> bool {
|
let releaser = |enabled: bool, info: &TcInfo, tc: &[u8]| -> bool {
|
||||||
@ -172,13 +162,12 @@ impl SchedulingServiceWrapper {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn create_scheduler_service(
|
pub fn create_scheduler_service_static(
|
||||||
tm_sender: TmTcSender,
|
tm_sender: PacketSenderWithSharedPool,
|
||||||
tc_in_mem_converter: EcssTcInMemConverter,
|
tc_releaser: PacketSenderWithSharedPool,
|
||||||
tc_releaser: TcReleaser,
|
|
||||||
pus_sched_rx: mpsc::Receiver<EcssTcAndToken>,
|
pus_sched_rx: mpsc::Receiver<EcssTcAndToken>,
|
||||||
sched_tc_pool: StaticMemoryPool,
|
sched_tc_pool: StaticMemoryPool,
|
||||||
) -> SchedulingServiceWrapper {
|
) -> SchedulingServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
|
||||||
let scheduler = PusScheduler::new_with_current_init_time(Duration::from_secs(5))
|
let scheduler = PusScheduler::new_with_current_init_time(Duration::from_secs(5))
|
||||||
.expect("Creating PUS Scheduler failed");
|
.expect("Creating PUS Scheduler failed");
|
||||||
let pus_11_handler = PusSchedServiceHandler::new(
|
let pus_11_handler = PusSchedServiceHandler::new(
|
||||||
@ -187,7 +176,7 @@ pub fn create_scheduler_service(
|
|||||||
pus_sched_rx,
|
pus_sched_rx,
|
||||||
tm_sender,
|
tm_sender,
|
||||||
create_verification_reporter(PUS_SCHED_SERVICE.id(), PUS_SCHED_SERVICE.apid),
|
create_verification_reporter(PUS_SCHED_SERVICE.id(), PUS_SCHED_SERVICE.apid),
|
||||||
tc_in_mem_converter,
|
EcssTcInSharedStoreConverter::new(tc_releaser.shared_packet_store().0.clone(), 2048),
|
||||||
),
|
),
|
||||||
scheduler,
|
scheduler,
|
||||||
);
|
);
|
||||||
@ -195,6 +184,34 @@ pub fn create_scheduler_service(
|
|||||||
pus_11_handler,
|
pus_11_handler,
|
||||||
sched_tc_pool,
|
sched_tc_pool,
|
||||||
releaser_buf: [0; 4096],
|
releaser_buf: [0; 4096],
|
||||||
tc_releaser,
|
tc_releaser: Box::new(tc_releaser),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create_scheduler_service_dynamic(
|
||||||
|
tm_funnel_tx: mpsc::Sender<PacketAsVec>,
|
||||||
|
tc_source_sender: mpsc::Sender<PacketAsVec>,
|
||||||
|
pus_sched_rx: mpsc::Receiver<EcssTcAndToken>,
|
||||||
|
sched_tc_pool: StaticMemoryPool,
|
||||||
|
) -> SchedulingServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
|
||||||
|
//let sched_srv_receiver =
|
||||||
|
//MpscTcReceiver::new(PUS_SCHED_SERVICE.raw(), "PUS_11_TC_RECV", pus_sched_rx);
|
||||||
|
let scheduler = PusScheduler::new_with_current_init_time(Duration::from_secs(5))
|
||||||
|
.expect("Creating PUS Scheduler failed");
|
||||||
|
let pus_11_handler = PusSchedServiceHandler::new(
|
||||||
|
PusServiceHelper::new(
|
||||||
|
PUS_SCHED_SERVICE.id(),
|
||||||
|
pus_sched_rx,
|
||||||
|
tm_funnel_tx,
|
||||||
|
create_verification_reporter(PUS_SCHED_SERVICE.id(), PUS_SCHED_SERVICE.apid),
|
||||||
|
EcssTcInVecConverter::default(),
|
||||||
|
),
|
||||||
|
scheduler,
|
||||||
|
);
|
||||||
|
SchedulingServiceWrapper {
|
||||||
|
pus_11_handler,
|
||||||
|
sched_tc_pool,
|
||||||
|
releaser_buf: [0; 4096],
|
||||||
|
tc_releaser: Box::new(tc_source_sender),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,6 +1,9 @@
|
|||||||
use crate::pus::mode::ModeServiceWrapper;
|
use crate::pus::mode::ModeServiceWrapper;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use satrs::spacepackets::time::{cds, TimeWriter};
|
use satrs::{
|
||||||
|
pus::{EcssTcInMemConverter, EcssTmSender},
|
||||||
|
spacepackets::time::{cds, TimeWriter},
|
||||||
|
};
|
||||||
|
|
||||||
use super::{
|
use super::{
|
||||||
action::ActionServiceWrapper, event::EventServiceWrapper, hk::HkServiceWrapper,
|
action::ActionServiceWrapper, event::EventServiceWrapper, hk::HkServiceWrapper,
|
||||||
@ -8,17 +11,21 @@ use super::{
|
|||||||
HandlingStatus, TargetedPusService,
|
HandlingStatus, TargetedPusService,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// TODO: For better extensibility, we could create 2 vectors: One for direct PUS services and one
|
||||||
|
// for targeted services..
|
||||||
#[derive(new)]
|
#[derive(new)]
|
||||||
pub struct PusStack {
|
pub struct PusStack<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> {
|
||||||
pub test_srv: TestCustomServiceWrapper,
|
test_srv: TestCustomServiceWrapper<TmSender, TcInMemConverter>,
|
||||||
pub hk_srv_wrapper: HkServiceWrapper,
|
hk_srv_wrapper: HkServiceWrapper<TmSender, TcInMemConverter>,
|
||||||
pub event_srv: EventServiceWrapper,
|
event_srv: EventServiceWrapper<TmSender, TcInMemConverter>,
|
||||||
pub action_srv_wrapper: ActionServiceWrapper,
|
action_srv_wrapper: ActionServiceWrapper<TmSender, TcInMemConverter>,
|
||||||
pub schedule_srv: SchedulingServiceWrapper,
|
schedule_srv: SchedulingServiceWrapper<TmSender, TcInMemConverter>,
|
||||||
pub mode_srv: ModeServiceWrapper,
|
mode_srv: ModeServiceWrapper<TmSender, TcInMemConverter>,
|
||||||
}
|
}
|
||||||
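
The TODO above points at a more extensible layout: rather than one named field per service, keep two collections of trait objects and iterate them in the periodic step. A hedged sketch of that idea with simplified stand-in traits (nothing below exists in the crate, it only illustrates the TODO); the trade-off is dynamic dispatch instead of the generics used by the real stack:

```rust
/// Stand-ins for the DirectPusService / TargetedPusService traits of the example.
trait DirectService {
    fn poll_and_handle_next_tc(&mut self) -> bool;
}

trait TargetedService {
    fn poll_and_handle_next_tc(&mut self) -> bool;
    fn poll_and_handle_next_reply(&mut self) -> bool;
}

/// PUS stack variant sketched in the TODO: two vectors instead of one field per service.
#[derive(Default)]
struct PusStackSketch {
    direct_services: Vec<Box<dyn DirectService>>,
    targeted_services: Vec<Box<dyn TargetedService>>,
}

impl PusStackSketch {
    fn periodic_operation(&mut self) {
        // Drain every service once per main-loop cycle.
        for srv in &mut self.direct_services {
            while srv.poll_and_handle_next_tc() {}
        }
        for srv in &mut self.targeted_services {
            while srv.poll_and_handle_next_tc() {}
            while srv.poll_and_handle_next_reply() {}
        }
    }
}

fn main() {
    // Empty stack: the loop body simply does nothing.
    PusStackSketch::default().periodic_operation();
}
```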
|
|
||||||
impl PusStack {
|
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter>
|
||||||
|
PusStack<TmSender, TcInMemConverter>
|
||||||
|
{
|
||||||
pub fn periodic_operation(&mut self) {
|
pub fn periodic_operation(&mut self) {
|
||||||
// Release all telecommands which reached their release time before calling the service
|
// Release all telecommands which reached their release time before calling the service
|
||||||
// handlers.
|
// handlers.
|
||||||
|
@ -1,34 +1,35 @@
|
|||||||
use crate::pus::create_verification_reporter;
|
use crate::pus::create_verification_reporter;
|
||||||
use crate::tmtc::sender::TmTcSender;
|
|
||||||
use log::info;
|
use log::info;
|
||||||
use satrs::event_man::{EventMessage, EventMessageU32};
|
use satrs::event_man::{EventMessage, EventMessageU32};
|
||||||
|
use satrs::pool::SharedStaticMemoryPool;
|
||||||
use satrs::pus::test::PusService17TestHandler;
|
use satrs::pus::test::PusService17TestHandler;
|
||||||
use satrs::pus::verification::{FailParams, VerificationReporter, VerificationReportingProvider};
|
use satrs::pus::verification::{FailParams, VerificationReporter, VerificationReportingProvider};
|
||||||
use satrs::pus::PartialPusHandlingError;
|
|
||||||
use satrs::pus::{
|
use satrs::pus::{
|
||||||
DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConversionProvider,
|
DirectPusPacketHandlerResult, EcssTcAndToken, EcssTcInMemConverter, EcssTcInVecConverter,
|
||||||
EcssTcInMemConverter, MpscTcReceiver, PusServiceHelper,
|
EcssTmSender, MpscTcReceiver, MpscTmAsVecSender, PusServiceHelper,
|
||||||
};
|
};
|
||||||
|
use satrs::pus::{EcssTcInSharedStoreConverter, PartialPusHandlingError};
|
||||||
use satrs::spacepackets::ecss::tc::PusTcReader;
|
use satrs::spacepackets::ecss::tc::PusTcReader;
|
||||||
use satrs::spacepackets::ecss::{PusPacket, PusServiceId};
|
use satrs::spacepackets::ecss::{PusPacket, PusServiceId};
|
||||||
use satrs_example::config::pus::PUS_TEST_SERVICE;
|
use satrs::tmtc::{PacketAsVec, PacketSenderWithSharedPool};
|
||||||
|
use satrs_example::config::components::PUS_TEST_SERVICE;
|
||||||
use satrs_example::config::{tmtc_err, TEST_EVENT};
|
use satrs_example::config::{tmtc_err, TEST_EVENT};
|
||||||
use std::sync::mpsc;
|
use std::sync::mpsc;
|
||||||
|
|
||||||
use super::{DirectPusService, HandlingStatus};
|
use super::{DirectPusService, HandlingStatus};
|
||||||
|
|
||||||
pub fn create_test_service(
|
pub fn create_test_service_static(
|
||||||
tm_sender: TmTcSender,
|
tm_sender: PacketSenderWithSharedPool,
|
||||||
tc_in_mem_converter: EcssTcInMemConverter,
|
tc_pool: SharedStaticMemoryPool,
|
||||||
event_sender: mpsc::SyncSender<EventMessageU32>,
|
event_sender: mpsc::SyncSender<EventMessageU32>,
|
||||||
pus_test_rx: mpsc::Receiver<EcssTcAndToken>,
|
pus_test_rx: mpsc::Receiver<EcssTcAndToken>,
|
||||||
) -> TestCustomServiceWrapper {
|
) -> TestCustomServiceWrapper<PacketSenderWithSharedPool, EcssTcInSharedStoreConverter> {
|
||||||
let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new(
|
let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new(
|
||||||
PUS_TEST_SERVICE.id(),
|
PUS_TEST_SERVICE.id(),
|
||||||
pus_test_rx,
|
pus_test_rx,
|
||||||
tm_sender,
|
tm_sender,
|
||||||
create_verification_reporter(PUS_TEST_SERVICE.id(), PUS_TEST_SERVICE.apid),
|
create_verification_reporter(PUS_TEST_SERVICE.id(), PUS_TEST_SERVICE.apid),
|
||||||
tc_in_mem_converter,
|
EcssTcInSharedStoreConverter::new(tc_pool, 2048),
|
||||||
));
|
));
|
||||||
TestCustomServiceWrapper {
|
TestCustomServiceWrapper {
|
||||||
handler: pus17_handler,
|
handler: pus17_handler,
|
||||||
@ -36,17 +37,34 @@ pub fn create_test_service(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct TestCustomServiceWrapper {
|
pub fn create_test_service_dynamic(
|
||||||
pub handler: PusService17TestHandler<
|
tm_funnel_tx: mpsc::Sender<PacketAsVec>,
|
||||||
MpscTcReceiver,
|
event_sender: mpsc::SyncSender<EventMessageU32>,
|
||||||
TmTcSender,
|
pus_test_rx: mpsc::Receiver<EcssTcAndToken>,
|
||||||
EcssTcInMemConverter,
|
) -> TestCustomServiceWrapper<MpscTmAsVecSender, EcssTcInVecConverter> {
|
||||||
VerificationReporter,
|
let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new(
|
||||||
>,
|
PUS_TEST_SERVICE.id(),
|
||||||
|
pus_test_rx,
|
||||||
|
tm_funnel_tx,
|
||||||
|
create_verification_reporter(PUS_TEST_SERVICE.id(), PUS_TEST_SERVICE.apid),
|
||||||
|
EcssTcInVecConverter::default(),
|
||||||
|
));
|
||||||
|
TestCustomServiceWrapper {
|
||||||
|
handler: pus17_handler,
|
||||||
|
event_tx: event_sender,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct TestCustomServiceWrapper<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter>
|
||||||
|
{
|
||||||
|
pub handler:
|
||||||
|
PusService17TestHandler<MpscTcReceiver, TmSender, TcInMemConverter, VerificationReporter>,
|
||||||
pub event_tx: mpsc::SyncSender<EventMessageU32>,
|
pub event_tx: mpsc::SyncSender<EventMessageU32>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DirectPusService for TestCustomServiceWrapper {
|
impl<TmSender: EcssTmSender, TcInMemConverter: EcssTcInMemConverter> DirectPusService
|
||||||
|
for TestCustomServiceWrapper<TmSender, TcInMemConverter>
|
||||||
|
{
|
||||||
const SERVICE_ID: u8 = PusServiceId::Test as u8;
|
const SERVICE_ID: u8 = PusServiceId::Test as u8;
|
||||||
|
|
||||||
const SERVICE_STR: &'static str = "test";
|
const SERVICE_STR: &'static str = "test";
|
||||||
|
@ -14,7 +14,7 @@ use satrs::request::{GenericMessage, MessageMetadata, UniqueApidTargetId};
|
|||||||
use satrs::spacepackets::ecss::tc::PusTcReader;
|
use satrs::spacepackets::ecss::tc::PusTcReader;
|
||||||
use satrs::spacepackets::ecss::PusPacket;
|
use satrs::spacepackets::ecss::PusPacket;
|
||||||
use satrs::ComponentId;
|
use satrs::ComponentId;
|
||||||
use satrs_example::config::pus::PUS_ROUTING_SERVICE;
|
use satrs_example::config::components::PUS_ROUTING_SERVICE;
|
||||||
use satrs_example::config::tmtc_err;
|
use satrs_example::config::tmtc_err;
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
|
@ -1,6 +0,0 @@
|
|||||||
use core::fmt::Debug;
|
|
||||||
|
|
||||||
pub trait SpiInterface {
|
|
||||||
type Error: Debug;
|
|
||||||
fn transfer(&mut self, tx: &[u8], rx: &mut [u8]) -> Result<(), Self::Error>;
|
|
||||||
}
|
|
@ -1,3 +1,2 @@
|
|||||||
pub mod sender;
|
|
||||||
pub mod tc_source;
|
pub mod tc_source;
|
||||||
pub mod tm_sink;
|
pub mod tm_sink;
|
||||||
|
@ -1,75 +0,0 @@
|
|||||||
use std::{cell::RefCell, collections::VecDeque, sync::mpsc};
|
|
||||||
|
|
||||||
use satrs::{
|
|
||||||
pus::EcssTmSender,
|
|
||||||
queue::GenericSendError,
|
|
||||||
spacepackets::ecss::WritablePusPacket,
|
|
||||||
tmtc::{PacketAsVec, PacketSenderRaw, PacketSenderWithSharedPool, StoreAndSendError},
|
|
||||||
ComponentId,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Default, Debug, Clone)]
|
|
||||||
pub struct MockSender(pub RefCell<VecDeque<PacketAsVec>>);
|
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub enum TmTcSender {
|
|
||||||
Static(PacketSenderWithSharedPool),
|
|
||||||
Heap(mpsc::SyncSender<PacketAsVec>),
|
|
||||||
Mock(MockSender),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TmTcSender {
|
|
||||||
#[allow(dead_code)]
|
|
||||||
pub fn get_mock_sender(&mut self) -> Option<&mut MockSender> {
|
|
||||||
match self {
|
|
||||||
TmTcSender::Mock(sender) => Some(sender),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl EcssTmSender for TmTcSender {
|
|
||||||
fn send_tm(
|
|
||||||
&self,
|
|
||||||
sender_id: satrs::ComponentId,
|
|
||||||
tm: satrs::pus::PusTmVariant,
|
|
||||||
) -> Result<(), satrs::pus::EcssTmtcError> {
|
|
||||||
match self {
|
|
||||||
TmTcSender::Static(sync_sender) => sync_sender.send_tm(sender_id, tm),
|
|
||||||
TmTcSender::Heap(sync_sender) => match tm {
|
|
||||||
satrs::pus::PusTmVariant::InStore(_) => panic!("can not send TM in store"),
|
|
||||||
satrs::pus::PusTmVariant::Direct(pus_tm_creator) => sync_sender
|
|
||||||
.send(PacketAsVec::new(sender_id, pus_tm_creator.to_vec()?))
|
|
||||||
.map_err(|_| GenericSendError::RxDisconnected.into()),
|
|
||||||
},
|
|
||||||
TmTcSender::Mock(_) => Ok(()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PacketSenderRaw for TmTcSender {
|
|
||||||
type Error = StoreAndSendError;
|
|
||||||
|
|
||||||
fn send_packet(&self, sender_id: ComponentId, packet: &[u8]) -> Result<(), Self::Error> {
|
|
||||||
match self {
|
|
||||||
TmTcSender::Static(packet_sender_with_shared_pool) => {
|
|
||||||
packet_sender_with_shared_pool.send_packet(sender_id, packet)
|
|
||||||
}
|
|
||||||
TmTcSender::Heap(sync_sender) => sync_sender
|
|
||||||
.send_packet(sender_id, packet)
|
|
||||||
.map_err(StoreAndSendError::Send),
|
|
||||||
TmTcSender::Mock(sender) => sender.send_packet(sender_id, packet),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PacketSenderRaw for MockSender {
|
|
||||||
type Error = StoreAndSendError;
|
|
||||||
|
|
||||||
fn send_packet(&self, sender_id: ComponentId, tc_raw: &[u8]) -> Result<(), Self::Error> {
|
|
||||||
let mut mut_queue = self.0.borrow_mut();
|
|
||||||
mut_queue.push_back(PacketAsVec::new(sender_id, tc_raw.to_vec()));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,10 +1,12 @@
|
|||||||
use satrs::{
|
use satrs::{
|
||||||
pool::PoolProvider,
|
pool::PoolProvider,
|
||||||
pus::HandlingStatus,
|
pus::HandlingStatus,
|
||||||
tmtc::{PacketAsVec, PacketInPool, SharedPacketPool},
|
tmtc::{PacketAsVec, PacketInPool, PacketSenderWithSharedPool, SharedPacketPool},
|
||||||
};
|
};
|
||||||
use std::sync::mpsc::{self, TryRecvError};
|
use std::sync::mpsc::{self, TryRecvError};
|
||||||
|
|
||||||
|
use satrs::pus::MpscTmAsVecSender;
|
||||||
|
|
||||||
use crate::pus::PusTcDistributor;
|
use crate::pus::PusTcDistributor;
|
||||||
|
|
||||||
// TC source components where static pools are the backing memory of the received telecommands.
|
// TC source components where static pools are the backing memory of the received telecommands.
|
||||||
@ -12,15 +14,14 @@ pub struct TcSourceTaskStatic {
|
|||||||
shared_tc_pool: SharedPacketPool,
|
shared_tc_pool: SharedPacketPool,
|
||||||
tc_receiver: mpsc::Receiver<PacketInPool>,
|
tc_receiver: mpsc::Receiver<PacketInPool>,
|
||||||
tc_buf: [u8; 4096],
|
tc_buf: [u8; 4096],
|
||||||
pus_distributor: PusTcDistributor,
|
pus_distributor: PusTcDistributor<PacketSenderWithSharedPool>,
|
||||||
}
|
}
|
||||||
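
Both TC source tasks follow the same pattern: drain the telecommand channel without blocking, then hand each packet to the distributor. A hedged sketch of that polling step with plain std types; the distributor below is a stand-in, not the real `PusTcDistributor`:

```rust
use std::sync::mpsc::{self, TryRecvError};

/// Stand-in for PusTcDistributor: just records how many packets it saw.
#[derive(Default)]
struct Distributor {
    handled: usize,
}

impl Distributor {
    fn handle_tc_packet(&mut self, _raw_tc: &[u8]) {
        self.handled += 1;
    }
}

struct TcSourceTask {
    tc_receiver: mpsc::Receiver<Vec<u8>>,
    distributor: Distributor,
}

impl TcSourceTask {
    /// Drain everything that arrived since the last call; called once per main-loop cycle.
    fn periodic_operation(&mut self) {
        loop {
            match self.tc_receiver.try_recv() {
                Ok(packet) => self.distributor.handle_tc_packet(&packet),
                Err(TryRecvError::Empty) => break,
                Err(TryRecvError::Disconnected) => {
                    eprintln!("all TC senders disconnected");
                    break;
                }
            }
        }
    }
}

fn main() {
    let (tx, rx) = mpsc::channel();
    tx.send(vec![0x18, 0x00]).unwrap();
    let mut task = TcSourceTask { tc_receiver: rx, distributor: Distributor::default() };
    task.periodic_operation();
    assert_eq!(task.distributor.handled, 1);
}
```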
|
|
||||||
#[allow(dead_code)]
|
|
||||||
impl TcSourceTaskStatic {
|
impl TcSourceTaskStatic {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
shared_tc_pool: SharedPacketPool,
|
shared_tc_pool: SharedPacketPool,
|
||||||
tc_receiver: mpsc::Receiver<PacketInPool>,
|
tc_receiver: mpsc::Receiver<PacketInPool>,
|
||||||
pus_receiver: PusTcDistributor,
|
pus_receiver: PusTcDistributor<PacketSenderWithSharedPool>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
shared_tc_pool,
|
shared_tc_pool,
|
||||||
@ -66,12 +67,14 @@ impl TcSourceTaskStatic {
|
|||||||
// TC source components where the heap is the backing memory of the received telecommands.
|
// TC source components where the heap is the backing memory of the received telecommands.
|
||||||
pub struct TcSourceTaskDynamic {
|
pub struct TcSourceTaskDynamic {
|
||||||
pub tc_receiver: mpsc::Receiver<PacketAsVec>,
|
pub tc_receiver: mpsc::Receiver<PacketAsVec>,
|
||||||
pus_distributor: PusTcDistributor,
|
pus_distributor: PusTcDistributor<MpscTmAsVecSender>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
impl TcSourceTaskDynamic {
|
impl TcSourceTaskDynamic {
|
||||||
pub fn new(tc_receiver: mpsc::Receiver<PacketAsVec>, pus_receiver: PusTcDistributor) -> Self {
|
pub fn new(
|
||||||
|
tc_receiver: mpsc::Receiver<PacketAsVec>,
|
||||||
|
pus_receiver: PusTcDistributor<MpscTmAsVecSender>,
|
||||||
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
tc_receiver,
|
tc_receiver,
|
||||||
pus_distributor: pus_receiver,
|
pus_distributor: pus_receiver,
|
||||||
@ -102,18 +105,3 @@ impl TcSourceTaskDynamic {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
pub enum TcSourceTask {
|
|
||||||
Static(TcSourceTaskStatic),
|
|
||||||
Heap(TcSourceTaskDynamic),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TcSourceTask {
|
|
||||||
pub fn periodic_operation(&mut self) {
|
|
||||||
match self {
|
|
||||||
TcSourceTask::Static(task) => task.periodic_operation(),
|
|
||||||
TcSourceTask::Heap(task) => task.periodic_operation(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
@ -89,7 +89,6 @@ pub struct TmSinkStatic {
|
|||||||
tm_server_tx: mpsc::SyncSender<PacketInPool>,
|
tm_server_tx: mpsc::SyncSender<PacketInPool>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
impl TmSinkStatic {
|
impl TmSinkStatic {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
shared_tm_store: SharedPacketPool,
|
shared_tm_store: SharedPacketPool,
|
||||||
@ -133,15 +132,14 @@ impl TmSinkStatic {
|
|||||||
pub struct TmSinkDynamic {
|
pub struct TmSinkDynamic {
|
||||||
common: TmFunnelCommon,
|
common: TmFunnelCommon,
|
||||||
tm_funnel_rx: mpsc::Receiver<PacketAsVec>,
|
tm_funnel_rx: mpsc::Receiver<PacketAsVec>,
|
||||||
tm_server_tx: mpsc::SyncSender<PacketAsVec>,
|
tm_server_tx: mpsc::Sender<PacketAsVec>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
impl TmSinkDynamic {
|
impl TmSinkDynamic {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
sync_tm_tcp_source: SyncTcpTmSource,
|
sync_tm_tcp_source: SyncTcpTmSource,
|
||||||
tm_funnel_rx: mpsc::Receiver<PacketAsVec>,
|
tm_funnel_rx: mpsc::Receiver<PacketAsVec>,
|
||||||
tm_server_tx: mpsc::SyncSender<PacketAsVec>,
|
tm_server_tx: mpsc::Sender<PacketAsVec>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
common: TmFunnelCommon::new(sync_tm_tcp_source),
|
common: TmFunnelCommon::new(sync_tm_tcp_source),
|
||||||
@ -164,18 +162,3 @@ impl TmSinkDynamic {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
pub enum TmSink {
|
|
||||||
Static(TmSinkStatic),
|
|
||||||
Heap(TmSinkDynamic),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TmSink {
|
|
||||||
pub fn operation(&mut self) {
|
|
||||||
match self {
|
|
||||||
TmSink::Static(static_sink) => static_sink.operation(),
|
|
||||||
TmSink::Heap(dynamic_sink) => dynamic_sink.operation(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
@ -203,7 +203,7 @@ pub mod tests {
|
|||||||
let sim_reply = sim_testbench.try_receive_next_reply();
|
let sim_reply = sim_testbench.try_receive_next_reply();
|
||||||
assert!(sim_reply.is_some());
|
assert!(sim_reply.is_some());
|
||||||
let sim_reply = sim_reply.unwrap();
|
let sim_reply = sim_reply.unwrap();
|
||||||
assert_eq!(sim_reply.component(), SimComponent::Mgm0Lis3Mdl);
|
assert_eq!(sim_reply.component(), SimComponent::MgmLis3Mdl);
|
||||||
let reply = MgmLis3MdlReply::from_sim_message(&sim_reply)
|
let reply = MgmLis3MdlReply::from_sim_message(&sim_reply)
|
||||||
.expect("failed to deserialize MGM sensor values");
|
.expect("failed to deserialize MGM sensor values");
|
||||||
assert_eq!(reply.common.switch_state, SwitchStateBinary::Off);
|
assert_eq!(reply.common.switch_state, SwitchStateBinary::Off);
|
||||||
@ -226,7 +226,7 @@ pub mod tests {
|
|||||||
let mut sim_reply_res = sim_testbench.try_receive_next_reply();
|
let mut sim_reply_res = sim_testbench.try_receive_next_reply();
|
||||||
assert!(sim_reply_res.is_some());
|
assert!(sim_reply_res.is_some());
|
||||||
let mut sim_reply = sim_reply_res.unwrap();
|
let mut sim_reply = sim_reply_res.unwrap();
|
||||||
assert_eq!(sim_reply.component(), SimComponent::Mgm0Lis3Mdl);
|
assert_eq!(sim_reply.component(), SimComponent::MgmLis3Mdl);
|
||||||
let first_reply = MgmLis3MdlReply::from_sim_message(&sim_reply)
|
let first_reply = MgmLis3MdlReply::from_sim_message(&sim_reply)
|
||||||
.expect("failed to deserialize MGM sensor values");
|
.expect("failed to deserialize MGM sensor values");
|
||||||
sim_testbench.step_until(Duration::from_millis(50)).unwrap();
|
sim_testbench.step_until(Duration::from_millis(50)).unwrap();
|
||||||
|
@ -24,8 +24,7 @@ const PCDU_REQ_WIRETAPPING: bool = false;
|
|||||||
const MGT_REQ_WIRETAPPING: bool = false;
|
const MGT_REQ_WIRETAPPING: bool = false;
|
||||||
|
|
||||||
pub struct ModelAddrWrapper {
|
pub struct ModelAddrWrapper {
|
||||||
mgm_0_addr: Address<MagnetometerModel<MgmLis3MdlReply>>,
|
mgm_addr: Address<MagnetometerModel<MgmLis3MdlReply>>,
|
||||||
mgm_1_addr: Address<MagnetometerModel<MgmLis3MdlReply>>,
|
|
||||||
pcdu_addr: Address<PcduModel>,
|
pcdu_addr: Address<PcduModel>,
|
||||||
mgt_addr: Address<MagnetorquerModel>,
|
mgt_addr: Address<MagnetorquerModel>,
|
||||||
}
|
}
|
||||||
@ -43,14 +42,12 @@ pub struct SimController {
|
|||||||
|
|
||||||
impl ModelAddrWrapper {
|
impl ModelAddrWrapper {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
mgm_0_addr: Address<MagnetometerModel<MgmLis3MdlReply>>,
|
mgm_addr: Address<MagnetometerModel<MgmLis3MdlReply>>,
|
||||||
mgm_1_addr: Address<MagnetometerModel<MgmLis3MdlReply>>,
|
|
||||||
pcdu_addr: Address<PcduModel>,
|
pcdu_addr: Address<PcduModel>,
|
||||||
mgt_addr: Address<MagnetorquerModel>,
|
mgt_addr: Address<MagnetorquerModel>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
mgm_0_addr,
|
mgm_addr,
|
||||||
mgm_1_addr,
|
|
||||||
pcdu_addr,
|
pcdu_addr,
|
||||||
mgt_addr,
|
mgt_addr,
|
||||||
}
|
}
|
||||||
@ -99,8 +96,7 @@ impl SimController {
|
|||||||
}
|
}
|
||||||
if let Err(e) = match request.component() {
|
if let Err(e) = match request.component() {
|
||||||
SimComponent::SimCtrl => self.handle_ctrl_request(&request),
|
SimComponent::SimCtrl => self.handle_ctrl_request(&request),
|
||||||
SimComponent::Mgm0Lis3Mdl => self.handle_mgm_request(0, &request),
|
SimComponent::MgmLis3Mdl => self.handle_mgm_request(&request),
|
||||||
SimComponent::Mgm1Lis3Mdl => self.handle_mgm_request(1, &request),
|
|
||||||
SimComponent::Mgt => self.handle_mgt_request(&request),
|
SimComponent::Mgt => self.handle_mgt_request(&request),
|
||||||
SimComponent::Pcdu => self.handle_pcdu_request(&request),
|
SimComponent::Pcdu => self.handle_pcdu_request(&request),
|
||||||
} {
|
} {
|
||||||
@ -132,25 +128,19 @@ impl SimController {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn handle_mgm_request(
|
fn handle_mgm_request(&mut self, request: &SimRequest) -> Result<(), SimRequestError> {
|
||||||
&mut self,
|
|
||||||
mgm_idx: usize,
|
|
||||||
request: &SimRequest,
|
|
||||||
) -> Result<(), SimRequestError> {
|
|
||||||
let mgm_request = MgmRequestLis3Mdl::from_sim_message(request)?;
|
let mgm_request = MgmRequestLis3Mdl::from_sim_message(request)?;
|
||||||
if MGM_REQ_WIRETAPPING {
|
if MGM_REQ_WIRETAPPING {
|
||||||
log::info!("received MGM request: {:?}", mgm_request);
|
log::info!("received MGM request: {:?}", mgm_request);
|
||||||
}
|
}
|
||||||
match mgm_request {
|
match mgm_request {
|
||||||
MgmRequestLis3Mdl::RequestSensorData => {
|
MgmRequestLis3Mdl::RequestSensorData => {
|
||||||
let addr = match mgm_idx {
|
|
||||||
0 => &self.addr_wrapper.mgm_0_addr,
|
|
||||||
1 => &self.addr_wrapper.mgm_1_addr,
|
|
||||||
|
|
||||||
_ => panic!("invalid mgm index"),
|
|
||||||
};
|
|
||||||
self.simulation
|
self.simulation
|
||||||
.process_event(MagnetometerModel::send_sensor_values, (), addr)
|
.process_event(
|
||||||
|
MagnetometerModel::send_sensor_values,
|
||||||
|
(),
|
||||||
|
&self.addr_wrapper.mgm_addr,
|
||||||
|
)
|
||||||
.expect("event execution error for mgm");
|
.expect("event execution error for mgm");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -14,8 +14,7 @@ pub const SWITCH_INFO_DELAY_MS: u64 = 10;
|
|||||||
|
|
||||||
pub struct PcduModel {
|
pub struct PcduModel {
|
||||||
pub switcher_map: SwitchMapBinaryWrapper,
|
pub switcher_map: SwitchMapBinaryWrapper,
|
||||||
pub mgm_0_switch: Output<SwitchStateBinary>,
|
pub mgm_switch: Output<SwitchStateBinary>,
|
||||||
pub mgm_1_switch: Output<SwitchStateBinary>,
|
|
||||||
pub mgt_switch: Output<SwitchStateBinary>,
|
pub mgt_switch: Output<SwitchStateBinary>,
|
||||||
pub reply_sender: mpsc::Sender<SimReply>,
|
pub reply_sender: mpsc::Sender<SimReply>,
|
||||||
}
|
}
|
||||||
@ -24,8 +23,7 @@ impl PcduModel {
|
|||||||
pub fn new(reply_sender: mpsc::Sender<SimReply>) -> Self {
|
pub fn new(reply_sender: mpsc::Sender<SimReply>) -> Self {
|
||||||
Self {
|
Self {
|
||||||
switcher_map: Default::default(),
|
switcher_map: Default::default(),
|
||||||
mgm_0_switch: Output::new(),
|
mgm_switch: Output::new(),
|
||||||
mgm_1_switch: Output::new(),
|
|
||||||
mgt_switch: Output::new(),
|
mgt_switch: Output::new(),
|
||||||
reply_sender,
|
reply_sender,
|
||||||
}
|
}
|
||||||
@ -57,7 +55,7 @@ impl PcduModel {
|
|||||||
*val = switch_and_target_state.1;
|
*val = switch_and_target_state.1;
|
||||||
match switch_and_target_state.0 {
|
match switch_and_target_state.0 {
|
||||||
PcduSwitch::Mgm => {
|
PcduSwitch::Mgm => {
|
||||||
self.mgm_0_switch.send(switch_and_target_state.1).await;
|
self.mgm_switch.send(switch_and_target_state.1).await;
|
||||||
}
|
}
|
||||||
PcduSwitch::Mgt => {
|
PcduSwitch::Mgt => {
|
||||||
self.mgt_switch.send(switch_and_target_state.1).await;
|
self.mgt_switch.send(switch_and_target_state.1).await;
|
||||||
|
@ -5,8 +5,7 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};
|
|||||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
|
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
|
||||||
pub enum SimComponent {
|
pub enum SimComponent {
|
||||||
SimCtrl,
|
SimCtrl,
|
||||||
Mgm0Lis3Mdl,
|
MgmLis3Mdl,
|
||||||
Mgm1Lis3Mdl,
|
|
||||||
Mgt,
|
Mgt,
|
||||||
Pcdu,
|
Pcdu,
|
||||||
}
|
}
|
||||||
@ -278,7 +277,7 @@ pub mod acs {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl SerializableSimMsgPayload<SimRequest> for MgmRequestLis3Mdl {
|
impl SerializableSimMsgPayload<SimRequest> for MgmRequestLis3Mdl {
|
||||||
const TARGET: SimComponent = SimComponent::Mgm0Lis3Mdl;
|
const TARGET: SimComponent = SimComponent::MgmLis3Mdl;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Normally, small magnetometers generate their output as a signed 16 bit raw format or something
|
// Normally, small magnetometers generate their output as a signed 16 bit raw format or something
|
||||||
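
The comment above refers to the common sensor convention: the device outputs signed 16-bit raw counts which a driver scales to physical field values. A hedged sketch of such a scaling; the ±4 gauss full-scale value and the helper name are illustrative assumptions, not taken from the simulator:

```rust
/// Full-scale range assumed for the sketch: +/- 4 gauss mapped onto the i16 range.
const FIELD_FULL_SCALE_GAUSS: f32 = 4.0;

/// Convert one raw 16-bit magnetometer reading into gauss.
fn raw_to_gauss(raw: i16) -> f32 {
    raw as f32 * FIELD_FULL_SCALE_GAUSS / i16::MAX as f32
}

fn main() {
    // Half of the positive raw range corresponds to roughly half the full-scale field.
    let field = raw_to_gauss(i16::MAX / 2);
    assert!((field - 2.0).abs() < 0.01);
    println!("field: {field:.3} gauss");
}
```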
@ -369,7 +368,7 @@ pub mod acs {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl SerializableSimMsgPayload<SimReply> for MgmLis3MdlReply {
|
impl SerializableSimMsgPayload<SimReply> for MgmLis3MdlReply {
|
||||||
const TARGET: SimComponent = SimComponent::Mgm0Lis3Mdl;
|
const TARGET: SimComponent = SimComponent::MgmLis3Mdl;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl MgmReplyProvider for MgmLis3MdlReply {
|
impl MgmReplyProvider for MgmLis3MdlReply {
|
||||||
@ -419,7 +418,7 @@ pub mod acs {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl SerializableSimMsgPayload<SimReply> for MgtReply {
|
impl SerializableSimMsgPayload<SimReply> for MgtReply {
|
||||||
const TARGET: SimComponent = SimComponent::Mgm0Lis3Mdl;
|
const TARGET: SimComponent = SimComponent::MgmLis3Mdl;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -31,15 +31,11 @@ fn create_sim_controller(
|
|||||||
request_receiver: mpsc::Receiver<SimRequest>,
|
request_receiver: mpsc::Receiver<SimRequest>,
|
||||||
) -> SimController {
|
) -> SimController {
|
||||||
// Instantiate models and their mailboxes.
|
// Instantiate models and their mailboxes.
|
||||||
let mgm_0_model =
|
let mgm_model =
|
||||||
MagnetometerModel::new_for_lis3mdl(Duration::from_millis(50), reply_sender.clone());
|
|
||||||
let mgm_1_model =
|
|
||||||
MagnetometerModel::new_for_lis3mdl(Duration::from_millis(50), reply_sender.clone());
|
MagnetometerModel::new_for_lis3mdl(Duration::from_millis(50), reply_sender.clone());
|
||||||
|
|
||||||
let mgm_0_mailbox = Mailbox::new();
|
let mgm_mailbox = Mailbox::new();
|
||||||
let mgm_0_addr = mgm_0_mailbox.address();
|
let mgm_addr = mgm_mailbox.address();
|
||||||
let mgm_1_mailbox = Mailbox::new();
|
|
||||||
let mgm_1_addr = mgm_1_mailbox.address();
|
|
||||||
let pcdu_mailbox = Mailbox::new();
|
let pcdu_mailbox = Mailbox::new();
|
||||||
let pcdu_addr = pcdu_mailbox.address();
|
let pcdu_addr = pcdu_mailbox.address();
|
||||||
let mgt_mailbox = Mailbox::new();
|
let mgt_mailbox = Mailbox::new();
|
||||||
@ -47,11 +43,8 @@ fn create_sim_controller(
|
|||||||
|
|
||||||
let mut pcdu_model = PcduModel::new(reply_sender.clone());
|
let mut pcdu_model = PcduModel::new(reply_sender.clone());
|
||||||
pcdu_model
|
pcdu_model
|
||||||
.mgm_0_switch
|
.mgm_switch
|
||||||
.connect(MagnetometerModel::switch_device, &mgm_0_addr);
|
.connect(MagnetometerModel::switch_device, &mgm_addr);
|
||||||
pcdu_model
|
|
||||||
.mgm_1_switch
|
|
||||||
.connect(MagnetometerModel::switch_device, &mgm_1_addr);
|
|
||||||
|
|
||||||
let mut mgt_model = MagnetorquerModel::new(reply_sender.clone());
|
let mut mgt_model = MagnetorquerModel::new(reply_sender.clone());
|
||||||
// Input connections.
|
// Input connections.
|
||||||
@ -59,14 +52,9 @@ fn create_sim_controller(
|
|||||||
.mgt_switch
|
.mgt_switch
|
||||||
.connect(MagnetorquerModel::switch_device, &mgt_addr);
|
.connect(MagnetorquerModel::switch_device, &mgt_addr);
|
||||||
// Output connections.
|
// Output connections.
|
||||||
mgt_model.gen_magnetic_field.connect(
|
mgt_model
|
||||||
MagnetometerModel::apply_external_magnetic_field,
|
.gen_magnetic_field
|
||||||
&mgm_0_addr,
|
.connect(MagnetometerModel::apply_external_magnetic_field, &mgm_addr);
|
||||||
);
|
|
||||||
mgt_model.gen_magnetic_field.connect(
|
|
||||||
MagnetometerModel::apply_external_magnetic_field,
|
|
||||||
&mgm_1_addr,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Instantiate the simulator
|
// Instantiate the simulator
|
||||||
let sys_clock = SystemClock::from_system_time(start_time, SystemTime::now());
|
let sys_clock = SystemClock::from_system_time(start_time, SystemTime::now());
|
||||||
@ -75,10 +63,9 @@ fn create_sim_controller(
|
|||||||
} else {
|
} else {
|
||||||
SimInit::new()
|
SimInit::new()
|
||||||
};
|
};
|
||||||
let addrs = ModelAddrWrapper::new(mgm_0_addr, mgm_1_addr, pcdu_addr, mgt_addr);
|
let addrs = ModelAddrWrapper::new(mgm_addr, pcdu_addr, mgt_addr);
|
||||||
let (simulation, scheduler) = sim_init
|
let (simulation, scheduler) = sim_init
|
||||||
.add_model(mgm_0_model, mgm_0_mailbox, "MGM 0 model")
|
.add_model(mgm_model, mgm_mailbox, "MGM model")
|
||||||
.add_model(mgm_1_model, mgm_1_mailbox, "MGM 1 model")
|
|
||||||
.add_model(pcdu_model, pcdu_mailbox, "PCDU model")
|
.add_model(pcdu_model, pcdu_mailbox, "PCDU model")
|
||||||
.add_model(mgt_model, mgt_mailbox, "MGT model")
|
.add_model(mgt_model, mgt_mailbox, "MGT model")
|
||||||
.init(start_time)
|
.init(start_time)
|
||||||
|
@ -8,10 +8,6 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
|
|||||||
|
|
||||||
# [unreleased]
|
# [unreleased]
|
||||||
|
|
||||||
# [v0.3.0-alpha.0] 2025-02-18
|
|
||||||
|
|
||||||
`spacepackets` v0.13
|
|
||||||
|
|
||||||
## Changed
|
## Changed
|
||||||
|
|
||||||
- Renamed `StaticPoolConfig::new` to `StaticPoolConfig::new_from_subpool_cfg_tuples`. The new
|
- Renamed `StaticPoolConfig::new` to `StaticPoolConfig::new_from_subpool_cfg_tuples`. The new
|
||||||
@ -24,17 +20,6 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
|
|||||||
|
|
||||||
- `StaticHeaplessMemoryPool` which can be grown with user-provided static buffers.
|
- `StaticHeaplessMemoryPool` which can be grown with user-provided static buffers.
|
||||||
- Scheduling table for systems with a standard runtime
|
- Scheduling table for systems with a standard runtime
|
||||||
- Mode Tree Feature which allows building a network of mode components which can send mode
|
|
||||||
messages to each other.
|
|
||||||
- Added first helper features like the `SubsystemExecutionHelper` and the
|
|
||||||
`SubsystemCommandingHelper` which allow building subsystem components. Subsystem components
|
|
||||||
are able to execute mode sequences and perform target keeping based on a declarative table
|
|
||||||
format.
|
|
||||||
- Added `DevManagerCommandingHelper` which performs some of the boilerplate logic required
|
|
||||||
by Assembly and Device Management components. This includes forwarding mode requests and
|
|
||||||
handling mode replies.
|
|
||||||
- First basic health module with `HealthState`s and the `HealthTableProvider` trait. These
|
|
||||||
components are important for any FDIR components which get added in the future.
|
|
||||||
|
|
||||||
# [v0.2.1] 2024-05-19
|
# [v0.2.1] 2024-05-19
|
||||||
|
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "satrs"
|
name = "satrs"
|
||||||
version = "0.3.0-alpha.0"
|
version = "0.2.1"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.82.0"
|
rust-version = "1.82.0"
|
||||||
authors = ["Robin Mueller <muellerr@irs.uni-stuttgart.de>"]
|
authors = ["Robin Mueller <muellerr@irs.uni-stuttgart.de>"]
|
||||||
@ -13,29 +13,79 @@ keywords = ["no-std", "space", "aerospace"]
|
|||||||
categories = ["aerospace", "aerospace::space-protocols", "no-std", "hardware-support", "embedded"]
|
categories = ["aerospace", "aerospace::space-protocols", "no-std", "hardware-support", "embedded"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
satrs-shared = ">=0.1.3, <=0.2"
|
|
||||||
delegate = ">0.7, <=0.13"
|
delegate = ">0.7, <=0.13"
|
||||||
paste = "1"
|
paste = "1"
|
||||||
derive-new = ">=0.6, <=0.7"
|
derive-new = ">=0.6, <=0.7"
|
||||||
smallvec = "1"
|
smallvec = "1"
|
||||||
crc = "3"
|
crc = "3"
|
||||||
num_enum = { version = ">0.5, <=0.7", default-features = false }
|
|
||||||
spacepackets = { version = "0.13", default-features = false }
|
|
||||||
cobs = { version = "0.3", default-features = false }
|
|
||||||
num-traits = { version = "0.2", default-features = false }
|
|
||||||
thiserror = { version = "2", default-features = false }
|
|
||||||
|
|
||||||
hashbrown = { version = ">=0.14, <=0.15", optional = true }
|
[dependencies.satrs-shared]
|
||||||
static_cell = { version = "2", optional = true }
|
version = ">=0.1.3, <=0.2"
|
||||||
dyn-clone = { version = "1", optional = true }
|
|
||||||
heapless = { version = "0.8", optional = true }
|
[dependencies.num_enum]
|
||||||
downcast-rs = { version = "2", default-features = false, optional = true }
|
version = ">0.5, <=0.7"
|
||||||
bus = { version = "2.2", optional = true }
|
default-features = false
|
||||||
crossbeam-channel = { version = "0.5", default-features = false, optional = true }
|
|
||||||
serde = { version = "1", default-features = false, optional = true }
|
[dependencies.spacepackets]
|
||||||
socket2 = { version = "0.5", features = ["all"], optional = true }
|
version = "0.13"
|
||||||
mio = { version = "1", features = ["os-poll", "net"], optional = true }
|
default-features = false
|
||||||
defmt = { version = "0.3", optional = true }
|
|
||||||
|
[dependencies.cobs]
|
||||||
|
version = "0.3"
|
||||||
|
default-features = false
|
||||||
|
|
||||||
|
[dependencies.num-traits]
|
||||||
|
version = "0.2"
|
||||||
|
default-features = false
|
||||||
|
|
||||||
|
[dependencies.dyn-clone]
|
||||||
|
version = "1"
|
||||||
|
optional = true
|
||||||
|
|
||||||
|
[dependencies.hashbrown]
|
||||||
|
version = ">=0.14, <=0.15"
|
||||||
|
optional = true
|
||||||
|
|
||||||
|
[dependencies.heapless]
|
||||||
|
version = "0.8"
|
||||||
|
optional = true
|
||||||
|
|
||||||
|
[dependencies.downcast-rs]
|
||||||
|
version = "2"
|
||||||
|
default-features = false
|
||||||
|
optional = true
|
||||||
|
|
||||||
|
[dependencies.bus]
|
||||||
|
version = "2.2"
|
||||||
|
optional = true
|
||||||
|
|
||||||
|
[dependencies.crossbeam-channel]
|
||||||
|
version= "0.5"
|
||||||
|
default-features = false
|
||||||
|
optional = true
|
||||||
|
|
||||||
|
[dependencies.thiserror]
|
||||||
|
version = "2"
|
||||||
|
default-features = false
|
||||||
|
|
||||||
|
[dependencies.serde]
|
||||||
|
version = "1"
|
||||||
|
default-features = false
|
||||||
|
optional = true
|
||||||
|
|
||||||
|
[dependencies.socket2]
|
||||||
|
version = "0.5.4"
|
||||||
|
features = ["all"]
|
||||||
|
optional = true
|
||||||
|
|
||||||
|
[dependencies.mio]
|
||||||
|
version = "1"
|
||||||
|
features = ["os-poll", "net"]
|
||||||
|
optional = true
|
||||||
|
|
||||||
|
[dependencies.defmt]
|
||||||
|
version = "0.3"
|
||||||
|
optional = true
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
serde = "1"
|
serde = "1"
|
||||||
@ -71,7 +121,7 @@ alloc = [
|
|||||||
]
|
]
|
||||||
serde = ["dep:serde", "spacepackets/serde", "satrs-shared/serde"]
|
serde = ["dep:serde", "spacepackets/serde", "satrs-shared/serde"]
|
||||||
crossbeam = ["crossbeam-channel"]
|
crossbeam = ["crossbeam-channel"]
|
||||||
heapless = ["dep:heapless", "static_cell"]
|
heapless = ["dep:heapless"]
|
||||||
defmt = ["dep:defmt", "spacepackets/defmt"]
|
defmt = ["dep:defmt", "spacepackets/defmt"]
|
||||||
test_util = []
|
test_util = []
|
||||||
|
|
||||||
@@ -35,7 +35,7 @@ pub trait DevManagerUserHook: Debug {
         forced: bool,
         children_mode_store: &mut ModeStoreVec,
         mode_req_sender: &impl ModeRequestSender,
-    ) -> Result<(), GenericSendError>;
+    ) -> Result<(), GenericTargetedMessagingError>;

     fn send_mode_cmds_to_children(
         &self,
@@ -44,7 +44,7 @@ pub trait DevManagerUserHook: Debug {
         forced: bool,
         children_mode_store: &mut ModeStoreVec,
         mode_req_sender: &impl ModeRequestSender,
-    ) -> Result<(), GenericSendError>;
+    ) -> Result<(), GenericTargetedMessagingError>;
 }

 #[derive(Debug, Default)]
@@ -58,7 +58,7 @@ impl DevManagerUserHook for TransparentDevManagerHook {
         forced: bool,
         children_mode_store: &mut ModeStoreVec,
         mode_req_sender: &impl ModeRequestSender,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         for child in children_mode_store {
             mode_req_sender.send_mode_request(
                 request_id,
@@ -81,7 +81,7 @@ impl DevManagerUserHook for TransparentDevManagerHook {
         forced: bool,
         children_mode_store: &mut ModeStoreVec,
         mode_req_sender: &impl ModeRequestSender,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         let mut_val = children_mode_store
             .get_mut(target_id)
             .ok_or(GenericSendError::TargetDoesNotExist(target_id))?;
@@ -144,7 +144,7 @@ impl<UserHook: DevManagerUserHook> DevManagerCommandingHelper<UserHook> {
         mode_and_submode: ModeAndSubmode,
         forced: bool,
         mode_req_sender: &impl ModeRequestSender,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.state = DevManagerCommandingState::new_active_cmd(mode_and_submode, request_id);
         self.user_hook.send_mode_cmd_to_child(
             request_id,
@@ -163,7 +163,7 @@ impl<UserHook: DevManagerUserHook> DevManagerCommandingHelper<UserHook> {
         mode_and_submode: ModeAndSubmode,
         forced: bool,
         mode_req_sender: &impl ModeRequestSender,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.state = DevManagerCommandingState::new_active_cmd(mode_and_submode, request_id);
         self.user_hook.send_mode_cmds_to_children(
             request_id,
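The device-management hunks above switch the user hook and commanding helper to the combined GenericTargetedMessagingError return type. As a small illustration that is not part of the diff (it assumes the re-exported path satrs::queue), a caller can still single out the send-side disconnect case through the wrapper's variants, which the request-module tests at the end of this comparison also match on:

```rust
use satrs::queue::{GenericSendError, GenericTargetedMessagingError};

// Illustrative helper only: true if the combined error wraps a send-side "receiver gone" case.
fn is_send_side_disconnect(error: &GenericTargetedMessagingError) -> bool {
    matches!(
        error,
        GenericTargetedMessagingError::Send(GenericSendError::RxDisconnected)
    )
}
```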
@@ -11,7 +11,7 @@ pub use alloc_mod::*;
 pub use std_mod::*;

 use crate::{
-    queue::{GenericReceiveError, GenericSendError},
+    queue::GenericTargetedMessagingError,
     request::{
         GenericMessage, MessageMetadata, MessageReceiverProvider, MessageReceiverWithId, RequestId,
     },
@@ -159,13 +159,13 @@ pub trait ModeRequestSender {
         request_id: RequestId,
         target_id: ComponentId,
         request: ModeRequest,
-    ) -> Result<(), GenericSendError>;
+    ) -> Result<(), GenericTargetedMessagingError>;
 }

 pub trait ModeRequestReceiver {
     fn try_recv_mode_request(
         &self,
-    ) -> Result<Option<GenericMessage<ModeRequest>>, GenericReceiveError>;
+    ) -> Result<Option<GenericMessage<ModeRequest>>, GenericTargetedMessagingError>;
 }

 impl<R: MessageReceiverProvider<ModeRequest>> ModeRequestReceiver
@@ -173,17 +173,15 @@ impl<R: MessageReceiverProvider<ModeRequest>> ModeRequestReceiver
 {
     fn try_recv_mode_request(
         &self,
-    ) -> Result<Option<GenericMessage<ModeRequest>>, GenericReceiveError> {
+    ) -> Result<Option<GenericMessage<ModeRequest>>, GenericTargetedMessagingError> {
         self.try_recv_message()
     }
 }

 #[derive(Debug, Clone, thiserror::Error)]
 pub enum ModeError {
-    #[error("Messaging send error: {0}")]
-    Send(#[from] GenericSendError),
-    #[error("Messaging receive error: {0}")]
-    Receive(#[from] GenericReceiveError),
+    #[error("Messaging error: {0}")]
+    Messaging(#[from] GenericTargetedMessagingError),
     #[error("busy with other mode request")]
     Busy,
 }
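With the two error variants collapsed into ModeError::Messaging and its #[from] conversion, messaging failures can be bubbled up with the ? operator. A hedged sketch, not taken from the diff; the import paths and the send_mode_request name follow the trait usage shown elsewhere in this comparison:

```rust
use satrs::mode::{ModeError, ModeRequest, ModeRequestSender};
use satrs::request::RequestId;
use satrs::ComponentId;

// Sketch: forward a mode request, mapping a busy state to ModeError::Busy and letting #[from]
// turn a GenericTargetedMessagingError from send_mode_request into ModeError::Messaging.
fn forward_mode_request(
    sender: &impl ModeRequestSender,
    request_id: RequestId,
    target_id: ComponentId,
    request: ModeRequest,
    busy: bool,
) -> Result<(), ModeError> {
    if busy {
        return Err(ModeError::Busy);
    }
    sender.send_mode_request(request_id, target_id, request)?;
    Ok(())
}
```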
@@ -256,8 +254,9 @@ pub trait ModeRequestHandler: ModeProvider {
 }

 pub trait ModeReplyReceiver {
-    fn try_recv_mode_reply(&self)
-        -> Result<Option<GenericMessage<ModeReply>>, GenericReceiveError>;
+    fn try_recv_mode_reply(
+        &self,
+    ) -> Result<Option<GenericMessage<ModeReply>>, GenericTargetedMessagingError>;
 }

 impl<R: MessageReceiverProvider<ModeReply>> ModeReplyReceiver
@@ -265,7 +264,7 @@ impl<R: MessageReceiverProvider<ModeReply>> ModeReplyReceiver
 {
     fn try_recv_mode_reply(
         &self,
-    ) -> Result<Option<GenericMessage<ModeReply>>, GenericReceiveError> {
+    ) -> Result<Option<GenericMessage<ModeReply>>, GenericTargetedMessagingError> {
         self.try_recv_message()
     }
 }
@@ -278,17 +277,14 @@ pub trait ModeReplySender {
         &self,
         requestor_info: MessageMetadata,
         reply: ModeReply,
-    ) -> Result<(), GenericSendError>;
+    ) -> Result<(), GenericTargetedMessagingError>;
 }

 #[cfg(feature = "alloc")]
 pub mod alloc_mod {
-    use crate::{
-        queue::{GenericReceiveError, GenericSendError},
-        request::{
+    use crate::request::{
         MessageSenderAndReceiver, MessageSenderMap, MessageSenderProvider,
         MessageSenderStoreProvider, RequestAndReplySenderAndReceiver,
-        },
     };

     use super::*;
@@ -299,7 +295,7 @@ pub mod alloc_mod {
         requestor_info: MessageMetadata,
         target_id: ComponentId,
         request: ModeReply,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.send_message(requestor_info, target_id, request)
     }

@@ -324,7 +320,7 @@ pub mod alloc_mod {
         &self,
         requestor_info: MessageMetadata,
         request: ModeReply,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.message_sender_store.send_message(
             MessageMetadata::new(requestor_info.request_id(), self.local_channel_id()),
             requestor_info.sender_id(),
@@ -343,7 +339,7 @@ pub mod alloc_mod {
 {
     fn try_recv_mode_reply(
         &self,
-    ) -> Result<Option<GenericMessage<ModeReply>>, GenericReceiveError> {
+    ) -> Result<Option<GenericMessage<ModeReply>>, GenericTargetedMessagingError> {
         self.message_receiver.try_recv_message()
     }
 }
@@ -403,7 +399,7 @@ pub mod alloc_mod {
         &self,
         requestor_info: MessageMetadata,
         reply: ModeReply,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.reply_sender_store.send_message(
             MessageMetadata::new(requestor_info.request_id(), self.local_channel_id()),
             requestor_info.sender_id(),
@@ -434,7 +430,7 @@ pub mod alloc_mod {
 {
     fn try_recv_mode_reply(
         &self,
-    ) -> Result<Option<GenericMessage<ModeReply>>, GenericReceiveError> {
+    ) -> Result<Option<GenericMessage<ModeReply>>, GenericTargetedMessagingError> {
         self.reply_receiver.try_recv_message()
     }
 }
@@ -451,7 +447,7 @@ pub mod alloc_mod {
 {
     pub fn try_recv_mode_request(
         &self,
-    ) -> Result<Option<GenericMessage<ModeRequest>>, GenericReceiveError> {
+    ) -> Result<Option<GenericMessage<ModeRequest>>, GenericTargetedMessagingError> {
         self.try_recv_message()
     }

@@ -459,7 +455,7 @@ pub mod alloc_mod {
         &self,
         requestor_info: MessageMetadata,
         reply: ModeReply,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.send_message(
             requestor_info.request_id(),
             requestor_info.sender_id(),
@@ -481,7 +477,7 @@ pub mod alloc_mod {
 {
     pub fn try_recv_mode_reply(
         &self,
-    ) -> Result<Option<GenericMessage<ModeReply>>, GenericReceiveError> {
+    ) -> Result<Option<GenericMessage<ModeReply>>, GenericTargetedMessagingError> {
         self.try_recv_message()
     }

@@ -490,7 +486,7 @@ pub mod alloc_mod {
         request_id: RequestId,
         target_id: ComponentId,
         reply: ModeRequest,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.send_message(request_id, target_id, reply)
     }
 }
@@ -521,7 +517,7 @@ pub mod alloc_mod {
         requestor_info: MessageMetadata,
         target_id: ComponentId,
         request: ModeRequest,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.send_message(requestor_info, target_id, request)
     }

@@ -540,7 +536,7 @@ pub mod alloc_mod {
 {
     fn try_recv_mode_request(
         &self,
-    ) -> Result<Option<GenericMessage<ModeRequest>>, GenericReceiveError> {
+    ) -> Result<Option<GenericMessage<ModeRequest>>, GenericTargetedMessagingError> {
         self.message_receiver.try_recv_message()
     }
 }
@@ -562,7 +558,7 @@ pub mod alloc_mod {
         request_id: RequestId,
         target_id: ComponentId,
         request: ModeRequest,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.message_sender_store.send_message(
             MessageMetadata::new(request_id, self.local_channel_id()),
             target_id,
@@ -626,7 +622,7 @@ pub mod alloc_mod {
         request_id: RequestId,
         target_id: ComponentId,
         request: ModeRequest,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.request_sender_store.send_message(
             MessageMetadata::new(request_id, self.local_channel_id()),
             target_id,
@@ -657,7 +653,7 @@ pub mod alloc_mod {
 {
     fn try_recv_mode_request(
         &self,
-    ) -> Result<Option<GenericMessage<ModeRequest>>, GenericReceiveError> {
+    ) -> Result<Option<GenericMessage<ModeRequest>>, GenericTargetedMessagingError> {
         self.request_receiver.try_recv_message()
     }
 }
@@ -751,7 +747,7 @@ pub(crate) mod tests {
         request_id: RequestId,
         target_id: ComponentId,
         request: ModeRequest,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.requests.borrow_mut().push_back(ModeReqWrapper {
             request_id,
             target_id,
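The remaining mode-module hunks only widen the receiver and sender signatures to the combined error. A hedged usage sketch (not part of the diff, import paths assumed as above) that drains a receiver implementing the trait whose signature is shown in these hunks:

```rust
use satrs::mode::{ModeRequest, ModeRequestReceiver};
use satrs::queue::GenericTargetedMessagingError;
use satrs::request::GenericMessage;

// Sketch: collect all currently pending mode requests; Ok(None) from the trait method simply
// means the underlying queue is empty, so this never blocks.
fn drain_mode_requests(
    receiver: &impl ModeRequestReceiver,
) -> Result<Vec<GenericMessage<ModeRequest>>, GenericTargetedMessagingError> {
    let mut pending = Vec::new();
    while let Some(request) = receiver.try_recv_mode_request()? {
        pending.push(request);
    }
    Ok(pending)
}
```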
@@ -378,27 +378,74 @@ pub struct SubpoolConfig {
 #[cfg(feature = "heapless")]
 pub mod heapless_mod {
     use super::*;
+    use core::cell::UnsafeCell;
+    use core::sync::atomic::{AtomicBool, Ordering};

     #[derive(Debug, Copy, Clone, PartialEq, Eq)]
     #[cfg_attr(feature = "defmt", derive(defmt::Format))]
     pub struct PoolIsFull;

+    #[derive(Debug)]
+    pub struct UnsafeCellBufWrapper<T> {
+        val: UnsafeCell<T>,
+        once: AtomicBool,
+    }
+    // `Sync` is required because `UnsafeCell` is not `Sync` by default.
+    // This is safe as long as access is manually synchronized.
+    unsafe impl<T> Sync for UnsafeCellBufWrapper<T> {}
+
+    impl<T: Sync> UnsafeCellBufWrapper<T> {
+        /// Creates a new wrapper around an arbitrary value which should be [Sync].
+        pub const fn new(v: T) -> Self {
+            unsafe { Self::new_unchecked(v) }
+        }
+    }
+
+    impl<T> UnsafeCellBufWrapper<T> {
+        /// Creates a new wrapper around a buffer.
+        ///
+        /// # Safety
+        ///
+        /// Currently, the [Sync] trait is implemented for all T and ignores the usual [Sync] bound
+        /// on T. This API should only be called for declaring byte buffers statically or if T is
+        /// known to be [Sync]. You can use [new] to let the compiler do the [Sync] check.
+        pub const unsafe fn new_unchecked(v: T) -> Self {
+            Self {
+                val: UnsafeCell::new(v),
+                once: AtomicBool::new(false),
+            }
+        }
+
+        /// Retrieves a mutable reference to the internal value once.
+        ///
+        /// All subsequent calls return None.
+        pub fn get_mut(&self) -> Option<&mut T> {
+            if self.once.load(Ordering::Relaxed) {
+                return None;
+            }
+            // Safety: We ensure that this is only done once with an [AtomicBool].
+            let mut_ref = unsafe { &mut *self.val.get() };
+            self.once.store(true, Ordering::Relaxed);
+            Some(mut_ref)
+        }
+    }
+
     /// Helper macro to generate static buffers for the [crate::pool::StaticHeaplessMemoryPool].
     #[macro_export]
     macro_rules! static_subpool {
         ($pool_name: ident, $sizes_list_name: ident, $num_blocks: expr, $block_size: expr) => {
-            static $pool_name: static_cell::ConstStaticCell<[u8; $num_blocks * $block_size]> =
-                static_cell::ConstStaticCell::new([0; $num_blocks * $block_size]);
-            static $sizes_list_name: static_cell::ConstStaticCell<[usize; $num_blocks]> =
-                static_cell::ConstStaticCell::new([$crate::pool::STORE_FREE; $num_blocks]);
+            static $pool_name: $crate::pool::UnsafeCellBufWrapper<[u8; $num_blocks * $block_size]> =
+                $crate::pool::UnsafeCellBufWrapper::new([0; $num_blocks * $block_size]);
+            static $sizes_list_name: $crate::pool::UnsafeCellBufWrapper<[usize; $num_blocks]> =
+                $crate::pool::UnsafeCellBufWrapper::new([$crate::pool::STORE_FREE; $num_blocks]);
         };
         ($pool_name: ident, $sizes_list_name: ident, $num_blocks: expr, $block_size: expr, $meta_data: meta) => {
             #[$meta_data]
-            static $pool_name: static_cell::ConstStaticCell<[u8; $num_blocks * $block_size]> =
-                static_cell::ConstStaticCell::new([0; $num_blocks * $block_size]);
+            static $pool_name: $crate::pool::UnsafeCellBufWrapper<[u8; $num_blocks * $block_size]> =
+                $crate::pool::UnsafeCellBufWrapper::new([0; $num_blocks * $block_size]);
             #[$meta_data]
-            static $sizes_list_name: static_cell::ConstStaticCell<[usize; $num_blocks]> =
-                static_cell::ConstStaticCell::new([$crate::pool::STORE_FREE; $num_blocks]);
+            static $sizes_list_name: $crate::pool::UnsafeCellBufWrapper<[usize; $num_blocks]> =
+                $crate::pool::UnsafeCellBufWrapper::new([$crate::pool::STORE_FREE; $num_blocks]);
         };
     }

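The new UnsafeCellBufWrapper replaces static_cell::ConstStaticCell as the backing type generated by static_subpool!, which is why the static_cell dependency can be dropped from the heapless feature. A hedged sketch of the intended use, assuming the heapless feature is enabled and mirroring the doc example updated in the next hunk (block counts and sizes are made up):

```rust
use satrs::pool::StaticHeaplessMemoryPool;
use satrs::static_subpool;

// Expands to two statics, SUBPOOL_SMALL and SUBPOOL_SMALL_SIZES, now backed by
// UnsafeCellBufWrapper instead of static_cell::ConstStaticCell (4 blocks of 32 bytes).
static_subpool!(SUBPOOL_SMALL, SUBPOOL_SMALL_SIZES, 4, 32);

fn build_pool() -> StaticHeaplessMemoryPool<1> {
    let mut mem_pool: StaticHeaplessMemoryPool<1> = StaticHeaplessMemoryPool::new(false);
    // get_mut() hands each buffer out exactly once; a second call would return None.
    mem_pool
        .grow(
            SUBPOOL_SMALL.get_mut().unwrap(),
            SUBPOOL_SMALL_SIZES.get_mut().unwrap(),
            4,
            false,
        )
        .unwrap();
    mem_pool
}
```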
@@ -435,14 +482,14 @@ pub mod heapless_mod {
     ///
     /// let mut mem_pool: StaticHeaplessMemoryPool<2> = StaticHeaplessMemoryPool::new(true);
     /// mem_pool.grow(
-    ///     SUBPOOL_SMALL.take(),
-    ///     SUBPOOL_SMALL_SIZES.take(),
+    ///     SUBPOOL_SMALL.get_mut().unwrap(),
+    ///     SUBPOOL_SMALL_SIZES.get_mut().unwrap(),
     ///     SUBPOOL_SMALL_NUM_BLOCKS,
     ///     false
     /// ).unwrap();
     /// mem_pool.grow(
-    ///     SUBPOOL_LARGE.take(),
-    ///     SUBPOOL_LARGE_SIZES.take(),
+    ///     SUBPOOL_LARGE.get_mut().unwrap(),
+    ///     SUBPOOL_LARGE_SIZES.get_mut().unwrap(),
     ///     SUBPOOL_LARGE_NUM_BLOCKS,
     ///     false
     /// ).unwrap();
@@ -775,7 +822,7 @@ mod alloc_mod {
     /// if the next fitting subpool is full. This is useful to ensure the pool remains useful
     /// for all data sizes as long as possible. However, an undesirable side-effect might be
     /// the chocking of larger subpools by underdimensioned smaller subpools.
-    #[derive(Debug, Clone)]
+    #[derive(Clone)]
     pub struct StaticPoolConfig {
         cfg: Vec<SubpoolConfig>,
         spill_to_higher_subpools: bool,
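For the spill_to_higher_subpools flag documented above, a simplified, self-contained illustration of the selection rule (this is explanatory pseudologic, not the library's implementation):

```rust
// Model: subpools sorted by ascending block size, each with a count of free blocks.
// Without spilling, a full best-fit subpool means failure; with spilling, larger
// subpools are tried next, at the risk of choking them with small allocations.
fn pick_subpool(
    block_sizes: &[usize],
    free_blocks: &[usize],
    req_len: usize,
    spill_to_higher_subpools: bool,
) -> Option<usize> {
    let first_fit = block_sizes.iter().position(|&size| size >= req_len)?;
    if free_blocks[first_fit] > 0 {
        return Some(first_fit);
    }
    if !spill_to_higher_subpools {
        return None;
    }
    (first_fit + 1..block_sizes.len()).find(|&idx| free_blocks[idx] > 0)
}
```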
@@ -834,7 +881,6 @@
    /// [address][PoolAddr] type. Adding any data to the pool will yield a store address.
    /// Modification and read operations are done using a reference to a store address. Deletion
    /// will consume the store address.
-   #[derive(Debug)]
    pub struct StaticMemoryPool {
        pool_cfg: StaticPoolConfig,
        pool: Vec<Vec<u8>>,
@@ -1593,11 +1639,9 @@ mod tests {
    const SUBPOOL_1_BLOCK_SIZE: usize = 4;
    const SUBPOOL_1_NUM_ELEMENTS: u16 = 4;

-   static SUBPOOL_1: static_cell::ConstStaticCell<
+   static SUBPOOL_1: UnsafeCellBufWrapper<
        [u8; SUBPOOL_1_NUM_ELEMENTS as usize * SUBPOOL_1_BLOCK_SIZE],
-   > = static_cell::ConstStaticCell::new(
-       [0; SUBPOOL_1_NUM_ELEMENTS as usize * SUBPOOL_1_BLOCK_SIZE],
-   );
+   > = UnsafeCellBufWrapper::new([0; SUBPOOL_1_NUM_ELEMENTS as usize * SUBPOOL_1_BLOCK_SIZE]);

    static SUBPOOL_1_SIZES: Mutex<UnsafeCell<[usize; SUBPOOL_1_NUM_ELEMENTS as usize]>> =
        Mutex::new(UnsafeCell::new(
@@ -1606,14 +1650,11 @@ mod tests {

    const SUBPOOL_2_NUM_ELEMENTS: u16 = 2;
    const SUBPOOL_2_BLOCK_SIZE: usize = 8;
-   static SUBPOOL_2: static_cell::ConstStaticCell<
+   static SUBPOOL_2: UnsafeCellBufWrapper<
        [u8; SUBPOOL_2_NUM_ELEMENTS as usize * SUBPOOL_2_BLOCK_SIZE],
-   > = static_cell::ConstStaticCell::new(
-       [0; SUBPOOL_2_NUM_ELEMENTS as usize * SUBPOOL_2_BLOCK_SIZE],
-   );
-   static SUBPOOL_2_SIZES: static_cell::ConstStaticCell<
-       [usize; SUBPOOL_2_NUM_ELEMENTS as usize],
-   > = static_cell::ConstStaticCell::new([STORE_FREE; SUBPOOL_2_NUM_ELEMENTS as usize]);
+   > = UnsafeCellBufWrapper::new([0; SUBPOOL_2_NUM_ELEMENTS as usize * SUBPOOL_2_BLOCK_SIZE]);
+   static SUBPOOL_2_SIZES: UnsafeCellBufWrapper<[usize; SUBPOOL_2_NUM_ELEMENTS as usize]> =
+       UnsafeCellBufWrapper::new([STORE_FREE; SUBPOOL_2_NUM_ELEMENTS as usize]);

    const SUBPOOL_3_NUM_ELEMENTS: u16 = 1;
    const SUBPOOL_3_BLOCK_SIZE: usize = 16;
@@ -1656,7 +1697,7 @@ mod tests {
        StaticHeaplessMemoryPool::new(false);
    assert!(heapless_pool
        .grow(
-           SUBPOOL_1.take(),
+           SUBPOOL_1.get_mut().unwrap(),
            unsafe { &mut *SUBPOOL_1_SIZES.lock().unwrap().get() },
            SUBPOOL_1_NUM_ELEMENTS,
            true
@@ -1664,16 +1705,16 @@ mod tests {
        .is_ok());
    assert!(heapless_pool
        .grow(
-           SUBPOOL_2.take(),
-           SUBPOOL_2_SIZES.take(),
+           SUBPOOL_2.get_mut().unwrap(),
+           SUBPOOL_2_SIZES.get_mut().unwrap(),
            SUBPOOL_2_NUM_ELEMENTS,
            true
        )
        .is_ok());
    assert!(heapless_pool
        .grow(
-           SUBPOOL_3.take(),
-           SUBPOOL_3_SIZES.take(),
+           SUBPOOL_3.get_mut().unwrap(),
+           SUBPOOL_3_SIZES.get_mut().unwrap(),
            SUBPOOL_3_NUM_ELEMENTS,
            true
        )
@@ -1795,16 +1836,16 @@ mod tests {
        StaticHeaplessMemoryPool::new(true);
    assert!(heapless_pool
        .grow(
-           SUBPOOL_2.take(),
-           SUBPOOL_2_SIZES.take(),
+           SUBPOOL_2.get_mut().unwrap(),
+           SUBPOOL_2_SIZES.get_mut().unwrap(),
            SUBPOOL_2_NUM_ELEMENTS,
            true
        )
        .is_ok());
    assert!(heapless_pool
        .grow(
-           SUBPOOL_4.take(),
-           SUBPOOL_4_SIZES.take(),
+           SUBPOOL_4.get_mut().unwrap(),
+           SUBPOOL_4_SIZES.get_mut().unwrap(),
            SUBPOOL_4_NUM_ELEMENTS,
            true
        )
@@ -1818,16 +1859,16 @@ mod tests {
        StaticHeaplessMemoryPool::new(true);
    assert!(heapless_pool
        .grow(
-           SUBPOOL_5.take(),
-           SUBPOOL_5_SIZES.take(),
+           SUBPOOL_5.get_mut().unwrap(),
+           SUBPOOL_5_SIZES.get_mut().unwrap(),
            SUBPOOL_5_NUM_ELEMENTS,
            true
        )
        .is_ok());
    assert!(heapless_pool
        .grow(
-           SUBPOOL_3.take(),
-           SUBPOOL_3_SIZES.take(),
+           SUBPOOL_3.get_mut().unwrap(),
+           SUBPOOL_3_SIZES.get_mut().unwrap(),
            SUBPOOL_3_NUM_ELEMENTS,
            true
        )
@@ -1841,24 +1882,24 @@ mod tests {
        StaticHeaplessMemoryPool::new(true);
    assert!(heapless_pool
        .grow(
-           SUBPOOL_5.take(),
-           SUBPOOL_5_SIZES.take(),
+           SUBPOOL_5.get_mut().unwrap(),
+           SUBPOOL_5_SIZES.get_mut().unwrap(),
            SUBPOOL_5_NUM_ELEMENTS,
            true
        )
        .is_ok());
    assert!(heapless_pool
        .grow(
-           SUBPOOL_6.take(),
-           SUBPOOL_6_SIZES.take(),
+           SUBPOOL_6.get_mut().unwrap(),
+           SUBPOOL_6_SIZES.get_mut().unwrap(),
            SUBPOOL_6_NUM_ELEMENTS,
            true
        )
        .is_ok());
    assert!(heapless_pool
        .grow(
-           SUBPOOL_3.take(),
-           SUBPOOL_3_SIZES.take(),
+           SUBPOOL_3.get_mut().unwrap(),
+           SUBPOOL_3_SIZES.get_mut().unwrap(),
            SUBPOOL_3_NUM_ELEMENTS,
            true
        )
@@ -1872,24 +1913,24 @@ mod tests {
        StaticHeaplessMemoryPool::new(true);
    assert!(heapless_pool
        .grow(
-           SUBPOOL_5.take(),
-           SUBPOOL_5_SIZES.take(),
+           SUBPOOL_5.get_mut().unwrap(),
+           SUBPOOL_5_SIZES.get_mut().unwrap(),
            SUBPOOL_5_NUM_ELEMENTS,
            true
        )
        .is_ok());
    assert!(heapless_pool
        .grow(
-           SUBPOOL_6.take(),
-           SUBPOOL_6_SIZES.take(),
+           SUBPOOL_6.get_mut().unwrap(),
+           SUBPOOL_6_SIZES.get_mut().unwrap(),
            SUBPOOL_6_NUM_ELEMENTS,
            true
        )
        .is_ok());
    assert!(heapless_pool
        .grow(
-           SUBPOOL_3.take(),
-           SUBPOOL_3_SIZES.take(),
+           SUBPOOL_3.get_mut().unwrap(),
+           SUBPOOL_3_SIZES.get_mut().unwrap(),
            SUBPOOL_3_NUM_ELEMENTS,
            true
        )
@@ -66,7 +66,7 @@ impl GenericActionReplyPus {
 pub mod alloc_mod {
     use crate::{
         action::ActionRequest,
-        queue::{GenericReceiveError, GenericSendError},
+        queue::GenericTargetedMessagingError,
         request::{
             GenericMessage, MessageReceiverProvider, MessageSenderAndReceiver,
             MessageSenderProvider, MessageSenderStoreProvider, RequestId,
@@ -88,7 +88,7 @@ pub mod alloc_mod {
 {
     pub fn try_recv_action_request(
         &self,
-    ) -> Result<Option<GenericMessage<ActionRequest>>, GenericReceiveError> {
+    ) -> Result<Option<GenericMessage<ActionRequest>>, GenericTargetedMessagingError> {
         self.try_recv_message()
     }

@@ -97,7 +97,7 @@ pub mod alloc_mod {
         request_id: RequestId,
         target_id: ComponentId,
         reply: ActionReplyPus,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.send_message(request_id, target_id, reply)
     }
 }
@@ -121,7 +121,7 @@ pub mod alloc_mod {
 {
     pub fn try_recv_action_reply(
         &self,
-    ) -> Result<Option<GenericMessage<ActionReplyPus>>, GenericReceiveError> {
+    ) -> Result<Option<GenericMessage<ActionReplyPus>>, GenericTargetedMessagingError> {
         self.try_recv_message()
     }

@@ -130,7 +130,7 @@ pub mod alloc_mod {
         request_id: RequestId,
         target_id: ComponentId,
         request: ActionRequest,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.send_message(request_id, target_id, request)
     }
 }
@@ -9,14 +9,14 @@ use std::sync::mpsc::Sender;

 use super::verification::VerificationReportingProvider;
 use super::{
-    EcssTcInMemConversionProvider, EcssTcReceiver, EcssTmSender, GenericConversionError,
+    EcssTcInMemConverter, EcssTcReceiver, EcssTmSender, GenericConversionError,
     GenericRoutingError, HandlingStatus, PusServiceHelper,
 };

 pub struct PusEventServiceHandler<
     TcReceiver: EcssTcReceiver,
     TmSender: EcssTmSender,
-    TcInMemConverter: EcssTcInMemConversionProvider,
+    TcInMemConverter: EcssTcInMemConverter,
     VerificationReporter: VerificationReportingProvider,
 > {
     pub service_helper:
@@ -27,7 +27,7 @@ pub struct PusEventServiceHandler<
 impl<
     TcReceiver: EcssTcReceiver,
     TmSender: EcssTmSender,
-    TcInMemConverter: EcssTcInMemConversionProvider,
+    TcInMemConverter: EcssTcInMemConverter,
     VerificationReporter: VerificationReportingProvider,
 > PusEventServiceHandler<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
 {
@@ -170,7 +170,7 @@ mod tests {
         event_man::EventRequestWithToken,
         tests::PusServiceHandlerWithSharedStoreCommon,
         verification::{TcStateAccepted, VerificationToken},
-        DirectPusPacketHandlerResult, EcssTcInSharedPoolConverter, PusPacketHandlingError,
+        DirectPusPacketHandlerResult, EcssTcInSharedStoreConverter, PusPacketHandlingError,
     },
 };

@@ -183,7 +183,7 @@ mod tests {
     handler: PusEventServiceHandler<
         MpscTcReceiver,
         PacketSenderWithSharedPool,
-        EcssTcInSharedPoolConverter,
+        EcssTcInSharedStoreConverter,
         VerificationReporter,
     >,
 }
@@ -947,7 +947,7 @@ pub mod std_mod {
         }
     }

-    pub trait EcssTcInMemConversionProvider {
+    pub trait EcssTcInMemConverter {
         fn cache(&mut self, possible_packet: &TcInMemory) -> Result<(), PusTcFromMemError>;

         fn tc_slice_raw(&self) -> &[u8];
@@ -980,7 +980,7 @@ pub mod std_mod {
         pub pus_tc_raw: Option<Vec<u8>>,
     }

-    impl EcssTcInMemConversionProvider for EcssTcInVecConverter {
+    impl EcssTcInMemConverter for EcssTcInVecConverter {
         fn cache(&mut self, tc_in_memory: &TcInMemory) -> Result<(), PusTcFromMemError> {
             self.pus_tc_raw = None;
             match tc_in_memory {
@@ -1011,25 +1011,24 @@ pub mod std_mod {
     /// [SharedStaticMemoryPool] structure. This is useful if run-time allocation for these
     /// packets should be avoided. Please note that this structure is not able to convert TCs which
     /// are stored as a `Vec<u8>`.
-    #[derive(Clone)]
-    pub struct EcssTcInSharedPoolConverter {
+    pub struct EcssTcInSharedStoreConverter {
         sender_id: Option<ComponentId>,
-        shared_tc_pool: SharedStaticMemoryPool,
+        shared_tc_store: SharedStaticMemoryPool,
         pus_buf: Vec<u8>,
     }

-    impl EcssTcInSharedPoolConverter {
+    impl EcssTcInSharedStoreConverter {
         pub fn new(shared_tc_store: SharedStaticMemoryPool, max_expected_tc_size: usize) -> Self {
             Self {
                 sender_id: None,
-                shared_tc_pool: shared_tc_store,
+                shared_tc_store,
                 pus_buf: alloc::vec![0; max_expected_tc_size],
             }
         }

         pub fn copy_tc_to_buf(&mut self, addr: PoolAddr) -> Result<(), PusTcFromMemError> {
             // Keep locked section as short as possible.
-            let mut tc_pool = self.shared_tc_pool.write().map_err(|_| {
+            let mut tc_pool = self.shared_tc_store.write().map_err(|_| {
                 PusTcFromMemError::EcssTmtc(EcssTmtcError::Store(PoolError::LockError))
             })?;
             let tc_size = tc_pool.len_of_data(&addr).map_err(EcssTmtcError::Store)?;
@@ -1049,7 +1048,7 @@ pub mod std_mod {
         }
     }

-    impl EcssTcInMemConversionProvider for EcssTcInSharedPoolConverter {
+    impl EcssTcInMemConverter for EcssTcInSharedStoreConverter {
         fn cache(&mut self, tc_in_memory: &TcInMemory) -> Result<(), PusTcFromMemError> {
             match tc_in_memory {
                 super::TcInMemory::Pool(packet_in_pool) => {
@@ -1072,44 +1071,6 @@ pub mod std_mod {
         }
     }

-    // TODO: alloc feature flag?
-    #[derive(Clone)]
-    pub enum EcssTcInMemConverter {
-        Static(EcssTcInSharedPoolConverter),
-        Heap(EcssTcInVecConverter),
-    }
-
-    impl EcssTcInMemConverter {
-        pub fn new_static(static_store_converter: EcssTcInSharedPoolConverter) -> Self {
-            EcssTcInMemConverter::Static(static_store_converter)
-        }
-
-        pub fn new_heap(heap_converter: EcssTcInVecConverter) -> Self {
-            EcssTcInMemConverter::Heap(heap_converter)
-        }
-    }
-
-    impl EcssTcInMemConversionProvider for EcssTcInMemConverter {
-        fn cache(&mut self, tc_in_memory: &TcInMemory) -> Result<(), PusTcFromMemError> {
-            match self {
-                EcssTcInMemConverter::Static(converter) => converter.cache(tc_in_memory),
-                EcssTcInMemConverter::Heap(converter) => converter.cache(tc_in_memory),
-            }
-        }
-        fn tc_slice_raw(&self) -> &[u8] {
-            match self {
-                EcssTcInMemConverter::Static(converter) => converter.tc_slice_raw(),
-                EcssTcInMemConverter::Heap(converter) => converter.tc_slice_raw(),
-            }
-        }
-        fn sender_id(&self) -> Option<ComponentId> {
-            match self {
-                EcssTcInMemConverter::Static(converter) => converter.sender_id(),
-                EcssTcInMemConverter::Heap(converter) => converter.sender_id(),
-            }
-        }
-    }
-
     pub struct PusServiceBase<
         TcReceiver: EcssTcReceiver,
         TmSender: EcssTmSender,
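This branch removes the enum that wrapped the two converter flavors; code that has to work with either one stays generic over the trait instead, exactly like the service helpers below. A hedged sketch, assuming the usual re-exports from the pus module:

```rust
use satrs::pus::{EcssTcInMemConverter, PusTcFromMemError, TcInMemory};

// Sketch: accepts EcssTcInVecConverter as well as EcssTcInSharedStoreConverter because it is
// generic over the trait; it caches a packet and reports how many bytes were cached.
fn cache_and_measure<Converter: EcssTcInMemConverter>(
    converter: &mut Converter,
    tc_in_memory: &TcInMemory,
) -> Result<usize, PusTcFromMemError> {
    converter.cache(tc_in_memory)?;
    Ok(converter.tc_slice_raw().len())
}
```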
@@ -1133,7 +1094,7 @@ pub mod std_mod {
     pub struct PusServiceHelper<
         TcReceiver: EcssTcReceiver,
         TmSender: EcssTmSender,
-        TcInMemConverter: EcssTcInMemConversionProvider,
+        TcInMemConverter: EcssTcInMemConverter,
         VerificationReporter: VerificationReportingProvider,
     > {
         pub common: PusServiceBase<TcReceiver, TmSender, VerificationReporter>,
@@ -1143,7 +1104,7 @@ pub mod std_mod {
     impl<
         TcReceiver: EcssTcReceiver,
         TmSender: EcssTmSender,
-        TcInMemConverter: EcssTcInMemConversionProvider,
+        TcInMemConverter: EcssTcInMemConverter,
         VerificationReporter: VerificationReportingProvider,
     > PusServiceHelper<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
     {
@@ -1386,7 +1347,7 @@ pub mod tests {
     pub type PusServiceHelperStatic = PusServiceHelper<
         MpscTcReceiver,
         PacketSenderWithSharedPool,
-        EcssTcInSharedPoolConverter,
+        EcssTcInSharedStoreConverter,
         VerificationReporter,
     >;

@@ -1413,7 +1374,8 @@ pub mod tests {
             VerificationReporter::new(TEST_COMPONENT_ID_0.id(), &verif_cfg);
         let test_srv_tm_sender =
             PacketSenderWithSharedPool::new(tm_tx, shared_tm_pool_wrapper.clone());
-        let in_store_converter = EcssTcInSharedPoolConverter::new(shared_tc_pool.clone(), 2048);
+        let in_store_converter =
+            EcssTcInSharedStoreConverter::new(shared_tc_pool.clone(), 2048);
         (
             Self {
                 pus_buf: RefCell::new([0; 2048]),
@@ -1,7 +1,7 @@
 use super::scheduler::PusSchedulerProvider;
 use super::verification::{VerificationReporter, VerificationReportingProvider};
 use super::{
-    DirectPusPacketHandlerResult, EcssTcInMemConversionProvider, EcssTcInSharedPoolConverter,
+    DirectPusPacketHandlerResult, EcssTcInMemConverter, EcssTcInSharedStoreConverter,
     EcssTcInVecConverter, EcssTcReceiver, EcssTmSender, HandlingStatus, MpscTcReceiver,
     PartialPusHandlingError, PusServiceHelper,
 };
@@ -24,7 +24,7 @@ use std::sync::mpsc;
 pub struct PusSchedServiceHandler<
     TcReceiver: EcssTcReceiver,
     TmSender: EcssTmSender,
-    TcInMemConverter: EcssTcInMemConversionProvider,
+    TcInMemConverter: EcssTcInMemConverter,
     VerificationReporter: VerificationReportingProvider,
     PusScheduler: PusSchedulerProvider,
 > {
@@ -36,7 +36,7 @@ pub struct PusSchedServiceHandler<
 impl<
     TcReceiver: EcssTcReceiver,
     TmSender: EcssTmSender,
-    TcInMemConverter: EcssTcInMemConversionProvider,
+    TcInMemConverter: EcssTcInMemConverter,
     VerificationReporter: VerificationReportingProvider,
     Scheduler: PusSchedulerProvider,
 >
@@ -229,7 +229,7 @@ pub type PusService11SchedHandlerDynWithBoundedMpsc<PusScheduler> = PusSchedServ
 pub type PusService11SchedHandlerStaticWithMpsc<PusScheduler> = PusSchedServiceHandler<
     MpscTcReceiver,
     PacketSenderWithSharedPool,
-    EcssTcInSharedPoolConverter,
+    EcssTcInSharedStoreConverter,
     VerificationReporter,
     PusScheduler,
 >;
@@ -238,7 +238,7 @@ pub type PusService11SchedHandlerStaticWithMpsc<PusScheduler> = PusSchedServiceH
 pub type PusService11SchedHandlerStaticWithBoundedMpsc<PusScheduler> = PusSchedServiceHandler<
     MpscTcReceiver,
     PacketSenderWithSharedPool,
-    EcssTcInSharedPoolConverter,
+    EcssTcInSharedStoreConverter,
     VerificationReporter,
     PusScheduler,
 >;
@@ -253,7 +253,7 @@ mod tests {
         scheduler::{self, PusSchedulerProvider, TcInfo},
         tests::PusServiceHandlerWithSharedStoreCommon,
         verification::{RequestId, TcStateAccepted, VerificationToken},
-        EcssTcInSharedPoolConverter,
+        EcssTcInSharedStoreConverter,
     };
     use crate::pus::{DirectPusPacketHandlerResult, MpscTcReceiver, PusPacketHandlingError};
     use crate::tmtc::PacketSenderWithSharedPool;
@@ -276,7 +276,7 @@ mod tests {
     handler: PusSchedServiceHandler<
         MpscTcReceiver,
         PacketSenderWithSharedPool,
-        EcssTcInSharedPoolConverter,
+        EcssTcInSharedStoreConverter,
         VerificationReporter,
         TestScheduler,
     >,
@@ -9,9 +9,8 @@ use std::sync::mpsc;

 use super::verification::{VerificationReporter, VerificationReportingProvider};
 use super::{
-    EcssTcInMemConversionProvider, EcssTcInSharedPoolConverter, EcssTcInVecConverter,
-    EcssTcReceiver, EcssTmSender, GenericConversionError, HandlingStatus, MpscTcReceiver,
-    PusServiceHelper,
+    EcssTcInMemConverter, EcssTcInSharedStoreConverter, EcssTcInVecConverter, EcssTcReceiver,
+    EcssTmSender, GenericConversionError, HandlingStatus, MpscTcReceiver, PusServiceHelper,
 };

 /// This is a helper class for [std] environments to handle generic PUS 17 (test service) packets.
@@ -19,7 +18,7 @@ use super::{
 pub struct PusService17TestHandler<
     TcReceiver: EcssTcReceiver,
     TmSender: EcssTmSender,
-    TcInMemConverter: EcssTcInMemConversionProvider,
+    TcInMemConverter: EcssTcInMemConverter,
     VerificationReporter: VerificationReportingProvider,
 > {
     pub service_helper:
@@ -29,7 +28,7 @@ pub struct PusService17TestHandler<
 impl<
     TcReceiver: EcssTcReceiver,
     TmSender: EcssTmSender,
-    TcInMemConverter: EcssTcInMemConversionProvider,
+    TcInMemConverter: EcssTcInMemConverter,
     VerificationReporter: VerificationReportingProvider,
 > PusService17TestHandler<TcReceiver, TmSender, TcInMemConverter, VerificationReporter>
 {
@@ -128,7 +127,7 @@ pub type PusService17TestHandlerDynWithBoundedMpsc = PusService17TestHandler<
 pub type PusService17TestHandlerStaticWithBoundedMpsc = PusService17TestHandler<
     MpscTcReceiver,
     PacketSenderWithSharedPool,
-    EcssTcInSharedPoolConverter,
+    EcssTcInSharedStoreConverter,
     VerificationReporter,
 >;

@@ -143,7 +142,7 @@ mod tests {
     };
     use crate::pus::verification::{TcStateAccepted, VerificationToken};
     use crate::pus::{
-        DirectPusPacketHandlerResult, EcssTcInSharedPoolConverter, EcssTcInVecConverter,
+        DirectPusPacketHandlerResult, EcssTcInSharedStoreConverter, EcssTcInVecConverter,
         GenericConversionError, HandlingStatus, MpscTcReceiver, MpscTmAsVecSender,
         PartialPusHandlingError, PusPacketHandlingError,
     };
@@ -163,7 +162,7 @@ mod tests {
     handler: PusService17TestHandler<
         MpscTcReceiver,
         PacketSenderWithSharedPool,
-        EcssTcInSharedPoolConverter,
+        EcssTcInSharedStoreConverter,
         VerificationReporter,
     >,
 }
@@ -13,10 +13,7 @@ use spacepackets::{
     ByteConversionError,
 };

-use crate::{
-    queue::{GenericReceiveError, GenericSendError},
-    ComponentId,
-};
+use crate::{queue::GenericTargetedMessagingError, ComponentId};

 /// Generic request ID type. Requests can be associated with an ID to have a unique identifier
 /// for them. This can be useful for tasks like tracking their progress.
@@ -144,12 +141,12 @@ impl<Message> GenericMessage<Message> {

 /// Generic trait for objects which can send targeted messages.
 pub trait MessageSenderProvider<MSG>: Send {
-    fn send(&self, message: GenericMessage<MSG>) -> Result<(), GenericSendError>;
+    fn send(&self, message: GenericMessage<MSG>) -> Result<(), GenericTargetedMessagingError>;
 }

 // Generic trait for objects which can receive targeted messages.
 pub trait MessageReceiverProvider<MSG> {
-    fn try_recv(&self) -> Result<Option<GenericMessage<MSG>>, GenericReceiveError>;
+    fn try_recv(&self) -> Result<Option<GenericMessage<MSG>>, GenericTargetedMessagingError>;
 }

 pub struct MessageWithSenderIdReceiver<Msg, Receiver: MessageReceiverProvider<Msg>>(
@@ -164,7 +161,9 @@ impl<MSG, R: MessageReceiverProvider<MSG>> From<R> for MessageWithSenderIdReceiv
 }

 impl<MSG, R: MessageReceiverProvider<MSG>> MessageWithSenderIdReceiver<MSG, R> {
-    pub fn try_recv_message(&self) -> Result<Option<GenericMessage<MSG>>, GenericReceiveError> {
+    pub fn try_recv_message(
+        &self,
+    ) -> Result<Option<GenericMessage<MSG>>, GenericTargetedMessagingError> {
         self.0.try_recv()
     }
 }
@@ -188,7 +187,9 @@ impl<MSG, R: MessageReceiverProvider<MSG>> MessageReceiverWithId<MSG, R> {
 }

 impl<MSG, R: MessageReceiverProvider<MSG>> MessageReceiverWithId<MSG, R> {
-    pub fn try_recv_message(&self) -> Result<Option<GenericMessage<MSG>>, GenericReceiveError> {
+    pub fn try_recv_message(
+        &self,
+    ) -> Result<Option<GenericMessage<MSG>>, GenericTargetedMessagingError> {
         self.reply_receiver.0.try_recv()
     }
 }
@@ -201,7 +202,7 @@ pub trait MessageSenderStoreProvider<Message, Sender>: Default {
         requestor_info: MessageMetadata,
         target_channel_id: ComponentId,
         message: Message,
-    ) -> Result<(), GenericSendError>;
+    ) -> Result<(), GenericTargetedMessagingError>;
 }

 #[cfg(feature = "alloc")]
@@ -241,14 +242,14 @@ pub mod alloc_mod {
         requestor_info: MessageMetadata,
         target_channel_id: ComponentId,
         message: Msg,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         if let Some((current_id, sender)) = &self.id_and_sender {
             if *current_id == target_channel_id {
                 sender.send(GenericMessage::new(requestor_info, message))?;
                 return Ok(());
             }
         }
-        Err(GenericSendError::TargetDoesNotExist(target_channel_id))
+        Err(GenericSendError::TargetDoesNotExist(target_channel_id).into())
     }
 }

@@ -275,14 +276,14 @@ pub mod alloc_mod {
         requestor_info: MessageMetadata,
         target_channel_id: ComponentId,
         message: Msg,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         for (current_id, sender) in &self.0 {
             if *current_id == target_channel_id {
                 sender.send(GenericMessage::new(requestor_info, message))?;
                 return Ok(());
             }
         }
-        Err(GenericSendError::TargetDoesNotExist(target_channel_id))
+        Err(GenericSendError::TargetDoesNotExist(target_channel_id).into())
    }
 }

@@ -309,7 +310,7 @@ pub mod alloc_mod {
         requestor_info: MessageMetadata,
         target_channel_id: ComponentId,
         message: Msg,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         if self.0.contains_key(&target_channel_id) {
             return self
                 .0
@@ -317,7 +318,7 @@ pub mod alloc_mod {
             .unwrap()
             .send(GenericMessage::new(requestor_info, message));
         }
-        Err(GenericSendError::TargetDoesNotExist(target_channel_id))
+        Err(GenericSendError::TargetDoesNotExist(target_channel_id).into())
     }
 }

@@ -366,7 +367,7 @@ pub mod alloc_mod {
         request_id: RequestId,
         target_id: ComponentId,
         message: To,
-    ) -> Result<(), GenericSendError> {
+    ) -> Result<(), GenericTargetedMessagingError> {
         self.message_sender_store.send_message(
             MessageMetadata::new(request_id, self.local_channel_id_generic()),
             target_id,
@@ -377,7 +378,7 @@ pub mod alloc_mod {
     /// Try to receive a message, which can be a reply or a request, depending on the generics.
     pub fn try_recv_message(
         &self,
-    ) -> Result<Option<GenericMessage<From>>, GenericReceiveError> {
+    ) -> Result<Option<GenericMessage<From>>, GenericTargetedMessagingError> {
         self.message_receiver.try_recv_message()
     }
 }
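Anything that implements the MessageSenderProvider trait shown earlier can be stored in these sender maps. As a hedged sketch (not part of the diff), a thin wrapper that counts outgoing messages before forwarding them through an mpsc channel; the From conversion used for the .into() calls above is assumed here as well:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::mpsc;

use satrs::queue::{GenericSendError, GenericTargetedMessagingError};
use satrs::request::{GenericMessage, MessageSenderProvider};

// Sketch: counts every message passed on to the wrapped mpsc sender.
struct CountingSender<Msg> {
    inner: mpsc::Sender<GenericMessage<Msg>>,
    sent: AtomicUsize,
}

impl<Msg: Send> MessageSenderProvider<Msg> for CountingSender<Msg> {
    fn send(&self, message: GenericMessage<Msg>) -> Result<(), GenericTargetedMessagingError> {
        self.sent.fetch_add(1, Ordering::Relaxed);
        self.inner
            .send(message)
            .map_err(|_| GenericSendError::RxDisconnected.into())
    }
}
```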
@ -453,18 +454,20 @@ pub mod std_mod {
|
|||||||
use crate::queue::{GenericReceiveError, GenericSendError};
|
use crate::queue::{GenericReceiveError, GenericSendError};
|
||||||
|
|
||||||
impl<MSG: Send> MessageSenderProvider<MSG> for mpsc::Sender<GenericMessage<MSG>> {
|
impl<MSG: Send> MessageSenderProvider<MSG> for mpsc::Sender<GenericMessage<MSG>> {
|
||||||
fn send(&self, message: GenericMessage<MSG>) -> Result<(), GenericSendError> {
|
fn send(&self, message: GenericMessage<MSG>) -> Result<(), GenericTargetedMessagingError> {
|
||||||
self.send(message)
|
self.send(message)
|
||||||
.map_err(|_| GenericSendError::RxDisconnected)?;
|
.map_err(|_| GenericSendError::RxDisconnected)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl<MSG: Send> MessageSenderProvider<MSG> for mpsc::SyncSender<GenericMessage<MSG>> {
|
impl<MSG: Send> MessageSenderProvider<MSG> for mpsc::SyncSender<GenericMessage<MSG>> {
|
||||||
fn send(&self, message: GenericMessage<MSG>) -> Result<(), GenericSendError> {
|
fn send(&self, message: GenericMessage<MSG>) -> Result<(), GenericTargetedMessagingError> {
|
||||||
if let Err(e) = self.try_send(message) {
|
if let Err(e) = self.try_send(message) {
|
||||||
return match e {
|
return match e {
|
||||||
mpsc::TrySendError::Full(_) => Err(GenericSendError::QueueFull(None)),
|
mpsc::TrySendError::Full(_) => Err(GenericSendError::QueueFull(None).into()),
|
||||||
mpsc::TrySendError::Disconnected(_) => Err(GenericSendError::RxDisconnected),
|
mpsc::TrySendError::Disconnected(_) => {
|
||||||
|
Err(GenericSendError::RxDisconnected.into())
|
||||||
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
@@ -475,13 +478,13 @@ pub mod std_mod {
     pub type MessageSenderMapBoundedMpsc<MSG> = MessageReceiverWithId<MSG, mpsc::SyncSender<MSG>>;
 
     impl<MSG> MessageReceiverProvider<MSG> for mpsc::Receiver<GenericMessage<MSG>> {
-        fn try_recv(&self) -> Result<Option<GenericMessage<MSG>>, GenericReceiveError> {
+        fn try_recv(&self) -> Result<Option<GenericMessage<MSG>>, GenericTargetedMessagingError> {
             match self.try_recv() {
                 Ok(msg) => Ok(Some(msg)),
                 Err(e) => match e {
                     mpsc::TryRecvError::Empty => Ok(None),
                     mpsc::TryRecvError::Disconnected => {
-                        Err(GenericReceiveError::TxDisconnected(None))
+                        Err(GenericReceiveError::TxDisconnected(None).into())
                    }
                },
            }
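The receive side mirrors this: an empty queue is a normal outcome (`Ok(None)`), and only a dropped sender is reported as an error. A stand-alone sketch of that convention, again with a stand-in error type rather than the satrs one:

```rust
use std::sync::mpsc;

// Stand-in receive error; the satrs enum additionally carries a channel id.
#[derive(Debug)]
enum ReceiveError {
    TxDisconnected,
}

// Non-blocking receive: "nothing there yet" is a normal outcome, only a
// dropped sender is reported as an error.
fn try_recv_opt<T>(rx: &mpsc::Receiver<T>) -> Result<Option<T>, ReceiveError> {
    match rx.try_recv() {
        Ok(msg) => Ok(Some(msg)),
        Err(mpsc::TryRecvError::Empty) => Ok(None),
        Err(mpsc::TryRecvError::Disconnected) => Err(ReceiveError::TxDisconnected),
    }
}

fn main() {
    let (tx, rx) = mpsc::channel::<u32>();
    assert!(matches!(try_recv_opt(&rx), Ok(None)));
    tx.send(42).unwrap();
    assert!(matches!(try_recv_opt(&rx), Ok(Some(42))));
    drop(tx);
    assert!(matches!(try_recv_opt(&rx), Err(ReceiveError::TxDisconnected)));
}
```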
@@ -502,7 +505,7 @@ mod tests {
     };
 
     use crate::{
-        queue::{GenericReceiveError, GenericSendError},
+        queue::{GenericReceiveError, GenericSendError, GenericTargetedMessagingError},
         request::{MessageMetadata, MessageSenderMap, MessageSenderStoreProvider},
     };
 
@@ -595,7 +598,9 @@ mod tests {
         let reply = receiver.try_recv_message();
         assert!(reply.is_err());
         let error = reply.unwrap_err();
-        if let GenericReceiveError::TxDisconnected(None) = error {
+        if let GenericTargetedMessagingError::Receive(GenericReceiveError::TxDisconnected(None)) =
+            error
+        {
         } else {
             panic!("unexpected error type");
         }
@@ -644,7 +649,9 @@ mod tests {
         );
         assert!(result.is_err());
         let error = result.unwrap_err();
-        if let GenericSendError::TargetDoesNotExist(target) = error {
+        if let GenericTargetedMessagingError::Send(GenericSendError::TargetDoesNotExist(target)) =
+            error
+        {
             assert_eq!(target, TEST_CHANNEL_ID_2);
         } else {
             panic!("Unexpected error type");
@@ -669,7 +676,7 @@ mod tests {
         );
         assert!(result.is_err());
         let error = result.unwrap_err();
-        if let GenericSendError::QueueFull(capacity) = error {
+        if let GenericTargetedMessagingError::Send(GenericSendError::QueueFull(capacity)) = error {
             assert!(capacity.is_none());
         } else {
             panic!("Unexpected error type {}", error);
@@ -689,7 +696,7 @@ mod tests {
         );
         assert!(result.is_err());
         let error = result.unwrap_err();
-        if let GenericSendError::RxDisconnected = error {
+        if let GenericTargetedMessagingError::Send(GenericSendError::RxDisconnected) = error {
        } else {
            panic!("Unexpected error type {}", error);
        }
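The extra wrapping layer is why the test assertions above now span several lines. Purely as an illustration (not a change proposed here), the same check can be written in one line with the `matches!` macro:

```rust
fn main() {
    // Local stand-in enums just to demonstrate the pattern shape.
    enum Inner {
        RxDisconnected,
    }
    enum Outer {
        Send(Inner),
    }

    let error = Outer::Send(Inner::RxDisconnected);
    // One-liner equivalent of
    // `if let Outer::Send(Inner::RxDisconnected) = error {} else { panic!() }`.
    assert!(matches!(error, Outer::Send(Inner::RxDisconnected)));
}
```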

@@ -121,10 +121,10 @@ impl SequenceExecutionHelper {
     /// example by checking [mode replies][ModeReply] received by the children components, and
     /// then calling [Self::confirm_sequence_done] to advance to the sequence or complete the
     /// sequence.
-    /// * [ModeCommandingResult::Done] - The sequence is done. The user can load a new
+    /// * [ModeCommandingResult::CommandingDone] - The sequence is done. The user can load a new
     /// sequence now without overwriting the last one. The sequence executor is in
     /// [SequenceExecutionHelperState::Idle] again.
-    /// * [ModeCommandingResult::StepDone] - The sequence has advanced one step. The user
+    /// * [ModeCommandingResult::CommandingStepDone] - The sequence has advanced one step. The user
     /// can now call [Self::run] again to immediately execute the next step in the sequence.
     ///
     /// Generally, periodic execution of the [Self::run] method should be performed while
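The renamed variants name the two ways a call to `run` can conclude. A rough, self-contained sketch of the polling pattern the doc comment describes, with heavily simplified stand-in types (the real `SequenceExecutionHelper` takes mode requests, a sequence table and a request sender, all omitted here):

```rust
// Simplified stand-ins for illustration; not the real satrs types or API.
#[derive(Debug)]
#[allow(dead_code)]
enum CommandingResult {
    CommandingDone,
    CommandingStepDone,
    AwaitingReplies, // hypothetical "wait for child mode replies" outcome
}

// Fake executor that reports completion after three steps.
struct SequenceExecutor {
    step: u32,
}

impl SequenceExecutor {
    fn run(&mut self) -> CommandingResult {
        self.step += 1;
        if self.step >= 3 {
            CommandingResult::CommandingDone
        } else {
            CommandingResult::CommandingStepDone
        }
    }
}

fn main() {
    let mut executor = SequenceExecutor { step: 0 };
    // Periodic driver loop: keep stepping while the helper reports step
    // completion; stop once it reports that the whole sequence is done.
    loop {
        match executor.run() {
            CommandingResult::CommandingStepDone => continue,
            CommandingResult::AwaitingReplies => {
                // In the real helper this is where mode replies from child
                // components would be collected before confirming the step.
                continue;
            }
            CommandingResult::CommandingDone => {
                println!("sequence finished after {} steps", executor.step);
                break;
            }
        }
    }
}
```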

@@ -14,7 +14,6 @@ use crate::{
 };
 #[cfg(feature = "std")]
 pub use alloc_mod::*;
-use core::fmt::Debug;
 #[cfg(feature = "alloc")]
 use downcast_rs::{impl_downcast, Downcast};
 use spacepackets::{
@@ -171,7 +170,7 @@ where
 }
 
 /// Helper trait for any generic (static) store which allows storing raw or CCSDS packets.
-pub trait CcsdsPacketPool: Debug {
+pub trait CcsdsPacketPool {
    fn add_ccsds_tc(&mut self, _: &SpHeader, tc_raw: &[u8]) -> Result<PoolAddr, PoolError> {
        self.add_raw_tc(tc_raw)
    }
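`add_ccsds_tc` is a provided method that forwards to the required `add_raw_tc`, so implementors only need to supply the raw variant unless they want header-aware behaviour. A stripped-down sketch of that shape with local stand-in types (not the satrs trait):

```rust
// Local stand-ins so the example compiles on its own.
type PoolAddr = u32;
#[derive(Debug)]
#[allow(dead_code)]
struct PoolError;
struct SpHeader; // placeholder for the CCSDS primary header type

trait PacketPool {
    // Required: store a raw telecommand and return its pool address.
    fn add_raw_tc(&mut self, tc_raw: &[u8]) -> Result<PoolAddr, PoolError>;

    // Provided: the default implementation ignores the parsed header and
    // just forwards to the raw variant, mirroring the trait in the diff.
    fn add_ccsds_tc(&mut self, _header: &SpHeader, tc_raw: &[u8]) -> Result<PoolAddr, PoolError> {
        self.add_raw_tc(tc_raw)
    }
}

// A trivial implementor only has to supply the raw method.
#[derive(Default)]
struct VecPool {
    packets: Vec<Vec<u8>>,
}

impl PacketPool for VecPool {
    fn add_raw_tc(&mut self, tc_raw: &[u8]) -> Result<PoolAddr, PoolError> {
        self.packets.push(tc_raw.to_vec());
        Ok((self.packets.len() - 1) as PoolAddr)
    }
}

fn main() {
    let mut pool = VecPool::default();
    let addr = pool
        .add_ccsds_tc(&SpHeader, &[0x18, 0x00, 0xc0, 0x00, 0x00, 0x00])
        .unwrap();
    println!("stored packet at pool address {addr}");
}
```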
@@ -191,7 +190,7 @@ pub trait PusTmPool {
 }
 
 /// Generic trait for any sender component able to send packets stored inside a pool structure.
-pub trait PacketInPoolSender: Debug + Send {
+pub trait PacketInPoolSender: Send {
     fn send_packet(
         &self,
         sender_id: ComponentId,
@@ -236,7 +235,7 @@ pub mod std_mod {
 
     /// Newtype wrapper around the [SharedStaticMemoryPool] to enable extension helper traits on
     /// top of the regular shared memory pool API.
-    #[derive(Debug, Clone)]
+    #[derive(Clone)]
    pub struct SharedPacketPool(pub SharedStaticMemoryPool);

    impl SharedPacketPool {
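The dropped `#[derive(Debug)]` parallels the removed `Debug` supertrait bounds. As a general Rust constraint (not specific to this change set), a derived `Debug` only compiles when every field is itself `Debug`; when that cannot be guaranteed, the derive has to go or be replaced by a manual impl, roughly like this:

```rust
use std::fmt;

// A field type that deliberately has no Debug implementation.
struct OpaquePool;

// `#[derive(Debug)]` on this wrapper would fail to compile because
// OpaquePool is not Debug; a hand-written impl can still be provided.
#[allow(dead_code)]
struct SharedPool(OpaquePool);

impl fmt::Debug for SharedPool {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("SharedPool(..)")
    }
}

fn main() {
    println!("{:?}", SharedPool(OpaquePool));
}
```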
@@ -288,6 +287,7 @@ pub mod std_mod {
         }
     }
 
+    #[cfg(feature = "std")]
     impl PacketSenderRaw for mpsc::Sender<PacketAsVec> {
         type Error = GenericSendError;
 
@@ -297,6 +297,7 @@ pub mod std_mod {
         }
     }
 
+    #[cfg(feature = "std")]
     impl PacketSenderRaw for mpsc::SyncSender<PacketAsVec> {
         type Error = GenericSendError;
 
@@ -361,7 +362,7 @@ pub mod std_mod {
 
     /// This is the primary structure used to send packets stored in a dedicated memory pool
     /// structure.
-    #[derive(Debug, Clone)]
+    #[derive(Clone)]
    pub struct PacketSenderWithSharedPool<
        Sender: PacketInPoolSender = mpsc::SyncSender<PacketInPool>,
        PacketPool: CcsdsPacketPool = SharedPacketPool,
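The two added `#[cfg(feature = "std")]` attributes compile the `mpsc`-based impls only when the `std` feature is active. A generic sketch of feature-gating an impl this way (the `std` feature here is hypothetical and would be declared in the crate's Cargo.toml):

```rust
// Sketch of feature-gating an impl, as done for the mpsc senders in the diff.
pub trait PacketSender {
    fn send_packet(&self, packet: &[u8]);
}

// Only available when the crate is built with the `std` feature, because the
// implementation relies on std-only machinery (std::sync::mpsc in the diff).
#[cfg(feature = "std")]
impl PacketSender for std::sync::mpsc::Sender<Vec<u8>> {
    fn send_packet(&self, packet: &[u8]) {
        // A disconnected receiver is silently ignored in this sketch.
        let _ = self.send(packet.to_vec());
    }
}

fn main() {
    #[cfg(feature = "std")]
    {
        let (tx, rx) = std::sync::mpsc::channel::<Vec<u8>>();
        tx.send_packet(&[0xde, 0xad]);
        assert_eq!(rx.recv().unwrap(), vec![0xde, 0xad]);
    }
    #[cfg(not(feature = "std"))]
    println!("built without the `std` feature: mpsc-based sender not compiled");
}
```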

@@ -502,7 +502,7 @@ impl ModeRequestHandler for AcsSubsystem {
             .unwrap();
         self.mode_node
             .send_mode_reply(requestor_info, reply)
-            .map_err(ModeError::Send)?;
+            .map_err(ModeError::Messaging)?;
        Ok(())
    }
}
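`map_err(ModeError::Messaging)` uses the tuple variant as a plain function to wrap the underlying messaging error before `?` propagates it. A small generic sketch of the idiom with stand-in types (not the satrs-example definitions):

```rust
// Stand-in error types for illustration.
#[derive(Debug)]
struct SendError;

#[derive(Debug)]
enum ModeError {
    // A tuple variant can be used directly as a function
    // `fn(SendError) -> ModeError`, which is exactly what `map_err` expects.
    Messaging(SendError),
}

fn send_reply() -> Result<(), SendError> {
    Err(SendError)
}

fn handle_mode_reached() -> Result<(), ModeError> {
    // Equivalent to `.map_err(|e| ModeError::Messaging(e))?`.
    send_reply().map_err(ModeError::Messaging)?;
    Ok(())
}

fn main() {
    println!("{:?}", handle_mode_reached());
}
```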