- Increased example modularization by moving the majority of the application logic into dedicated modules.
- Added a new `dyn_tmtc` feature for the satrs-example which configures the heap as the backing store for TMTC packets instead of static stores.
- Added a dedicated satrs-example chapter to the satrs-book.
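The feature selects the TMTC backing store at compile time: the reworked `main` (visible at the end of this diff) simply dispatches to one of two entry points. A minimal sketch of the pattern, assuming the feature is declared in the example's Cargo.toml and enabled with e.g. `cargo run --features dyn_tmtc`:

    // Compile-time selection of the TMTC backing store.
    fn main() {
        // Default: TM/TC packets are stored in statically allocated pools.
        #[cfg(not(feature = "dyn_tmtc"))]
        static_tmtc_pool_main();
        // With the dyn_tmtc feature: packets are heap-allocated instead.
        #[cfg(feature = "dyn_tmtc")]
        dyn_tmtc_pool_main();
    }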
@@ -1,152 +1,95 @@
mod acs;
mod ccsds;
mod events;
mod hk;
mod logger;
mod pus;
mod requests;
mod tcp;
mod tm_funnel;
mod tmtc;
mod udp;

use log::{info, warn};
use crate::events::EventHandler;
use crate::pus::stack::PusStack;
use crate::tm_funnel::{TmFunnelDynamic, TmFunnelStatic};
use log::info;
use pus::test::create_test_service_dynamic;
use satrs_core::hal::std::tcp_server::ServerConfig;
use satrs_core::hal::std::udp_server::UdpTcServer;
use satrs_example::config::pool::{create_sched_tc_pool, create_static_pools};
use satrs_example::config::tasks::{
    FREQ_MS_AOCS, FREQ_MS_EVENT_HANDLING, FREQ_MS_PUS_STACK, FREQ_MS_UDP_TMTC,
};
use satrs_example::config::{RequestTargetId, TmSenderId, OBSW_SERVER_ADDR, PUS_APID, SERVER_PORT};
use tmtc::PusTcSourceProviderDynamic;
use udp::DynamicUdpTmHandler;

use crate::acs::AcsTask;
use crate::ccsds::CcsdsReceiver;
use crate::hk::{AcsHkIds, HkUniqueId};
use crate::logger::setup_logger;
use crate::pus::action::{Pus8Wrapper, PusService8ActionHandler};
use crate::pus::event::Pus5Wrapper;
use crate::pus::hk::{Pus3Wrapper, PusService3HkHandler};
use crate::pus::scheduler::Pus11Wrapper;
use crate::pus::test::Service17CustomWrapper;
use crate::pus::action::{create_action_service_dynamic, create_action_service_static};
use crate::pus::event::{create_event_service_dynamic, create_event_service_static};
use crate::pus::hk::{create_hk_service_dynamic, create_hk_service_static};
use crate::pus::scheduler::{create_scheduler_service_dynamic, create_scheduler_service_static};
use crate::pus::test::create_test_service_static;
use crate::pus::{PusReceiver, PusTcMpscRouter};
use crate::requests::{Request, RequestWithToken};
use crate::requests::RequestWithToken;
use crate::tcp::{SyncTcpTmSource, TcpTask};
use crate::tmtc::{PusTcSource, TcArgs, TcStore, TmArgs, TmFunnel, TmtcTask};
use crate::udp::UdpTmtcServer;
use satrs_core::event_man::{
    EventManagerWithMpscQueue, MpscEventReceiver, MpscEventU32SendProvider, SendEventProvider,
};
use satrs_core::events::EventU32;
use satrs_core::hk::HkRequest;
use satrs_core::pool::{PoolProviderMemInPlace, StaticMemoryPool, StaticPoolConfig};
use satrs_core::pus::event_man::{
    DefaultPusMgmtBackendProvider, EventReporter, EventRequest, EventRequestWithToken,
    PusEventDispatcher,
};
use satrs_core::pus::event_srv::PusService5EventHandler;
use satrs_core::pus::hk::Subservice as HkSubservice;
use satrs_core::pus::scheduler::PusScheduler;
use satrs_core::pus::scheduler_srv::PusService11SchedHandler;
use satrs_core::pus::test::PusService17TestHandler;
use satrs_core::pus::verification::{
    TcStateStarted, VerificationReporterCfg, VerificationReporterWithSender, VerificationToken,
};
use satrs_core::pus::{
    EcssTcInSharedStoreConverter, MpscTcReceiver, MpscTmInStoreSender, PusServiceHelper,
};
use satrs_core::seq_count::{CcsdsSimpleSeqCountProvider, SequenceCountProviderCore};
use satrs_core::spacepackets::ecss::tm::{PusTmCreator, PusTmZeroCopyWriter};
use satrs_core::spacepackets::{
    ecss::tm::PusTmSecondaryHeader, time::cds::TimeProvider, time::TimeWriter, SequenceFlags,
    SpHeader,
use crate::tmtc::{
    PusTcSourceProviderSharedPool, SharedTcPool, TcArgs, TmArgs, TmtcTaskDynamic, TmtcTaskStatic,
};
use crate::udp::{StaticUdpTmHandler, UdpTmtcServer};
use satrs_core::pus::event_man::EventRequestWithToken;
use satrs_core::pus::verification::{VerificationReporterCfg, VerificationReporterWithSender};
use satrs_core::pus::{EcssTmSender, MpscTmAsVecSender, MpscTmInStoreSender};
use satrs_core::spacepackets::{time::cds::TimeProvider, time::TimeWriter};
use satrs_core::tmtc::tm_helper::SharedTmStore;
use satrs_core::tmtc::{CcsdsDistributor, TargetId};
use satrs_core::ChannelId;
use satrs_example::{
    RequestTargetId, TargetIdWithApid, TcReceiverId, TmSenderId, OBSW_SERVER_ADDR, PUS_APID,
    SERVER_PORT,
};
use satrs_example::TargetIdWithApid;
use std::collections::HashMap;
use std::net::{IpAddr, SocketAddr};
use std::sync::mpsc::{channel, TryRecvError};
use std::sync::mpsc::{self, channel};
use std::sync::{Arc, RwLock};
use std::thread;
use std::time::Duration;

fn main() {
    setup_logger().expect("setting up logging with fern failed");
    println!("Running OBSW example");
    let tm_pool = StaticMemoryPool::new(StaticPoolConfig::new(vec![
        (30, 32),
        (15, 64),
        (15, 128),
        (15, 256),
        (15, 1024),
        (15, 2048),
    ]));
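// Note on the pool configuration above (and the similar configs further down):
// each tuple is assumed to define one subpool as (number of elements, element
// size in bytes), so (30, 32) would reserve 30 slots of 32 bytes each.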
fn create_verification_reporter(verif_sender: impl EcssTmSender) -> VerificationReporterWithSender {
    let verif_cfg = VerificationReporterCfg::new(PUS_APID, 1, 2, 8).unwrap();
    // Every software component which needs to generate verification telemetry, gets a cloned
    // verification reporter.
    VerificationReporterWithSender::new(&verif_cfg, Box::new(verif_sender))
}

#[allow(dead_code)]
fn static_tmtc_pool_main() {
    let (tm_pool, tc_pool) = create_static_pools();
    let shared_tm_store = SharedTmStore::new(tm_pool);
    let tm_store_event = shared_tm_store.clone();
    let tc_pool = StaticMemoryPool::new(StaticPoolConfig::new(vec![
        (30, 32),
        (15, 64),
        (15, 128),
        (15, 256),
        (15, 1024),
        (15, 2048),
    ]));
    let tc_store = TcStore {
    let shared_tc_pool = SharedTcPool {
        pool: Arc::new(RwLock::new(tc_pool)),
    };
    let sched_tc_pool = StaticMemoryPool::new(StaticPoolConfig::new(vec![
        (30, 32),
        (15, 64),
        (15, 128),
        (15, 256),
        (15, 1024),
        (15, 2048),
    ]));

    let seq_count_provider = CcsdsSimpleSeqCountProvider::new();
    let mut msg_counter_map: HashMap<u8, u16> = HashMap::new();
    let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
    let (tc_source_tx, tc_source_rx) = channel();
    let (tm_funnel_tx, tm_funnel_rx) = channel();
    let (tm_server_tx, tm_server_rx) = channel();
    let verif_sender = MpscTmInStoreSender::new(

    // Every software component which needs to generate verification telemetry, receives a cloned
    // verification reporter.
    let verif_reporter = create_verification_reporter(MpscTmInStoreSender::new(
        TmSenderId::PusVerification as ChannelId,
        "verif_sender",
        shared_tm_store.clone(),
        tm_funnel_tx.clone(),
    );
    let verif_cfg = VerificationReporterCfg::new(PUS_APID, 1, 2, 8).unwrap();
    // Every software component which needs to generate verification telemetry, gets a cloned
    // verification reporter.
    let verif_reporter = VerificationReporterWithSender::new(&verif_cfg, Box::new(verif_sender));
    let reporter_event_handler = verif_reporter.clone();
    let reporter_aocs = verif_reporter.clone();

    // Create event handling components
    // These sender handles are used to send event requests, for example to enable or disable
    // certain events
    let (event_request_tx, event_request_rx) = channel::<EventRequestWithToken>();
    // The sender handle is the primary sender handle for all components which want to create events.
    // The event manager will receive the RX handle to receive all the events.
    let (event_sender, event_man_rx) = channel();
    let event_recv = MpscEventReceiver::<EventU32>::new(event_man_rx);
    let test_srv_event_sender = event_sender;
    let mut event_man = EventManagerWithMpscQueue::new(Box::new(event_recv));

    // All events sent to the manager are routed to the PUS event manager, which generates PUS event
    // telemetry for each event.
    let event_reporter = EventReporter::new(PUS_APID, 128).unwrap();
    let pus_tm_backend = DefaultPusMgmtBackendProvider::<EventU32>::default();
    let mut pus_event_dispatcher =
        PusEventDispatcher::new(event_reporter, Box::new(pus_tm_backend));
    let (pus_event_man_tx, pus_event_man_rx) = channel();
    let pus_event_man_send_provider = MpscEventU32SendProvider::new(1, pus_event_man_tx);
    event_man.subscribe_all(pus_event_man_send_provider.id());
    event_man.add_sender(pus_event_man_send_provider);
    ));

    let acs_target_id = TargetIdWithApid::new(PUS_APID, RequestTargetId::AcsSubsystem as TargetId);
    let (acs_thread_tx, acs_thread_rx) = channel::<RequestWithToken>();
    // Some request are targetable. This map is used to retrieve sender handles based on a target ID.
    let mut request_map = HashMap::new();
    let (acs_thread_tx, acs_thread_rx) = channel::<RequestWithToken>();
    let target_apid = TargetIdWithApid::new(PUS_APID, RequestTargetId::AcsSubsystem as TargetId);
    request_map.insert(target_apid, acs_thread_tx);
    request_map.insert(acs_target_id, acs_thread_tx);

    let tc_source_wrapper = PusTcSource {
        tc_store: tc_store.clone(),
    let tc_source_wrapper = PusTcSourceProviderSharedPool {
        shared_pool: shared_tc_pool.clone(),
        tc_source: tc_source_tx,
    };

@@ -161,8 +104,23 @@ fn main() {
        tm_udp_server_rx: tm_server_rx,
    };

    let aocs_tm_funnel = tm_funnel_tx.clone();
    let aocs_tm_store = shared_tm_store.clone();
    // Create event handling components
    // These sender handles are used to send event requests, for example to enable or disable
    // certain events.
    let (event_request_tx, event_request_rx) = mpsc::channel::<EventRequestWithToken>();

    // The event task is the core handler to perform the event routing and TM handling as specified
    // in the sat-rs documentation.
    let mut event_handler = EventHandler::new(
        MpscTmInStoreSender::new(
            TmSenderId::AllEvents as ChannelId,
            "ALL_EVENTS_TX",
            shared_tm_store.clone(),
            tm_funnel_tx.clone(),
        ),
        verif_reporter.clone(),
        event_request_rx,
    );

    let (pus_test_tx, pus_test_rx) = channel();
    let (pus_event_tx, pus_event_rx) = channel();
@@ -176,134 +134,102 @@ fn main() {
        hk_service_receiver: pus_hk_tx,
        action_service_receiver: pus_action_tx,
    };
    let test_srv_tm_sender = MpscTmInStoreSender::new(
        TmSenderId::PusTest as ChannelId,
        "PUS_17_TM_SENDER",
    let pus_test_service = create_test_service_static(
        shared_tm_store.clone(),
        tm_funnel_tx.clone(),
    );
    let test_srv_receiver = MpscTcReceiver::new(
        TcReceiverId::PusTest as ChannelId,
        "PUS_17_TC_RECV",
        verif_reporter.clone(),
        shared_tc_pool.pool.clone(),
        event_handler.clone_event_sender(),
        pus_test_rx,
    );
    let pus17_handler = PusService17TestHandler::new(PusServiceHelper::new(
        Box::new(test_srv_receiver),
        Box::new(test_srv_tm_sender),
        PUS_APID,
    let pus_scheduler_service = create_scheduler_service_static(
        shared_tm_store.clone(),
        tm_funnel_tx.clone(),
        verif_reporter.clone(),
        EcssTcInSharedStoreConverter::new(tc_store.pool.clone(), 2048),
    ));
    let mut pus_17_wrapper = Service17CustomWrapper {
        pus17_handler,
        test_srv_event_sender,
    };

    let sched_srv_tm_sender = MpscTmInStoreSender::new(
        TmSenderId::PusSched as ChannelId,
        "PUS_11_TM_SENDER",
        shared_tm_store.clone(),
        tm_funnel_tx.clone(),
    );
    let sched_srv_receiver = MpscTcReceiver::new(
        TcReceiverId::PusSched as ChannelId,
        "PUS_11_TC_RECV",
        pus_sched_rx,
    );
    let scheduler = PusScheduler::new_with_current_init_time(Duration::from_secs(5))
        .expect("Creating PUS Scheduler failed");
    let pus_11_handler = PusService11SchedHandler::new(
        PusServiceHelper::new(
            Box::new(sched_srv_receiver),
            Box::new(sched_srv_tm_sender),
            PUS_APID,
            verif_reporter.clone(),
            EcssTcInSharedStoreConverter::new(tc_store.pool.clone(), 2048),
        ),
        scheduler,
    );
    let mut pus_11_wrapper = Pus11Wrapper {
        pus_11_handler,
        sched_tc_pool,
        tc_source_wrapper,
    };

    let event_srv_tm_sender = MpscTmInStoreSender::new(
        TmSenderId::PusEvent as ChannelId,
        "PUS_5_TM_SENDER",
        pus_sched_rx,
        create_sched_tc_pool(),
    );
    let pus_event_service = create_event_service_static(
        shared_tm_store.clone(),
        tm_funnel_tx.clone(),
    );
    let event_srv_receiver = MpscTcReceiver::new(
        TcReceiverId::PusEvent as ChannelId,
        "PUS_5_TC_RECV",
        verif_reporter.clone(),
        shared_tc_pool.pool.clone(),
        pus_event_rx,
    );
    let pus_5_handler = PusService5EventHandler::new(
        PusServiceHelper::new(
            Box::new(event_srv_receiver),
            Box::new(event_srv_tm_sender),
            PUS_APID,
            verif_reporter.clone(),
            EcssTcInSharedStoreConverter::new(tc_store.pool.clone(), 2048),
        ),
        event_request_tx,
    );
    let mut pus_5_wrapper = Pus5Wrapper { pus_5_handler };

    let action_srv_tm_sender = MpscTmInStoreSender::new(
        TmSenderId::PusAction as ChannelId,
        "PUS_8_TM_SENDER",
    let pus_action_service = create_action_service_static(
        shared_tm_store.clone(),
        tm_funnel_tx.clone(),
    );
    let action_srv_receiver = MpscTcReceiver::new(
        TcReceiverId::PusAction as ChannelId,
        "PUS_8_TC_RECV",
        pus_action_rx,
    );
    let pus_8_handler = PusService8ActionHandler::new(
        Box::new(action_srv_receiver),
        Box::new(action_srv_tm_sender),
        PUS_APID,
        verif_reporter.clone(),
        EcssTcInSharedStoreConverter::new(tc_store.pool.clone(), 2048),
        shared_tc_pool.pool.clone(),
        pus_action_rx,
        request_map.clone(),
    );
    let mut pus_8_wrapper = Pus8Wrapper { pus_8_handler };

    let hk_srv_tm_sender = MpscTmInStoreSender::new(
        TmSenderId::PusHk as ChannelId,
        "PUS_3_TM_SENDER",
    let pus_hk_service = create_hk_service_static(
        shared_tm_store.clone(),
        tm_funnel_tx.clone(),
    );
    let hk_srv_receiver =
        MpscTcReceiver::new(TcReceiverId::PusHk as ChannelId, "PUS_8_TC_RECV", pus_hk_rx);
    let pus_3_handler = PusService3HkHandler::new(
        Box::new(hk_srv_receiver),
        Box::new(hk_srv_tm_sender),
        PUS_APID,
        verif_reporter.clone(),
        EcssTcInSharedStoreConverter::new(tc_store.pool.clone(), 2048),
        shared_tc_pool.pool.clone(),
        pus_hk_rx,
        request_map,
    );
    let mut pus_3_wrapper = Pus3Wrapper { pus_3_handler };
    let mut pus_stack = PusStack::new(
        pus_hk_service,
        pus_event_service,
        pus_action_service,
        pus_scheduler_service,
        pus_test_service,
    );

    let ccsds_receiver = CcsdsReceiver {
        tc_source: tc_args.tc_source.clone(),
    };
    let mut tmtc_task = TmtcTask::new(tc_args, PusReceiver::new(verif_reporter, pus_router));
    let mut tmtc_task = TmtcTaskStatic::new(
        tc_args,
        PusReceiver::new(verif_reporter.clone(), pus_router),
    );

    let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
    let udp_ccsds_distributor = CcsdsDistributor::new(Box::new(ccsds_receiver.clone()));
    let udp_tc_server = UdpTcServer::new(sock_addr, 2048, Box::new(udp_ccsds_distributor))
        .expect("creating UDP TMTC server failed");
    let mut udp_tmtc_server = UdpTmtcServer {
        udp_tc_server,
        tm_rx: tm_args.tm_udp_server_rx,
        tm_store: tm_args.tm_store.clone_backing_pool(),
        tm_handler: StaticUdpTmHandler {
            tm_rx: tm_args.tm_udp_server_rx,
            tm_store: tm_args.tm_store.clone_backing_pool(),
        },
    };

    let tcp_ccsds_distributor = CcsdsDistributor::new(Box::new(ccsds_receiver));
    let tcp_server_cfg = ServerConfig::new(sock_addr, Duration::from_millis(400), 4096, 8192);
    let sync_tm_tcp_source = SyncTcpTmSource::new(200);
    let mut tcp_server = TcpTask::new(
        tcp_server_cfg,
        sync_tm_tcp_source.clone(),
        tcp_ccsds_distributor,
    )
    .expect("tcp server creation failed");

    let mut acs_task = AcsTask::new(
        MpscTmInStoreSender::new(
            TmSenderId::AcsSubsystem as ChannelId,
            "ACS_TASK_SENDER",
            shared_tm_store.clone(),
            tm_funnel_tx.clone(),
        ),
        acs_thread_rx,
        verif_reporter,
    );

    let mut tm_funnel = TmFunnelStatic::new(
        shared_tm_store,
        sync_tm_tcp_source,
        tm_funnel_rx,
        tm_server_tx,
    );

    info!("Starting TMTC and UDP task");
    let jh_udp_tmtc = thread::Builder::new()
        .name("TMTC and UDP".to_string())
@@ -312,20 +238,11 @@ fn main() {
            loop {
                udp_tmtc_server.periodic_operation();
                tmtc_task.periodic_operation();
                thread::sleep(Duration::from_millis(400));
                thread::sleep(Duration::from_millis(FREQ_MS_UDP_TMTC));
            }
        })
        .unwrap();

    let tcp_ccsds_distributor = CcsdsDistributor::new(Box::new(ccsds_receiver));
    let tcp_server_cfg = ServerConfig::new(sock_addr, Duration::from_millis(400), 4096, 8192);
    let mut sync_tm_tcp_source = SyncTcpTmSource::new(200);
    let mut tcp_server = TcpTask::new(
        tcp_server_cfg,
        sync_tm_tcp_source.clone(),
        tcp_ccsds_distributor,
    )
    .expect("tcp server creation failed");
    info!("Starting TCP task");
    let jh_tcp = thread::Builder::new()
        .name("TCP".to_string())
@@ -338,225 +255,271 @@ fn main() {
        .unwrap();

    info!("Starting TM funnel task");
    let jh1 = thread::Builder::new()
    let jh_tm_funnel = thread::Builder::new()
        .name("TM Funnel".to_string())
        .spawn(move || {
            let tm_funnel = TmFunnel {
                tm_server_tx,
                tm_funnel_rx,
            };
            loop {
                if let Ok(addr) = tm_funnel.tm_funnel_rx.recv() {
                    // Read the TM, set sequence counter and message counter, and finally update
                    // the CRC.
                    let shared_pool = shared_tm_store.clone_backing_pool();
                    let mut pool_guard = shared_pool.write().expect("Locking TM pool failed");
                    let tm_raw = pool_guard
                        .modify(&addr)
                        .expect("Reading TM from pool failed");
                    let mut zero_copy_writer = PusTmZeroCopyWriter::new(tm_raw)
                        .expect("Creating TM zero copy writer failed");
                    zero_copy_writer.set_apid(PUS_APID);
                    zero_copy_writer.set_seq_count(seq_count_provider.get_and_increment());
                    let entry = msg_counter_map
                        .entry(zero_copy_writer.service())
                        .or_insert(0);
                    zero_copy_writer.set_msg_count(*entry);
                    if *entry == u16::MAX {
                        *entry = 0;
                    } else {
                        *entry += 1;
                    }

                    // This operation has to come last!
                    zero_copy_writer.finish();
                    tm_funnel
                        .tm_server_tx
                        .send(addr)
                        .expect("Sending TM to server failed");
                    sync_tm_tcp_source.add_tm(tm_raw);
                }
            }
        .spawn(move || loop {
            tm_funnel.operation();
        })
        .unwrap();

    info!("Starting event handling task");
    let jh2 = thread::Builder::new()
    let jh_event_handling = thread::Builder::new()
        .name("Event".to_string())
        .spawn(move || {
            let mut timestamp: [u8; 7] = [0; 7];
            let mut sender = MpscTmInStoreSender::new(
                TmSenderId::AllEvents as ChannelId,
                "ALL_EVENTS_TX",
                tm_store_event.clone(),
                tm_funnel_tx,
            );
            let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
            let report_completion = |event_req: EventRequestWithToken, timestamp: &[u8]| {
                let started_token: VerificationToken<TcStateStarted> = event_req
                    .token
                    .try_into()
                    .expect("expected start verification token");
                reporter_event_handler
                    .completion_success(started_token, Some(timestamp))
                    .expect("Sending completion success failed");
            };
            loop {
                // handle event requests
                if let Ok(event_req) = event_request_rx.try_recv() {
                    match event_req.request {
                        EventRequest::Enable(event) => {
                            pus_event_dispatcher
                                .enable_tm_for_event(&event)
                                .expect("Enabling TM failed");
                            update_time(&mut time_provider, &mut timestamp);
                            report_completion(event_req, &timestamp);
                        }
                        EventRequest::Disable(event) => {
                            pus_event_dispatcher
                                .disable_tm_for_event(&event)
                                .expect("Disabling TM failed");
                            update_time(&mut time_provider, &mut timestamp);
                            report_completion(event_req, &timestamp);
                        }
                    }
                }

                // Perform the event routing.
                event_man
                    .try_event_handling()
                    .expect("event handling failed");

                // Perform the generation of PUS event packets
                if let Ok((event, _param)) = pus_event_man_rx.try_recv() {
                    update_time(&mut time_provider, &mut timestamp);
                    pus_event_dispatcher
                        .generate_pus_event_tm_generic(&mut sender, &timestamp, event, None)
                        .expect("Sending TM as event failed");
                }
                thread::sleep(Duration::from_millis(400));
            }
        .spawn(move || loop {
            event_handler.periodic_operation();
            thread::sleep(Duration::from_millis(FREQ_MS_EVENT_HANDLING));
        })
        .unwrap();

    info!("Starting AOCS thread");
    let jh3 = thread::Builder::new()
    let jh_aocs = thread::Builder::new()
        .name("AOCS".to_string())
        .spawn(move || {
            let mut timestamp: [u8; 7] = [0; 7];
            let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
            loop {
                // TODO: Move this into a separate function/task/module..
                match acs_thread_rx.try_recv() {
                    Ok(request) => {
                        info!(
                            "ACS thread: Received HK request {:?}",
                            request.targeted_request
                        );
                        update_time(&mut time_provider, &mut timestamp);
                        match request.targeted_request.request {
                            Request::Hk(hk_req) => match hk_req {
                                HkRequest::OneShot(unique_id) => {
                                    // TODO: We should check whether the unique ID is even valid.
                                    let target = request.targeted_request.target_id;
                                    assert_eq!(
                                        target.target_id(),
                                        RequestTargetId::AcsSubsystem as u32
                                    );
                                    if request.targeted_request.target_id.target
                                        == AcsHkIds::TestMgmSet as u32
                                    {
                                        let mut sp_header = SpHeader::tm(
                                            PUS_APID,
                                            SequenceFlags::Unsegmented,
                                            0,
                                            0,
                                        )
                                        .unwrap();
                                        let sec_header = PusTmSecondaryHeader::new_simple(
                                            3,
                                            HkSubservice::TmHkPacket as u8,
                                            &timestamp,
                                        );
                                        let mut buf: [u8; 8] = [0; 8];
                                        let hk_id = HkUniqueId::new(target.target_id(), unique_id);
                                        hk_id.write_to_be_bytes(&mut buf).unwrap();
                                        let pus_tm = PusTmCreator::new(
                                            &mut sp_header,
                                            sec_header,
                                            &buf,
                                            true,
                                        );
                                        let addr = aocs_tm_store
                                            .add_pus_tm(&pus_tm)
                                            .expect("Adding PUS TM failed");
                                        aocs_tm_funnel.send(addr).expect("Sending HK TM failed");
                                    }
                                }
                                HkRequest::Enable(_) => {}
                                HkRequest::Disable(_) => {}
                                HkRequest::ModifyCollectionInterval(_, _) => {}
                            },
                            Request::Mode(_mode_req) => {
                                warn!("mode request handling not implemented yet")
                            }
                            Request::Action(_action_req) => {
                                warn!("action request handling not implemented yet")
                            }
                        }
                        let started_token = reporter_aocs
                            .start_success(request.token, Some(&timestamp))
                            .expect("Sending start success failed");
                        reporter_aocs
                            .completion_success(started_token, Some(&timestamp))
                            .expect("Sending completion success failed");
                    }
                    Err(e) => match e {
                        TryRecvError::Empty => {}
                        TryRecvError::Disconnected => {
                            warn!("ACS thread: Message Queue TX disconnected!")
                        }
                    },
                }
                thread::sleep(Duration::from_millis(500));
            }
        .spawn(move || loop {
            acs_task.periodic_operation();
            thread::sleep(Duration::from_millis(FREQ_MS_AOCS));
        })
        .unwrap();

    info!("Starting PUS handler thread");
    let jh4 = thread::Builder::new()
    let jh_pus_handler = thread::Builder::new()
        .name("PUS".to_string())
        .spawn(move || loop {
            pus_11_wrapper.release_tcs();
            loop {
                let mut all_queues_empty = true;
                let mut is_srv_finished = |srv_handler_finished: bool| {
                    if !srv_handler_finished {
                        all_queues_empty = false;
                    }
                };
                is_srv_finished(pus_17_wrapper.handle_next_packet());
                is_srv_finished(pus_11_wrapper.handle_next_packet());
                is_srv_finished(pus_5_wrapper.handle_next_packet());
                is_srv_finished(pus_8_wrapper.handle_next_packet());
                is_srv_finished(pus_3_wrapper.handle_next_packet());
                if all_queues_empty {
                    break;
                }
            }
            thread::sleep(Duration::from_millis(200));
            pus_stack.periodic_operation();
            thread::sleep(Duration::from_millis(FREQ_MS_PUS_STACK));
        })
        .unwrap();

    jh_udp_tmtc
        .join()
        .expect("Joining UDP TMTC server thread failed");
    jh_tcp
        .join()
        .expect("Joining TCP TMTC server thread failed");
    jh1.join().expect("Joining TM Funnel thread failed");
    jh2.join().expect("Joining Event Manager thread failed");
    jh3.join().expect("Joining AOCS thread failed");
    jh4.join().expect("Joining PUS handler thread failed");
    jh_tm_funnel
        .join()
        .expect("Joining TM Funnel thread failed");
    jh_event_handling
        .join()
        .expect("Joining Event Manager thread failed");
    jh_aocs.join().expect("Joining AOCS thread failed");
    jh_pus_handler
        .join()
        .expect("Joining PUS handler thread failed");
}

#[allow(dead_code)]
fn dyn_tmtc_pool_main() {
    let (tc_source_tx, tc_source_rx) = channel();
    let (tm_funnel_tx, tm_funnel_rx) = channel();
    let (tm_server_tx, tm_server_rx) = channel();
    // Every software component which needs to generate verification telemetry, gets a cloned
    // verification reporter.
    let verif_reporter = create_verification_reporter(MpscTmAsVecSender::new(
        TmSenderId::PusVerification as ChannelId,
        "verif_sender",
        tm_funnel_tx.clone(),
    ));

    let acs_target_id = TargetIdWithApid::new(PUS_APID, RequestTargetId::AcsSubsystem as TargetId);
    let (acs_thread_tx, acs_thread_rx) = channel::<RequestWithToken>();
    // Some request are targetable. This map is used to retrieve sender handles based on a target ID.
    let mut request_map = HashMap::new();
    request_map.insert(acs_target_id, acs_thread_tx);

    let tc_source = PusTcSourceProviderDynamic(tc_source_tx);
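    // (With the dyn_tmtc feature, telecommands are presumably forwarded as
    // heap-allocated byte vectors over this mpsc channel rather than being
    // stored in a shared static pool.)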

    // Create event handling components
    // These sender handles are used to send event requests, for example to enable or disable
    // certain events.
    let (event_request_tx, event_request_rx) = mpsc::channel::<EventRequestWithToken>();
    // The event task is the core handler to perform the event routing and TM handling as specified
    // in the sat-rs documentation.
    let mut event_handler = EventHandler::new(
        MpscTmAsVecSender::new(
            TmSenderId::AllEvents as ChannelId,
            "ALL_EVENTS_TX",
            tm_funnel_tx.clone(),
        ),
        verif_reporter.clone(),
        event_request_rx,
    );

    let (pus_test_tx, pus_test_rx) = channel();
    let (pus_event_tx, pus_event_rx) = channel();
    let (pus_sched_tx, pus_sched_rx) = channel();
    let (pus_hk_tx, pus_hk_rx) = channel();
    let (pus_action_tx, pus_action_rx) = channel();
    let pus_router = PusTcMpscRouter {
        test_service_receiver: pus_test_tx,
        event_service_receiver: pus_event_tx,
        sched_service_receiver: pus_sched_tx,
        hk_service_receiver: pus_hk_tx,
        action_service_receiver: pus_action_tx,
    };

    let pus_test_service = create_test_service_dynamic(
        tm_funnel_tx.clone(),
        verif_reporter.clone(),
        event_handler.clone_event_sender(),
        pus_test_rx,
    );
    let pus_scheduler_service = create_scheduler_service_dynamic(
        tm_funnel_tx.clone(),
        verif_reporter.clone(),
        tc_source.0.clone(),
        pus_sched_rx,
        create_sched_tc_pool(),
    );

    let pus_event_service = create_event_service_dynamic(
        tm_funnel_tx.clone(),
        verif_reporter.clone(),
        pus_event_rx,
        event_request_tx,
    );
    let pus_action_service = create_action_service_dynamic(
        tm_funnel_tx.clone(),
        verif_reporter.clone(),
        pus_action_rx,
        request_map.clone(),
    );
    let pus_hk_service = create_hk_service_dynamic(
        tm_funnel_tx.clone(),
        verif_reporter.clone(),
        pus_hk_rx,
        request_map,
    );
    let mut pus_stack = PusStack::new(
        pus_hk_service,
        pus_event_service,
        pus_action_service,
        pus_scheduler_service,
        pus_test_service,
    );

    let ccsds_receiver = CcsdsReceiver { tc_source };

    let mut tmtc_task = TmtcTaskDynamic::new(
        tc_source_rx,
        PusReceiver::new(verif_reporter.clone(), pus_router),
    );

    let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
    let udp_ccsds_distributor = CcsdsDistributor::new(Box::new(ccsds_receiver.clone()));
    let udp_tc_server = UdpTcServer::new(sock_addr, 2048, Box::new(udp_ccsds_distributor))
        .expect("creating UDP TMTC server failed");
    let mut udp_tmtc_server = UdpTmtcServer {
        udp_tc_server,
        tm_handler: DynamicUdpTmHandler {
            tm_rx: tm_server_rx,
        },
    };

    let tcp_ccsds_distributor = CcsdsDistributor::new(Box::new(ccsds_receiver));
    let tcp_server_cfg = ServerConfig::new(sock_addr, Duration::from_millis(400), 4096, 8192);
    let sync_tm_tcp_source = SyncTcpTmSource::new(200);
    let mut tcp_server = TcpTask::new(
        tcp_server_cfg,
        sync_tm_tcp_source.clone(),
        tcp_ccsds_distributor,
    )
    .expect("tcp server creation failed");

    let mut acs_task = AcsTask::new(
        MpscTmAsVecSender::new(
            TmSenderId::AcsSubsystem as ChannelId,
            "ACS_TASK_SENDER",
            tm_funnel_tx.clone(),
        ),
        acs_thread_rx,
        verif_reporter,
    );
    let mut tm_funnel = TmFunnelDynamic::new(sync_tm_tcp_source, tm_funnel_rx, tm_server_tx);

    info!("Starting TMTC and UDP task");
    let jh_udp_tmtc = thread::Builder::new()
        .name("TMTC and UDP".to_string())
        .spawn(move || {
            info!("Running UDP server on port {SERVER_PORT}");
            loop {
                udp_tmtc_server.periodic_operation();
                tmtc_task.periodic_operation();
                thread::sleep(Duration::from_millis(FREQ_MS_UDP_TMTC));
            }
        })
        .unwrap();

    info!("Starting TCP task");
    let jh_tcp = thread::Builder::new()
        .name("TCP".to_string())
        .spawn(move || {
            info!("Running TCP server on port {SERVER_PORT}");
            loop {
                tcp_server.periodic_operation();
            }
        })
        .unwrap();

    info!("Starting TM funnel task");
    let jh_tm_funnel = thread::Builder::new()
        .name("TM Funnel".to_string())
        .spawn(move || loop {
            tm_funnel.operation();
        })
        .unwrap();

    info!("Starting event handling task");
    let jh_event_handling = thread::Builder::new()
        .name("Event".to_string())
        .spawn(move || loop {
            event_handler.periodic_operation();
            thread::sleep(Duration::from_millis(FREQ_MS_EVENT_HANDLING));
        })
        .unwrap();

    info!("Starting AOCS thread");
    let jh_aocs = thread::Builder::new()
        .name("AOCS".to_string())
        .spawn(move || loop {
            acs_task.periodic_operation();
            thread::sleep(Duration::from_millis(FREQ_MS_AOCS));
        })
        .unwrap();

    info!("Starting PUS handler thread");
    let jh_pus_handler = thread::Builder::new()
        .name("PUS".to_string())
        .spawn(move || loop {
            pus_stack.periodic_operation();
            thread::sleep(Duration::from_millis(FREQ_MS_PUS_STACK));
        })
        .unwrap();

    jh_udp_tmtc
        .join()
        .expect("Joining UDP TMTC server thread failed");
    jh_tcp
        .join()
        .expect("Joining TCP TMTC server thread failed");
    jh_tm_funnel
        .join()
        .expect("Joining TM Funnel thread failed");
    jh_event_handling
        .join()
        .expect("Joining Event Manager thread failed");
    jh_aocs.join().expect("Joining AOCS thread failed");
    jh_pus_handler
        .join()
        .expect("Joining PUS handler thread failed");
}

fn main() {
    setup_logger().expect("setting up logging with fern failed");
    println!("Running OBSW example");
    #[cfg(not(feature = "dyn_tmtc"))]
    static_tmtc_pool_main();
    #[cfg(feature = "dyn_tmtc")]
    dyn_tmtc_pool_main();
}

pub fn update_time(time_provider: &mut TimeProvider, timestamp: &mut [u8]) {