#![allow(dead_code)]
#![allow(unused_variables)]
// TODO: Remove these lint allowances once the application is fully developed.

mod action;
mod aocs;
mod cam;
#[cfg(feature = "can")]
mod can;
mod can_ids;
mod ccsds;
mod helpers;
mod hk;
mod logger;
mod messages;
mod pld_handler;
mod power_handler;
mod pus;
mod requests;
mod tmtc;

use crate::requests::RequestWithToken;
use crate::tmtc::{
    core_tmtc_task, OtherArgs, PusTcSource, TcArgs, TcStore, TmArgs, TmFunnel, TmStore,
    AOCS_APID, AOCS_HK_APID, CSS_APID, MGM_APID, MGT_APID, PLD_APID, PUS_APID, PWR_APID,
    RWL_APID, STR_APID, GPS_APID,
};
use eurosim_obsw::{RequestTargetId, OBSW_SERVER_ADDR, SERVER_PORT};
use satrs_core::event_man::{
    EventManagerWithMpscQueue, MpscEventReceiver, MpscEventU32SendProvider, SendEventProvider,
};
use satrs_core::events::EventU32;
use satrs_core::pool::{LocalPool, PoolCfg};
use satrs_core::pus::event_man::{
    DefaultPusMgmtBackendProvider, EventReporter, EventRequestWithToken, PusEventDispatcher,
};
use satrs_core::pus::verification::{
    MpscVerifSender, VerificationReporterCfg, VerificationReporterWithSender,
};
use satrs_core::seq_count::SeqCountProviderSyncClonable;
use strum::IntoEnumIterator;

use crate::can_ids::{
    load_message_id_to_apids, load_message_id_to_threads, DeviceId, MessageModel,
};
use chrono::Duration;
use log::info;
use satrs_core::power::{SwitchId, SwitchState};
use std::collections::HashMap;
use std::net::{IpAddr, SocketAddr};
use std::sync::mpsc::channel;
use std::sync::{mpsc, Arc, Mutex, RwLock};
use std::thread;
//use libc::time64_t;
use crate::aocs::{AocsController, MGMHandler, STRHandler, GPSHandler, CSSHandler};
#[cfg(feature = "can")]
use crate::can::CanTxHandler;
use crate::helpers::{ModeHelper, VerifHelper};
use crate::hk::{AocsDataMap, AocsHousekeeper};
use crate::pld_handler::core_pld_task;
use crate::power_handler::{core_power_task, DeviceState, PowerSwitcher};

fn main() {
    logger::setup_logger().unwrap();
    info!("Running DemoSat OBSW!");

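    // Backing memory for the TM and TC stores. Each (count, size) pair should configure one
    // sub-pool of `count` buckets with a capacity of `size` bytes (see satrs-core `PoolCfg`).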
    let tm_pool = LocalPool::new(PoolCfg::new(vec![
        (30, 32),
        (15, 64),
        (15, 128),
        (15, 256),
        (15, 1024),
        (15, 2048),
    ]));
    let tm_store = TmStore {
        pool: Arc::new(RwLock::new(Box::new(tm_pool))),
    };
    let tc_pool = LocalPool::new(PoolCfg::new(vec![
        (30, 32),
        (15, 64),
        (15, 128),
        (15, 256),
        (15, 1024),
        (15, 2048),
    ]));
    let tc_store = TcStore {
        pool: Arc::new(RwLock::new(Box::new(tc_pool))),
    };

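    // Sequence counters for the generated space packets. One shared counter is cloned for
    // verification and TMTC use, while each AOCS component gets its own counter, presumably
    // to keep per-APID sequence counts independent.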
    let seq_count_provider = SeqCountProviderSyncClonable::default();
    let msg_count_provider = SeqCountProviderSyncClonable::default();
    let aocs_hk_seq_count_provider = SeqCountProviderSyncClonable::default();
    let aocs_ctrl_seq_count_provider = SeqCountProviderSyncClonable::default();
    let mgm_seq_count_provider = SeqCountProviderSyncClonable::default();
    let css_seq_count_provider = SeqCountProviderSyncClonable::default();
    let str_seq_count_provider = SeqCountProviderSyncClonable::default();
    let gps_seq_count_provider = SeqCountProviderSyncClonable::default();
    let verif_seq_count_provider = seq_count_provider.clone();
    let tmtc_seq_count_provider = seq_count_provider.clone();

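    // TMTC plumbing: telecommands flow through the tc_source channel, telemetry store
    // addresses flow through the funnel channel towards the TM server channel. The
    // verification sender gets the shared TM pool and the funnel sender, so its reports
    // presumably take the same downlink path.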
    let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
    let (tc_source_tx, tc_source_rx) = channel();
    let (tm_funnel_tx, tm_funnel_rx) = channel();
    let (tm_server_tx, tm_server_rx) = channel();
    let verif_sender =
        MpscVerifSender::new(1, "name", tm_store.pool.clone(), tm_funnel_tx.clone());
    let verif_cfg = VerificationReporterCfg::new(
        PUS_APID,
        Box::new(verif_seq_count_provider.clone()),
        Box::new(msg_count_provider),
        1,
        2,
        8,
    )
    .unwrap();

    let verif_reporter = VerificationReporterWithSender::new(&verif_cfg, Box::new(verif_sender));

    // Create event handling components
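    // Events sent via event_sender are consumed by the event manager; all of them are
    // subscribed to the PUS event dispatcher's send provider, which is meant to turn them
    // into PUS event telemetry (the consuming side is not wired up here yet).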
    let (event_request_tx, event_request_rx) = channel::<EventRequestWithToken>();
    let (event_sender, event_man_rx) = channel();
    let event_recv = MpscEventReceiver::<EventU32>::new(event_man_rx);
    let mut event_man = EventManagerWithMpscQueue::new(Box::new(event_recv));
    let event_reporter = EventReporter::new(PUS_APID, 128).unwrap();
    let pus_tm_backend = DefaultPusMgmtBackendProvider::<EventU32>::default();
    let pus_event_dispatcher = PusEventDispatcher::new(event_reporter, Box::new(pus_tm_backend));
    let (pus_event_man_tx, pus_event_man_rx) = channel();
    let pus_event_man_send_provider = MpscEventU32SendProvider::new(1, pus_event_man_tx);
    let reporter_event_handler = verif_reporter.clone();
    let reporter_aocs = verif_reporter.clone();
    let mut reporter_pld = verif_reporter.clone();
    event_man.subscribe_all(pus_event_man_send_provider.id());

    // Possibly deprecated: the APID should replace the target ID.
    let mut request_map = HashMap::new();
    let (aocs_thread_tx, aocs_thread_rx) = channel::<RequestWithToken>();
    let (pld_thread_tx, pld_thread_rx) = channel::<RequestWithToken>();
    let (pwr_thread_tx, pwr_thread_rx) = channel::<RequestWithToken>();
    let (aocs_hk_tx, aocs_hk_rx) = channel::<RequestWithToken>();
    let (mgm_tx, mgm_rx) = channel::<RequestWithToken>();
    let (css_tx, css_rx) = channel::<RequestWithToken>();
    let (str_tx, str_rx) = channel::<RequestWithToken>();
    let (gps_tx, gps_rx) = channel::<RequestWithToken>();
    let (mgt_tx, mgt_rx) = channel::<RequestWithToken>();
    let (rwl_tx, rwl_rx) = channel::<RequestWithToken>();
    request_map.insert(RequestTargetId::AcsSubsystem as u32, aocs_thread_tx.clone());
    request_map.insert(RequestTargetId::PldSubsystem as u32, pld_thread_tx.clone());
    // Add further receivers here for the TMTC task to send requests to.
    //request_map.insert(RequestTargetId::CanTask as u32, can_thread_tx);

    let tc_source = PusTcSource {
        tc_store,
        tc_source: tc_source_tx,
    };

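    // Map APIDs to the request channels of the threads handling them; presumably used by
    // the TMTC task to route incoming telecommands to the right handler.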
    let mut apid_map = HashMap::new();
    apid_map.insert(PLD_APID, pld_thread_tx.clone());
    apid_map.insert(PWR_APID, pwr_thread_tx.clone());
    apid_map.insert(AOCS_APID, aocs_thread_tx.clone());
    apid_map.insert(AOCS_HK_APID, aocs_hk_tx.clone());
    apid_map.insert(MGM_APID, mgm_tx.clone());
    apid_map.insert(CSS_APID, css_tx.clone());
    apid_map.insert(STR_APID, str_tx.clone());
    apid_map.insert(GPS_APID, gps_tx.clone());
    apid_map.insert(MGT_APID, mgt_tx.clone());
    apid_map.insert(RWL_APID, rwl_tx.clone());

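    // Bundle the resources for the TMTC task into its argument structs so the spawned
    // thread can take ownership of them in one move.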
    // Create clones here to allow moving the values
    let core_args = OtherArgs {
        sock_addr,
        verif_reporter: verif_reporter.clone(),
        event_sender,
        event_request_tx,
        request_map,
        apid_map,
        seq_count_provider: verif_seq_count_provider,
    };
    let tc_args = TcArgs {
        tc_source,
        tc_receiver: tc_source_rx,
    };
    let tm_args = TmArgs {
        tm_store: tm_store.clone(),
        tm_sink_sender: tm_funnel_tx.clone(),
        tm_server_rx,
    };

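    // One channel per device node; the senders are collected in can_senders below and
    // handed to the CAN RX handler, which forwards incoming CAN messages to the matching
    // device handler thread.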
    let (mgm_can_tx, mgm_can_rx) = mpsc::channel::<MessageModel>();
    let (mgt_can_tx, mgt_can_rx) = mpsc::channel::<MessageModel>();
    let (css_can_tx, css_can_rx) = mpsc::channel::<MessageModel>();
    let (rwl_can_tx, rwl_can_rx) = mpsc::channel::<MessageModel>();
    let (str_can_tx, str_can_rx) = mpsc::channel::<MessageModel>();
    let (gps_can_tx, gps_can_rx) = mpsc::channel::<MessageModel>();
    let (power_can_tx, power_can_rx) = mpsc::channel::<MessageModel>();
    let (pld_can_tx, pld_can_rx) = mpsc::channel::<MessageModel>();

    // Map device APIDs to the senders used to forward incoming CAN messages to their
    // handler threads.
    let mut can_senders = HashMap::new();
    can_senders.insert(PWR_APID, power_can_tx);
    can_senders.insert(PLD_APID, pld_can_tx);
    can_senders.insert(MGM_APID, mgm_can_tx);
    can_senders.insert(MGT_APID, mgt_can_tx);
    can_senders.insert(RWL_APID, rwl_can_tx);
    can_senders.insert(CSS_APID, css_can_tx);
    can_senders.insert(STR_APID, str_can_tx);
    can_senders.insert(GPS_APID, gps_can_tx);

    // Load the mapping from CAN message IDs to APIDs.
    let message_ids_rx = load_message_id_to_apids();

info!("Starting TMTC task");
|
|
let builder0 = thread::Builder::new().name("TMTCThread".into());
|
|
let jh0 = builder0.spawn(move || {
|
|
core_tmtc_task(core_args, tc_args, tm_args);
|
|
});
|
|
|
|
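    // All handlers share can_tx_sender to queue outgoing CAN messages; the CAN TX task
    // below owns can_tx_receiver and, with the `can` feature enabled, writes the queued
    // messages onto the bus.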
    let (can_tx_sender, can_tx_receiver) = channel();

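    // CAN RX: the socket handler presumably dispatches incoming frames to the per-device
    // channels in can_senders, using the loaded message ID to APID mapping. With the `can`
    // feature disabled, the cfg-gated statements compile out and the spawned threads just
    // run empty loops.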
    #[cfg(feature = "can")]
    let mut can_rx_socket =
        can::CanRxHandler::new_socket("can0", can_senders.clone(), message_ids_rx.clone())
            .unwrap();

    #[cfg(feature = "can")]
    info!("Starting CAN Socket listening task");
    let builder1 = thread::Builder::new().name("CanRxHandler".into());
    let jh1 = builder1.spawn(move || loop {
        #[cfg(feature = "can")]
        can_rx_socket.process_incoming();
    });

    #[cfg(feature = "can")]
    let mut can_tx_socket =
        CanTxHandler::new_socket("can0", message_ids_rx.clone(), can_tx_receiver).unwrap();

    #[cfg(feature = "can")]
    info!("Starting CAN Socket writing task");
    let builder_can_tx = thread::Builder::new().name("TxHandler".into());
    let jh_can_tx = builder_can_tx.spawn(move || loop {
        #[cfg(feature = "can")]
        can_tx_socket.process_incoming();
    });

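    // Power handling: the PowerSwitcher sends (SwitchId, SwitchState) commands over the
    // PCDU channel to the power task, which also shares the device switch state map with
    // the rest of the application.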
    let (pcdu_tx, pcdu_rx) = mpsc::channel::<(SwitchId, SwitchState)>();
    let pcdu_can_tx_sender = can_tx_sender.clone();

    let mut device_state_map = HashMap::new();
    for id in DeviceId::iter() {
        device_state_map.insert(id, SwitchState::Off);
    }
    let clonable_device_state_map = Arc::new(Mutex::new(device_state_map));

    let power_switcher = PowerSwitcher::new(pcdu_tx, clonable_device_state_map.clone());

info!("Starting Power Handling task");
|
|
let builder2 = thread::Builder::new().name("PowerThread".into());
|
|
let jh2 = builder2.spawn(move || {
|
|
core_power_task(
|
|
pcdu_rx,
|
|
pcdu_can_tx_sender,
|
|
power_can_rx,
|
|
clonable_device_state_map.clone(),
|
|
);
|
|
});
|
|
|
|
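    // Payload handling: the PLD task gets its own clones of the power switcher, CAN TX
    // sender, request receiver and verification reporter.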
    let package_map_pld_tx = load_message_id_to_threads();
    let pld_tm_funnel_tx = tm_funnel_tx.clone();
    let pld_tm_store = tm_store.clone();

    let pld_can_tx_sender = can_tx_sender.clone();

    let power_switcher_pld = power_switcher.clone();

    //let mut pcdu_tx_clone = pcdu_tx.clone();
info!("Starting Payload Handling task");
|
|
let builder3 = thread::Builder::new().name("PLDThread".into());
|
|
let jh3 = builder3.spawn(move || {
|
|
core_pld_task(
|
|
power_switcher_pld.clone(),
|
|
pld_thread_rx,
|
|
pld_can_rx,
|
|
pld_can_tx_sender,
|
|
&mut reporter_pld,
|
|
);
|
|
});
|
|
|
|
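    // TM funnel: receives the store addresses of generated telemetry from all producers
    // and forwards them to the TM server channel read by the TMTC task.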
info!("Starting TM funnel task");
|
|
let builder4 = thread::Builder::new().name("TMFunnelThread".into());
|
|
let jh4 = builder4.spawn(move || {
|
|
let tm_funnel = TmFunnel {
|
|
tm_server_tx,
|
|
tm_funnel_rx,
|
|
};
|
|
loop {
|
|
if let Ok(addr) = tm_funnel.tm_funnel_rx.recv() {
|
|
tm_funnel
|
|
.tm_server_tx
|
|
.send(addr)
|
|
.expect("Sending TM to server failed");
|
|
}
|
|
}
|
|
});
|
|
|
|
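    // Shared AOCS data map, presumably holding the latest sensor readings; it is accessed
    // by both the housekeeper and the individual device handlers.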
    let package_map_aocs_tx = load_message_id_to_threads();
    let aocs_tm_funnel_tx = tm_funnel_tx.clone();
    let aocs_tm_store = tm_store.clone();

    let aocs_data_not_threadsafe = AocsDataMap::new();
    /*
    aocs_data_not_threadsafe
        .update_value(1, AocsDataType::float_value(1.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(2, AocsDataType::float_value(2.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(3, AocsDataType::float_value(3.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(4, AocsDataType::float_value(4.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(5, AocsDataType::float_value(5.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(6, AocsDataType::float_value(6.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(7, AocsDataType::float_value(7.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(8, AocsDataType::float_value(8.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(9, AocsDataType::float_value(9.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(10, AocsDataType::float_value(10.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(11, AocsDataType::float_value(11.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(12, AocsDataType::float_value(12.0))
        .unwrap();
    aocs_data_not_threadsafe
        .update_value(13, AocsDataType::float_value(13.0))
        .unwrap();
    */
    let aocs_data = Arc::new(Mutex::new(aocs_data_not_threadsafe));

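    // The AOCS thread owns the housekeeper, the AOCS controller and one handler per sensor
    // (MGM, star tracker, sun sensor, GPS), all of which are polled cyclically below.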
info!("Starting AOCS task");
|
|
let builder5 = thread::Builder::new().name("AOCSThread".into());
|
|
let jh5 = builder5.spawn(move || {
|
|
let mut aocs_housekeeper = AocsHousekeeper::new_with_collection_interval(
|
|
aocs_data.clone(),
|
|
aocs_hk_rx,
|
|
aocs_hk_seq_count_provider,
|
|
aocs_tm_store.clone(),
|
|
aocs_tm_funnel_tx.clone(),
|
|
reporter_aocs.clone(),
|
|
Duration::seconds(1),
|
|
);
|
|
|
|
        let aocs_verif_helper = VerifHelper::new(verif_reporter.clone());
        let aocs_controller_mode_helper = ModeHelper::new(
            AOCS_APID,
            aocs_ctrl_seq_count_provider.clone(),
            aocs_tm_store.clone(),
            aocs_tm_funnel_tx.clone(),
        );

        let mut aocs_controller = AocsController::new(
            aocs_thread_rx,
            mgm_tx.clone(),
            css_tx.clone(),
            str_tx.clone(),
            gps_tx.clone(),
            aocs_ctrl_seq_count_provider.clone(),
            aocs_verif_helper.clone(),
            aocs_controller_mode_helper,
        );

        let mgm_mode_helper = ModeHelper::new(
            MGM_APID,
            mgm_seq_count_provider,
            aocs_tm_store.clone(),
            aocs_tm_funnel_tx.clone(),
        );

        let mut mgm_handler = MGMHandler::new(
            power_switcher.clone(),
            DeviceId::MGM1,
            DeviceState::Off,
            aocs_data.clone(),
            can_tx_sender.clone(),
            mgm_can_rx,
            mgm_rx,
            aocs_verif_helper.clone(),
            mgm_mode_helper,
        );

        let str_mode_helper = ModeHelper::new(
            STR_APID,
            str_seq_count_provider,
            aocs_tm_store.clone(),
            aocs_tm_funnel_tx.clone(),
        );

        let mut str_handler = STRHandler::new(
            power_switcher.clone(),
            DeviceId::StarTracker,
            DeviceState::Off,
            aocs_data.clone(),
            can_tx_sender.clone(),
            str_can_rx,
            str_rx,
            aocs_verif_helper.clone(),
            str_mode_helper,
        );

        let css_mode_helper = ModeHelper::new(
            CSS_APID,
            css_seq_count_provider,
            aocs_tm_store.clone(),
            aocs_tm_funnel_tx.clone(),
        );

        let mut css_handler = CSSHandler::new(
            power_switcher.clone(),
            DeviceId::SunSensor1,
            DeviceState::Off,
            aocs_data.clone(),
            can_tx_sender.clone(),
            css_can_rx,
            css_rx,
            aocs_verif_helper.clone(),
            css_mode_helper,
        );

        let gps_mode_helper = ModeHelper::new(
            GPS_APID,
            gps_seq_count_provider,
            aocs_tm_store.clone(),
            aocs_tm_funnel_tx.clone(),
        );

        let mut gps_handler = GPSHandler::new(
            power_switcher.clone(),
            DeviceId::GPS,
            DeviceState::Off,
            aocs_data.clone(),
            can_tx_sender.clone(),
            gps_can_rx,
            gps_rx,
            aocs_verif_helper.clone(),
            gps_mode_helper,
        );

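        // Main AOCS loop: each component is polled via periodic_op. There is no explicit
        // delay, so the loop runs back to back.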
        loop {
            aocs_housekeeper.periodic_op();
            aocs_controller.periodic_op();
            mgm_handler.periodic_op();
            str_handler.periodic_op();
            css_handler.periodic_op();
            gps_handler.periodic_op();
        }
    });

    jh0.unwrap()
        .join()
        .expect("Joining UDP TMTC server thread failed");

    jh1.unwrap()
        .join()
        .expect("Joining CAN Bus Listening thread failed");
    jh_can_tx
        .unwrap()
        .join()
        .expect("Joining CAN Bus Writing thread failed");
    jh2.unwrap().join().expect("Joining power thread failed");
    jh3.unwrap().join().expect("Joining PLD thread failed");
    jh4.unwrap()
        .join()
        .expect("Joining TM funnel thread failed");
    jh5.unwrap().join().expect("Joining AOCS thread failed");
}

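// Presumably a placeholder for raw three-axis magnetometer (MGM) readings; currently unused.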
#[derive(Default)]
struct MgmData {
    x: i16,
    y: i16,
    z: i16,
}