417 lines
15 KiB
Rust
417 lines
15 KiB
Rust
mod action;
|
|
mod aocs;
|
|
mod aocs_handler;
|
|
mod cam;
|
|
#[cfg(feature = "can")]
|
|
mod can;
|
|
mod can_ids;
|
|
mod ccsds;
|
|
mod hk;
|
|
mod logger;
|
|
mod pld_handler;
|
|
mod power_handler;
|
|
mod pus;
|
|
mod requests;
|
|
mod tmtc;
|
|
|
|
use crate::requests::{RequestWithToken};
|
|
use crate::tmtc::{
|
|
core_tmtc_task, OtherArgs, PusTcSource, TcArgs, TcStore, TmArgs, TmFunnel, TmStore, PUS_APID,
|
|
};
|
|
use eurosim_obsw::{RequestTargetId, OBSW_SERVER_ADDR, SERVER_PORT};
|
|
use satrs_core::event_man::{
|
|
EventManagerWithMpscQueue, MpscEventReceiver, MpscEventU32SendProvider, SendEventProvider,
|
|
};
|
|
use satrs_core::events::EventU32;
|
|
use satrs_core::pool::{LocalPool, PoolCfg, StoreAddr};
|
|
use satrs_core::pus::event_man::{
|
|
DefaultPusMgmtBackendProvider, EventReporter, EventRequestWithToken, PusEventDispatcher,
|
|
};
|
|
use satrs_core::pus::verification::{
|
|
MpscVerifSender, VerificationReporterCfg, VerificationReporterWithSender,
|
|
};
|
|
use satrs_core::pus::{EcssTmErrorWithSend, EcssTmSenderCore};
|
|
use satrs_core::seq_count::SeqCountProviderSyncClonable;
|
|
use satrs_core::{spacepackets::tm::PusTm};
|
|
use strum::IntoEnumIterator;
|
|
|
|
use crate::can_ids::{
|
|
load_package_ids, DeviceId, PackageModel, ThreadId,
|
|
};
|
|
use log::{info};
|
|
use satrs_core::power::{SwitchId, SwitchState};
|
|
use std::collections::HashMap;
|
|
use std::net::{IpAddr, SocketAddr};
|
|
use std::sync::mpsc::channel;
|
|
use std::sync::{mpsc, Arc, Mutex, RwLock};
|
|
use std::thread;
|
|
//use libc::time64_t;
|
|
use crate::action::ActionRequest;
|
|
use crate::aocs_handler::MGMData;
|
|
#[cfg(feature = "can")]
|
|
use crate::can::CanTxHandler;
|
|
use crate::hk::{AocsHousekeeper, AocsSensorData};
|
|
use crate::pld_handler::{core_pld_task};
|
|
use crate::power_handler::{core_power_task, PowerSwitcher};
|
|
|
|
/// TM sender handed to event-handling code: persists a generated PUS TM
/// packet in the shared TM pool and forwards its store address to the TM
/// funnel (see the `EcssTmSenderCore` impl below).
#[derive(Clone)]
struct EventTmSender {
    // Shared TM pool wrapper; `add_pus_tm` is used to persist packets.
    store_helper: TmStore,
    // Channel towards the TM funnel; carries pool addresses, not packet bytes.
    sender: mpsc::Sender<StoreAddr>,
}
|
|
|
|
impl EventTmSender {
|
|
fn new(store_helper: TmStore, sender: mpsc::Sender<StoreAddr>) -> Self {
|
|
Self {
|
|
store_helper,
|
|
sender,
|
|
}
|
|
}
|
|
}
|
|
|
|
impl EcssTmSenderCore for EventTmSender {
|
|
type Error = mpsc::SendError<StoreAddr>;
|
|
|
|
fn send_tm(&mut self, tm: PusTm) -> Result<(), EcssTmErrorWithSend<Self::Error>> {
|
|
let addr = self.store_helper.add_pus_tm(&tm);
|
|
self.sender
|
|
.send(addr)
|
|
.map_err(EcssTmErrorWithSend::SendError)
|
|
}
|
|
}
|
|
|
|
/// OBSW entry point: builds the shared TM/TC pools, the verification and
/// event-handling plumbing, wires up inter-thread channels, spawns the
/// TMTC / power / payload / TM-funnel / AOCS (and optionally CAN) worker
/// threads, and finally joins them.
fn main() {
    println!("Running SESPSat OBSW");

    // Bring the logger up first so all subsequent tasks can use `info!`.
    logger::setup_logger().unwrap();

    // TM pool configuration; tuples are presumably (slot count, slot size
    // in bytes) — see satrs-core `PoolCfg` docs to confirm.
    let tm_pool = LocalPool::new(PoolCfg::new(vec![
        (30, 32),
        (15, 64),
        (15, 128),
        (15, 256),
        (15, 1024),
        (15, 2048),
    ]));
    // Shared, lock-protected TM store; cloned into every TM-producing task.
    let tm_store = TmStore {
        pool: Arc::new(RwLock::new(Box::new(tm_pool))),
    };
    // TC pool mirrors the TM pool bucket layout.
    let tc_pool = LocalPool::new(PoolCfg::new(vec![
        (30, 32),
        (15, 64),
        (15, 128),
        (15, 256),
        (15, 1024),
        (15, 2048),
    ]));
    let tc_store = TcStore {
        pool: Arc::new(RwLock::new(Box::new(tc_pool))),
    };

    // One shared packet sequence counter; the AOCS task gets its own clone.
    let seq_count_provider = SeqCountProviderSyncClonable::default();
    let aocs_seq_count_provider = seq_count_provider.clone();

    let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
    // Channel topology: tc_source (server -> TC handling), tm_funnel
    // (all tasks -> funnel), tm_server (funnel -> UDP server).
    let (tc_source_tx, tc_source_rx) = channel();
    let (tm_funnel_tx, tm_funnel_rx) = channel();
    let (tm_server_tx, tm_server_rx) = channel();
    // Verification TM goes straight into the TM store and the funnel.
    let verif_sender = MpscVerifSender::new(tm_store.pool.clone(), tm_funnel_tx.clone());
    // Trailing numeric arguments (1, 2, 8) are satrs-core reporter sizing
    // parameters — confirm their meaning against VerificationReporterCfg docs.
    let verif_cfg = VerificationReporterCfg::new(
        PUS_APID,
        #[allow(clippy::box_default)]
        Box::new(seq_count_provider.clone()),
        1,
        2,
        8,
    )
    .unwrap();

    let verif_reporter = VerificationReporterWithSender::new(&verif_cfg, Box::new(verif_sender));

    // Create event handling components
    // NOTE(review): `event_request_rx`, `pus_event_man_rx`,
    // `pus_event_dispatcher` and `reporter_event_handler` are created but
    // never consumed in this function — presumably an event-handler thread
    // is still to be added; confirm before removing.
    let (event_request_tx, event_request_rx) = channel::<EventRequestWithToken>();
    let (event_sender, event_man_rx) = channel();
    let event_recv = MpscEventReceiver::<EventU32>::new(event_man_rx);
    let mut event_man = EventManagerWithMpscQueue::new(Box::new(event_recv));
    let event_reporter = EventReporter::new(PUS_APID, 128).unwrap();
    let pus_tm_backend = DefaultPusMgmtBackendProvider::<EventU32>::default();
    let mut pus_event_dispatcher =
        PusEventDispatcher::new(event_reporter, Box::new(pus_tm_backend));
    let (pus_event_man_tx, pus_event_man_rx) = channel();
    let pus_event_man_send_provider = MpscEventU32SendProvider::new(1, pus_event_man_tx);
    let mut reporter_event_handler = verif_reporter.clone();
    let mut reporter_aocs = verif_reporter.clone();
    let mut reporter_pld = verif_reporter.clone();
    // Route every event to the PUS event manager send provider (id 1).
    event_man.subscribe_all(pus_event_man_send_provider.id());

    // Request routing: TMTC task looks up the target id and forwards the
    // request to the owning subsystem thread.
    let mut request_map = HashMap::new();
    let (aocs_thread_tx, aocs_thread_rx) = channel::<RequestWithToken>();
    let (pld_thread_tx, pld_thread_rx) = channel::<RequestWithToken>();
    request_map.insert(RequestTargetId::AcsSubsystem as u32, aocs_thread_tx);
    request_map.insert(RequestTargetId::PldSubsystem as u32, pld_thread_tx);
    //add here receivers for tmtc task to send requests to
    //request_map.insert(RequestTargetId::CanTask as u32, can_thread_tx);

    // TC source bundles the TC pool with the channel into the TC handler.
    let tc_source = PusTcSource {
        tc_store,
        tc_source: tc_source_tx,
    };

    // Create clones here to allow moving the values
    let core_args = OtherArgs {
        sock_addr,
        verif_reporter,
        event_sender,
        event_request_tx,
        request_map,
    };
    let tc_args = TcArgs {
        tc_source,
        tc_receiver: tc_source_rx,
    };
    let tm_args = TmArgs {
        tm_store: tm_store.clone(),
        tm_sink_sender: tm_funnel_tx.clone(),
        tm_server_rx,
    };

    // Per-subsystem CAN receive channels; the CAN RX thread (see the
    // cfg(feature = "can") block below) demultiplexes frames into these.
    let (aocs_can_tx, aocs_can_rx) = mpsc::channel::<PackageModel>();
    let (power_can_tx, power_can_rx) = mpsc::channel::<PackageModel>();
    let (pld_can_tx, pld_can_rx) = mpsc::channel::<PackageModel>();

    // make tx thread id hashmap
    let mut can_senders = HashMap::new();
    can_senders.insert(ThreadId::AOCSThread, aocs_can_tx);
    can_senders.insert(ThreadId::PowerThread, power_can_tx);
    can_senders.insert(ThreadId::PLDThread, pld_can_tx);

    // get package id hashmap
    let package_ids_rx = load_package_ids();

    info!("Starting TMTC task");
    let builder0 = thread::Builder::new().name("TMTCThread".into());
    let jh0 = builder0.spawn(move || {
        core_tmtc_task(core_args, tc_args, tm_args);
    });

    // Single multiplexed channel feeding the CAN TX thread.
    let (can_tx_sender, can_tx_receiver) = channel();

    // PCDU switch commands: (switch id, requested state).
    let (pcdu_tx, pcdu_rx) = mpsc::channel::<(SwitchId, SwitchState)>();
    let pcdu_can_tx_sender =
        can_tx_sender.clone();

    // All known devices start in the Off state.
    let mut device_state_map = HashMap::new();
    for id in DeviceId::iter() {
        device_state_map.insert(id, SwitchState::Off);
    }
    let clonable_device_state_map = Arc::new(Mutex::new(device_state_map));

    let mut power_switcher = PowerSwitcher::new(pcdu_tx, clonable_device_state_map.clone());

    info!("Starting power task");
    let builder2 = thread::Builder::new().name("PowerThread".into());
    let jh2 = builder2.spawn(move || {
        core_power_task(
            pcdu_rx,
            pcdu_can_tx_sender,
            power_can_rx,
            clonable_device_state_map.clone(),
        );
    });

    /*
    // AOCS Thread
    let socket1 =
        can::CanTxHandler::new_socket("can0", ThreadId::AOCSThread, package_map_aocs_tx).unwrap();
    info!("Starting AOCS receiving thread");
    let builder2 = thread::Builder::new().name("AOCSThread".into());
    let jh2 = builder2.spawn(move || {
        let mut time_stamp_buf: [u8; 7] = [0; 7];
        let mut huge_buf: [u8; 8192] = [0; 8192];
        let data: [u8; 3] = [1, 2, 3];
        let current_mgm_data = MgmData::default();
        //let current_mgt_data = MgtData::default();
        //let current_
        loop {
            // device handling
            //info!("Sending {:?}", PackageId::AOCSControlMGT1);
            //socket1.tx_socket(PackageId::AOCSControlMGT1, &data);
            //info!("Waiting for {:?}", PackageId::AOCSDataMGM1);
            let msg = aocs_can_rx.try_recv();
            //current_mgm_data.x = new_data
            match aocs_can_rx.try_recv() {
                Ok(package) => match package.package_id() {
                    _ => warn!("Incorrect Id"),
                },
                Err(_) => {}
            }

            // telecommand handling
            match acs_thread_rx.try_recv() {
                Ok(request_with_token) => {
                    match request_with_token.0 {
                        Request::HkRequest(hk_req) => {
                            match hk_req {
                                HkRequest::OneShot(id) => {
                                    assert_eq!(id.target_id, RequestTargetId::AcsSubsystem as u32);
                                    if id.unique_id == 0 {
                                        let mut sp_header = SpHeader::tm_unseg(
                                            0x02,
                                            aocs_seq_count_provider.get_and_increment(),
                                            0,
                                        )
                                        .unwrap();
                                        let cds_stamp =
                                            TimeProvider::from_now_with_u16_days().unwrap();
                                        cds_stamp.write_to_bytes(&mut time_stamp_buf);
                                        let mut len = id.write_to_be_bytes(&mut huge_buf).unwrap();
                                        let json_string = "asdf";
                                        huge_buf[8..json_string.len() + 8]
                                            .copy_from_slice(json_string.as_bytes());
                                        len += json_string.len();
                                        let tm_sec_header = PusTmSecondaryHeader::new_simple(
                                            3,
                                            Subservice::TmHkPacket as u8,
                                            &time_stamp_buf,
                                        );
                                        let hk_tm = PusTm::new(
                                            &mut sp_header,
                                            tm_sec_header,
                                            Some(&huge_buf[0..len]),
                                            true,
                                        );
                                        let addr = aocs_tm_store.add_pus_tm(&hk_tm);
                                        aocs_tm_funnel_tx.send(addr).expect("sending failed");
                                        /* let start_token = self //implement this for verification
                                            .verif_reporter
                                            .start_success(token, &self.time_stamp)
                                            .expect("Error sending start success");
                                        self.tm_tx
                                            .send(addr)
                                            .expect("Sending TM to TM funnel failed");
                                        self.verif_reporter
                                            .completion_success(start_token, &self.time_stamp)
                                            .expect("Error sending completion success");

                                        */
                                    }
                                }
                                HkRequest::Enable(_) => {}
                                HkRequest::Disable(_) => {}
                                HkRequest::ModifyCollectionInterval(_, _) => {}
                            }
                        }
                        _ => {}
                    }
                }
                Err(_) => {}
            }
        }
    });

    */

    // NOTE(review): `package_map_pld_tx` and `pld_tm_funnel_tx`/`pld_tm_store`
    // are not passed to `core_pld_task` below — confirm whether they are
    // leftovers or still needed.
    let package_map_pld_tx = load_package_ids();
    let pld_tm_funnel_tx = tm_funnel_tx.clone();
    let mut pld_tm_store = tm_store.clone();

    let pld_can_tx_sender =
        can_tx_sender.clone();

    //let mut pcdu_tx_clone = pcdu_tx.clone();
    println!("Starting Payload Handling task");
    let builder3 = thread::Builder::new().name("PLDThread".into());
    let jh3 = builder3.spawn(move || {
        core_pld_task(
            power_switcher.clone(),
            pld_thread_rx,
            pld_can_rx,
            pld_can_tx_sender,
            &mut reporter_pld,
        );
    });

    println!("Starting TM funnel task");
    let builder4 = thread::Builder::new().name("TMFunnelThread".into());
    let jh4 = builder4.spawn(move || {
        // The funnel forwards every stored TM address to the UDP server task.
        let tm_funnel = TmFunnel {
            tm_server_tx,
            tm_funnel_rx,
        };
        loop {
            if let Ok(addr) = tm_funnel.tm_funnel_rx.recv() {
                tm_funnel
                    .tm_server_tx
                    .send(addr)
                    .expect("Sending TM to server failed");
            }
        }
    });

    // NOTE(review): `package_map_aocs_tx` and `mgm_shared_data` are unused
    // below (the latter is only referenced by the commented-out AOCS thread).
    let package_map_aocs_tx = load_package_ids();
    let aocs_tm_funnel_tx = tm_funnel_tx.clone();
    let mut aocs_tm_store = tm_store.clone();

    let mut mgm_shared_data: Arc<Mutex<MGMData>> = Arc::default();

    let aocs_sensor_data = Arc::new(Mutex::new(AocsSensorData::new()));

    info!("Starting AOCS task");
    let builder5 = thread::Builder::new().name("AOCSThread".into());
    let jh5 = builder5.spawn(move || {
        let mut aocs_housekeeper = AocsHousekeeper::new(
            aocs_sensor_data.clone(),
            aocs_thread_rx,
            aocs_seq_count_provider,
            aocs_tm_store,
            aocs_tm_funnel_tx,
            reporter_aocs,
        );
        loop {
            aocs_housekeeper.handle_hk_request();
        }
    });

    // NOTE(review): the CAN threads below are only created AFTER this join
    // returns. If `core_tmtc_task` loops forever (as the other tasks do),
    // the cfg(can) block is never reached — confirm whether this join should
    // instead come after the CAN threads are spawned.
    jh0.unwrap()
        .join()
        .expect("Joining UDP TMTC server thread failed");
    #[cfg(feature = "can")]
    {
        // CAN RX: receives frames from the bus and demultiplexes them to the
        // per-subsystem channels registered in `can_senders`.
        let can_rx_socket = can::CanRxHandler::new_socket("can0", can_senders.clone(), package_ids_rx.clone()).unwrap();
        info!("Starting CAN Socket listening task");
        let builder1 = thread::Builder::new().name("CanRxHandler".into());
        let jh1 = builder1.spawn(move || loop {
            let frame = can_rx_socket.rx_socket();
            if let Some(frame) = frame {
                // NOTE(review): the forward result is discarded — consider
                // logging failures.
                let forward = can_rx_socket.forward_frame(frame);
            }
        });

        // CAN TX: drains the multiplexed `can_tx_receiver` channel onto the bus.
        let mut can_tx_socket = CanTxHandler::new_socket("can0", package_ids_rx.clone(), can_tx_receiver).unwrap();
        info!("Starting CAN Socket writing task");
        let builderCanTx = thread::Builder::new().name("TxHandler".into());
        let jhCanTx = builderCanTx.spawn(move || loop {
            can_tx_socket.process_incoming();
        });

        jh1.unwrap()
            .join()
            .expect("Joining CAN Bus Listening thread failed");
        jhCanTx.unwrap()
            .join()
            .expect("Joining CAN Bus Writing thread failed");
    }
    jh2.unwrap().join().expect("Joining power thread failed");
    jh3.unwrap().join().expect("Joining PLD thread failed");
    jh4.unwrap()
        .join()
        .expect("Joining TM funnel thread failed");
    jh5.unwrap().join().expect("Joining AOCS thread failed");
}
|
|
/// Raw three-axis sample, presumably a magnetometer (MGM) reading — one
/// signed 16-bit value per axis.
///
/// NOTE(review): only referenced from the commented-out AOCS thread in
/// `main`, while the live AOCS code imports `MGMData` from `aocs_handler`;
/// confirm whether this duplicate type is still needed.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
struct MgmData {
    // X-axis raw value.
    x: i16,
    // Y-axis raw value.
    y: i16,
    // Z-axis raw value.
    z: i16,
}
|