lkoester 2023-03-09 09:54:21 +01:00
parent 71a8a249e8
commit e67835733f
13 changed files with 749 additions and 412 deletions

@ -1,21 +1,24 @@
use std::sync::{Arc, Mutex}; use crate::action::ActionRequest;
use crate::can_ids::{DeviceId, PackageId, PackageModel}; use crate::can_ids::{DeviceId, PackageId, PackageModel};
use crate::hk::{AOCSSensorData, AocsHousekeeper};
use crate::power_handler::{DeviceState, PowerSwitcher}; use crate::power_handler::{DeviceState, PowerSwitcher};
use crate::requests::{Request, RequestWithToken}; use crate::requests::{Request, RequestWithToken};
use satrs_core::power::SwitchId; use crate::tmtc::TmStore;
use std::sync::mpsc::{channel, Receiver, Sender}; use byteorder::{ByteOrder, LittleEndian};
use std::u32;
use serde::{Deserialize, Serialize};
use byteorder::{LittleEndian, ByteOrder};
use num_derive::ToPrimitive; use num_derive::ToPrimitive;
use satrs_core::hk::HkRequest; use satrs_core::hk::HkRequest;
use satrs_core::mode::ModeRequest; use satrs_core::mode::ModeRequest;
use satrs_core::pool::StoreAddr; use satrs_core::pool::StoreAddr;
use satrs_core::pus::verification::{StdVerifSenderError, TcStateAccepted, TcStateNone, VerificationReporterWithSender, VerificationToken}; use satrs_core::power::SwitchId;
use satrs_core::pus::verification::{
TcStateAccepted, TcStateNone, VerificationReporterWithSender, VerificationToken,
};
use satrs_core::pus::MpscPusInStoreSendError;
use satrs_core::seq_count::SeqCountProviderSyncClonable; use satrs_core::seq_count::SeqCountProviderSyncClonable;
use crate::action::ActionRequest; use serde::{Deserialize, Serialize};
use crate::hk::{AOCSHousekeeper, AOCSSensorData}; use std::sync::mpsc::{channel, Receiver, Sender};
use crate::tmtc::{TmStore}; use std::sync::{Arc, Mutex};
use std::u32;
#[derive(ToPrimitive)] #[derive(ToPrimitive)]
pub enum AOCSSensorMode { pub enum AOCSSensorMode {
@ -53,20 +56,23 @@ pub struct MGMData {
} }
impl MGMData { impl MGMData {
pub fn from_floats(axis_1: f64, pub fn from_floats(axis_1: f64, axis_2: f64, axis_3: f64) -> MGMData {
axis_2: f64, MGMData {
axis_3: f64, axis_1,
) -> MGMData { axis_2,
MGMData{axis_1, axis_2, axis_3} axis_3,
}
} }
pub fn new() -> MGMData { MGMData{axis_1: 0.0, axis_2: 0.0, axis_3: 0.0} } pub fn new() -> MGMData {
MGMData {
axis_1: 0.0,
axis_2: 0.0,
axis_3: 0.0,
}
}
pub fn update(&mut self, pub fn update(&mut self, axis_1: f64, axis_2: f64, axis_3: f64) {
axis_1: f64,
axis_2: f64,
axis_3: f64
) {
self.axis_1 = axis_1; self.axis_1 = axis_1;
self.axis_2 = axis_2; self.axis_2 = axis_2;
self.axis_3 = axis_3; self.axis_3 = axis_3;
@ -92,25 +98,43 @@ pub struct CSSData {
} }
impl CSSData { impl CSSData {
pub fn from_floats(voltage_1: f64, pub fn from_floats(
voltage_2: f64, voltage_1: f64,
voltage_3: f64, voltage_2: f64,
voltage_4: f64, voltage_3: f64,
voltage_5: f64, voltage_4: f64,
voltage_6: f64, voltage_5: f64,
voltage_6: f64,
) -> CSSData { ) -> CSSData {
CSSData{voltage_1, voltage_2, voltage_3, voltage_4, voltage_5, voltage_6} CSSData {
voltage_1,
voltage_2,
voltage_3,
voltage_4,
voltage_5,
voltage_6,
}
} }
pub fn new() -> CSSData { CSSData{voltage_1: 0.0, voltage_2: 0.0, voltage_3: 0.0, voltage_4: 0.0, voltage_5: 0.0, voltage_6: 0.0} } pub fn new() -> CSSData {
CSSData {
voltage_1: 0.0,
voltage_2: 0.0,
voltage_3: 0.0,
voltage_4: 0.0,
voltage_5: 0.0,
voltage_6: 0.0,
}
}
pub fn update(&mut self, pub fn update(
voltage_1: f64, &mut self,
voltage_2: f64, voltage_1: f64,
voltage_3: f64, voltage_2: f64,
voltage_4: f64, voltage_3: f64,
voltage_5: f64, voltage_4: f64,
voltage_6: f64, voltage_5: f64,
voltage_6: f64,
) { ) {
self.voltage_1 = voltage_1; self.voltage_1 = voltage_1;
self.voltage_2 = voltage_2; self.voltage_2 = voltage_2;
@ -121,11 +145,25 @@ impl CSSData {
} }
pub fn to_array(&self) -> [f64; 6] { pub fn to_array(&self) -> [f64; 6] {
[self.voltage_1, self.voltage_2, self.voltage_3, self.voltage_4, self.voltage_5, self.voltage_6] [
self.voltage_1,
self.voltage_2,
self.voltage_3,
self.voltage_4,
self.voltage_5,
self.voltage_6,
]
} }
pub fn to_tuple(&self) -> (f64, f64, f64, f64, f64, f64) { pub fn to_tuple(&self) -> (f64, f64, f64, f64, f64, f64) {
(self.voltage_1, self.voltage_2, self.voltage_3, self.voltage_4, self.voltage_5, self.voltage_6) (
self.voltage_1,
self.voltage_2,
self.voltage_3,
self.voltage_4,
self.voltage_5,
self.voltage_6,
)
} }
} }
@ -149,7 +187,9 @@ impl AOCSSensorHandler for CSSHandler {
} }
fn send_message(&mut self, id: PackageId, buf: &[u8]) -> Result<(), Self::Error> { fn send_message(&mut self, id: PackageId, buf: &[u8]) -> Result<(), Self::Error> {
self.can_tx.send(PackageModel::new(id, buf).unwrap()).unwrap(); self.can_tx
.send(PackageModel::new(id, buf).unwrap())
.unwrap();
return Ok(()); return Ok(());
} }
} }
@ -166,7 +206,17 @@ impl CSSHandler {
can_rx: Receiver<PackageModel>, can_rx: Receiver<PackageModel>,
request_rx: Receiver<RequestWithToken>, request_rx: Receiver<RequestWithToken>,
) -> CSSHandler { ) -> CSSHandler {
CSSHandler{power_switcher, device_id, switch_id, device_state, device_mode, css_data: Arc::new(Mutex::new(CSSData::default())), can_tx, can_rx, request_rx} CSSHandler {
power_switcher,
device_id,
switch_id,
device_state,
device_mode,
css_data: Arc::new(Mutex::new(CSSData::default())),
can_tx,
can_rx,
request_rx,
}
} }
pub fn get_data_ref(&mut self) -> Arc<Mutex<CSSData>> { pub fn get_data_ref(&mut self) -> Arc<Mutex<CSSData>> {
@ -179,37 +229,39 @@ impl CSSHandler {
fn handle_request_messages(&mut self) { fn handle_request_messages(&mut self) {
let request = self.request_rx.try_recv().unwrap(); let request = self.request_rx.try_recv().unwrap();
let token = request.1; let token = request.1.unwrap();
match request.0 { match request.0 {
Request::HkRequest(_) => {} Request::HkRequest(_) => {}
Request::ModeRequest(request) => { Request::ModeRequest(request) => {
self.handle_mode_request(request, token); self.handle_mode_request(request, token);
} }
Request::ActionRequest(request) => { Request::ActionRequest(request) => self.handle_action_request(request, token),
self.handle_action_request(request, token)
}
} }
} }
fn handle_mode_request(&mut self, request: ModeRequest, token: VerificationToken<TcStateAccepted>) { fn handle_mode_request(
&mut self,
request: ModeRequest,
token: VerificationToken<TcStateAccepted>,
) {
match request { match request {
ModeRequest::SetMode(mode) => { ModeRequest::SetMode(mode) => match mode.mode() {
match mode.mode() { 0 => {}
0 => {} _ => {}
_ => {} },
} ModeRequest::ReadMode => {}
} ModeRequest::AnnounceMode => {}
ModeRequest::ReadMode(_) => {} ModeRequest::AnnounceModeRecursive => {}
ModeRequest::AnnounceMode(_) => {}
ModeRequest::AnnounceModeRecursive(_) => {}
} }
} }
fn handle_action_request(&mut self, request: ActionRequest, token: VerificationToken<TcStateAccepted>) { fn handle_action_request(
&mut self,
request: ActionRequest,
token: VerificationToken<TcStateAccepted>,
) {
match request { match request {
ActionRequest::ImageRequest(target_id) => { ActionRequest::ImageRequest(target_id) => {}
}
ActionRequest::OrientationRequest(_) => {} ActionRequest::OrientationRequest(_) => {}
ActionRequest::PointingRequest(_) => {} ActionRequest::PointingRequest(_) => {}
} }
@ -236,7 +288,9 @@ impl AOCSSensorHandler for MGMHandler {
} }
fn send_message(&mut self, id: PackageId, buf: &[u8]) -> Result<(), Self::Error> { fn send_message(&mut self, id: PackageId, buf: &[u8]) -> Result<(), Self::Error> {
self.can_tx.send(PackageModel::new(id, buf).unwrap()).unwrap(); self.can_tx
.send(PackageModel::new(id, buf).unwrap())
.unwrap();
return Ok(()); return Ok(());
} }
} }
@ -273,8 +327,7 @@ impl MGMHandler {
self.read_sensor_data(); self.read_sensor_data();
} }
pub fn update_mode(&mut self) { pub fn update_mode(&mut self) {}
}
pub fn handle_requests(&mut self) { pub fn handle_requests(&mut self) {
if self.device_state == DeviceState::On { if self.device_state == DeviceState::On {
@ -295,11 +348,11 @@ impl MGMHandler {
} }
pub fn handle_mode_request(&mut self, mode_request: ModeRequest) { pub fn handle_mode_request(&mut self, mode_request: ModeRequest) {
match mode_request{ match mode_request {
ModeRequest::SetMode(_) => {} ModeRequest::SetMode(_) => {}
ModeRequest::ReadMode(_) => {} ModeRequest::ReadMode => {}
ModeRequest::AnnounceMode(_) => {} ModeRequest::AnnounceMode => {}
ModeRequest::AnnounceModeRecursive(_) => {} ModeRequest::AnnounceModeRecursive => {}
} }
} }
@ -332,13 +385,12 @@ impl MGMHandler {
let float_data = self.decode_sensor_data(package.data()); let float_data = self.decode_sensor_data(package.data());
if let Ok(mut mgm_data) = self.mgm_data.lock() { if let Ok(mut mgm_data) = self.mgm_data.lock() {
match package.package_id() { match package.package_id() {
PackageId::AOCSDataMGM1 => { mgm_data.axis_1 = float_data } PackageId::AOCSDataMGM1 => mgm_data.axis_1 = float_data,
PackageId::AOCSDataMGM2 => { mgm_data.axis_2 = float_data } PackageId::AOCSDataMGM2 => mgm_data.axis_2 = float_data,
PackageId::AOCSDataMGM3 => { mgm_data.axis_3 = float_data } PackageId::AOCSDataMGM3 => mgm_data.axis_3 = float_data,
_ => {} _ => {}
} }
} }
} }
} }
@ -349,93 +401,11 @@ impl MGMHandler {
} }
pub struct AOCSController { pub struct AOCSController {
aocs_housekeeper: AOCSHousekeeper, aocs_housekeeper: AocsHousekeeper,
mgm_handler: MGMHandler, mgm_handler: MGMHandler,
request_rx: Receiver<RequestWithToken>, request_rx: Receiver<RequestWithToken>,
hk_request_tx: Sender<RequestWithToken>, hk_request_tx: Sender<RequestWithToken>,
mgm_request_tx: Sender<RequestWithToken>, mgm_request_tx: Sender<RequestWithToken>,
} }
impl AOCSController { pub fn core_aocs_loop() {}
pub fn new(
sensor_data_pool: Arc<Mutex<AOCSSensorData>>,
seq_count_provider: SeqCountProviderSyncClonable,
aocs_can_receiver_rx: Receiver<PackageModel>,
aocs_can_sender_tx: Sender<PackageModel>,
mgm_can_receiver_rx: Receiver<PackageModel>,
aocs_tm_store: TmStore,
aocs_tm_funnel_tx: Sender<StoreAddr>,
verif_reporter: VerificationReporterWithSender<StdVerifSenderError>,
power_switcher: PowerSwitcher,
aocs_request_rx: Receiver<RequestWithToken>,
) -> AOCSController {
let mgm_can_sender_tx = aocs_can_sender_tx.clone();
let (mgm_request_tx, mgm_request_rx) = channel();
let (hk_request_tx, hk_request_rx) = channel();
let aocs_housekeeper = AOCSHousekeeper::new(sensor_data_pool, hk_request_rx, seq_count_provider, aocs_tm_store, aocs_tm_funnel_tx, verif_reporter);
let mgm_handler = MGMHandler::new(power_switcher, DeviceId::MGM1, mgm_can_sender_tx, mgm_can_receiver_rx, mgm_request_rx);
AOCSController{aocs_housekeeper, mgm_handler, request_rx: aocs_request_rx, hk_request_tx, mgm_request_tx}
}
pub fn periodic_op(&mut self) {
self.update_sensors();
self.process_requests();
}
pub fn process_requests(&mut self) {
if let Ok(request) = self.request_rx.try_recv() {
match request.0 {
Request::HkRequest(hk_request) => {
self.handle_hk_request(hk_request);
}
Request::ModeRequest(mode_request) => {
self.handle_mode_request(mode_request);
}
Request::ActionRequest(_) => {}
}
}
}
pub fn handle_hk_request(&mut self, request: HkRequest) {
match request {
HkRequest::OneShot(id) => {
self.aocs_housekeeper.one_shot_hk(id);
}
HkRequest::Enable(id) => {
self.aocs_housekeeper.enable_hk(id);
}
HkRequest::Disable(id) => {
self.aocs_housekeeper.disable_hk(id);
}
HkRequest::ModifyCollectionInterval(_, _) => {}
}
}
pub fn handle_mode_request(&mut self, mode_request: ModeRequest) {
match mode_request {
ModeRequest::SetMode(mode_command) => {
// if let mode_command.target_id
match mode_command.mode() {
0 => self.set_mode_off(),
1 => self.set_mode_on(),
_ => {}
}
}
ModeRequest::ReadMode(_) => {}
ModeRequest::AnnounceMode(_) => {}
ModeRequest::AnnounceModeRecursive(_) => {}
}
}
pub fn set_mode_off(&mut self) {}
pub fn set_mode_on(&mut self) {}
pub fn update_sensors(&mut self) {
self.mgm_handler.periodic_op();
}
}

@ -1,6 +1,6 @@
use crate::can_ids::{ use crate::can_ids::{
can_id_to_package_id, package_id_to_can_id, DeviceId, PackageId, can_id_to_package_id, package_id_to_can_id, DeviceId, PackageId, PackageModel,
PackageModel, SenderReceiverThread, ThreadId, SenderReceiverApid, SenderReceiverThread, ThreadId,
}; };
use embedded_can::{self, Frame}; use embedded_can::{self, Frame};
use log::{debug, warn}; use log::{debug, warn};
@ -13,9 +13,9 @@ pub struct CanRxHandler {
interface: &'static str, interface: &'static str,
socket: socket::CanSocket, socket: socket::CanSocket,
//frame_id_to_sender_id: HashMap<embedded_can::Id, u32>, // double hash map: frame id -> receiver id -> sender handle //frame_id_to_sender_id: HashMap<embedded_can::Id, u32>, // double hash map: frame id -> receiver id -> sender handle
can_senders: HashMap<ThreadId, Sender<PackageModel>>, can_senders: HashMap<u16, Sender<PackageModel>>,
//dismissed_ids: Vec<embedded_can::Id>, //dismissed_ids: Vec<embedded_can::Id>,
package_map: HashMap<PackageId, SenderReceiverThread>, package_map: HashMap<PackageId, SenderReceiverApid>,
//packet_id_to_sender_id: HashMap<PackageId, ThreadId>, //packet_id_to_sender_id: HashMap<PackageId, ThreadId>,
} }
@ -23,8 +23,8 @@ impl CanRxHandler {
pub fn new_socket( pub fn new_socket(
interface: &'static str, interface: &'static str,
//frame_id_to_sender_id: HashMap<embedded_can::Id, u32>, //frame_id_to_sender_id: HashMap<embedded_can::Id, u32>,
can_senders: HashMap<ThreadId, Sender<PackageModel>>, can_senders: HashMap<u16, Sender<PackageModel>>,
package_map: HashMap<PackageId, SenderReceiverThread>, package_map: HashMap<PackageId, SenderReceiverApid>,
) -> Result<CanRxHandler, ()> { ) -> Result<CanRxHandler, ()> {
let socket = socket::CanSocket::open(&interface); let socket = socket::CanSocket::open(&interface);
if let Ok(socket) = socket { if let Ok(socket) = socket {
@ -66,13 +66,13 @@ impl CanRxHandler {
if self.package_map.contains_key(&frame_id) { if self.package_map.contains_key(&frame_id) {
let value = self.package_map.get(&frame_id).unwrap(); let value = self.package_map.get(&frame_id).unwrap();
if value.get_sender() != DeviceId::OBC { if value.get_sender() != DeviceId::OBC {
let message_sender = self.can_senders.get(&value.get_thread()).unwrap(); let message_sender = self.can_senders.get(&value.get_apid()).unwrap();
let data = frame.data(); let data = frame.data();
let message = let message =
PackageModel::new(frame_id, data).expect("Error generating message."); PackageModel::new(frame_id, data).expect("Error generating message.");
message_sender.send(message).expect(&*format!( message_sender.send(message).expect(&*format!(
"Failure sending can bus frame to thread{:?}, frame id {:?}", "Failure sending can bus frame to thread{:?}, frame id {:?}",
value.get_thread(), value.get_apid(),
frame_id frame_id
)); ));
} }
@ -84,14 +84,14 @@ impl CanRxHandler {
pub struct CanTxHandler { pub struct CanTxHandler {
interface: &'static str, interface: &'static str,
socket: socket::CanSocket, socket: socket::CanSocket,
package_map: HashMap<PackageId, SenderReceiverThread>, package_map: HashMap<PackageId, SenderReceiverApid>,
message_receiver: Receiver<PackageModel>, message_receiver: Receiver<PackageModel>,
} }
impl CanTxHandler { impl CanTxHandler {
pub fn new_socket( pub fn new_socket(
interface: &'static str, interface: &'static str,
package_map: HashMap<PackageId, SenderReceiverThread>, package_map: HashMap<PackageId, SenderReceiverApid>,
message_receiver: Receiver<PackageModel>, message_receiver: Receiver<PackageModel>,
) -> Result<CanTxHandler, ()> { ) -> Result<CanTxHandler, ()> {
let socket = socket::CanSocket::open(&interface); let socket = socket::CanSocket::open(&interface);

@ -2,6 +2,7 @@ use embedded_can::{Id, StandardId};
use log::warn; use log::warn;
use std::collections::HashMap; use std::collections::HashMap;
use crate::tmtc::{CSS_APID, MGM_APID, MGT_APID, PLD_APID, PWR_APID, RWL_APID, STR_APID};
pub use num_derive::{FromPrimitive, ToPrimitive}; pub use num_derive::{FromPrimitive, ToPrimitive};
pub use num_traits::{FromPrimitive, ToPrimitive}; pub use num_traits::{FromPrimitive, ToPrimitive};
pub use strum::IntoEnumIterator; // 0.17.1 pub use strum::IntoEnumIterator; // 0.17.1
@ -154,6 +155,7 @@ impl PackageModel {
} }
} }
// TODO delete!!
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
pub struct SenderReceiverThread { pub struct SenderReceiverThread {
sender: DeviceId, sender: DeviceId,
@ -181,7 +183,7 @@ impl SenderReceiverThread {
} }
} }
pub fn load_package_ids() -> HashMap<PackageId, SenderReceiverThread> { pub fn load_package_id_to_threads() -> HashMap<PackageId, SenderReceiverThread> {
let mut package_map: HashMap<PackageId, SenderReceiverThread> = HashMap::new(); let mut package_map: HashMap<PackageId, SenderReceiverThread> = HashMap::new();
let properties = vec![ let properties = vec![
@ -244,6 +246,96 @@ pub fn load_package_ids() -> HashMap<PackageId, SenderReceiverThread> {
return package_map; return package_map;
} }
#[derive(Debug, Copy, Clone)]
pub struct SenderReceiverApid {
sender: DeviceId,
receiver: DeviceId,
apid: u16,
}
impl SenderReceiverApid {
pub fn new(sender: DeviceId, receiver: DeviceId, apid: u16) -> Self {
Self {
sender,
receiver,
apid,
}
}
pub fn get_sender(&self) -> DeviceId {
self.sender
}
pub fn get_receiver(&self) -> DeviceId {
self.receiver
}
pub fn get_apid(&self) -> u16 {
self.apid
}
}
pub fn load_package_id_to_apids() -> HashMap<PackageId, SenderReceiverApid> {
let mut package_map: HashMap<PackageId, SenderReceiverApid> = HashMap::new();
let properties = vec![
SenderReceiverApid::new(DeviceId::OBC, DeviceId::PCDU, PWR_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::PCDU, PWR_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::PCDU, PWR_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::PCDU, PWR_APID),
SenderReceiverApid::new(DeviceId::PCDU, DeviceId::OBC, PWR_APID),
SenderReceiverApid::new(DeviceId::PCDU, DeviceId::OBC, PWR_APID),
SenderReceiverApid::new(DeviceId::PCDU, DeviceId::OBC, PWR_APID),
SenderReceiverApid::new(DeviceId::PCDU, DeviceId::OBC, PWR_APID),
SenderReceiverApid::new(DeviceId::PCDU, DeviceId::OBC, PWR_APID),
SenderReceiverApid::new(DeviceId::PCDU, DeviceId::OBC, PWR_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::MGT1, MGT_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::MGT2, MGT_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::MGT3, MGT_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::MGT4, MGT_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::RWL1, RWL_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::RWL2, RWL_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::RWL3, RWL_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::RWL4, RWL_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::MGM1, MGM_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::MGM2, MGM_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::MGM3, MGM_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::MGM4, MGM_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::SunSensor1, CSS_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::SunSensor2, CSS_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::SunSensor3, CSS_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::SunSensor4, CSS_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::SunSensor5, CSS_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::SunSensor6, CSS_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::StarTracker, STR_APID),
SenderReceiverApid::new(DeviceId::MGM1, DeviceId::OBC, MGM_APID),
SenderReceiverApid::new(DeviceId::MGM2, DeviceId::OBC, MGM_APID),
SenderReceiverApid::new(DeviceId::MGM3, DeviceId::OBC, MGM_APID),
SenderReceiverApid::new(DeviceId::MGM4, DeviceId::OBC, MGM_APID),
SenderReceiverApid::new(DeviceId::SunSensor1, DeviceId::OBC, CSS_APID),
SenderReceiverApid::new(DeviceId::SunSensor2, DeviceId::OBC, CSS_APID),
SenderReceiverApid::new(DeviceId::SunSensor3, DeviceId::OBC, CSS_APID),
SenderReceiverApid::new(DeviceId::SunSensor4, DeviceId::OBC, CSS_APID),
SenderReceiverApid::new(DeviceId::SunSensor5, DeviceId::OBC, CSS_APID),
SenderReceiverApid::new(DeviceId::SunSensor6, DeviceId::OBC, CSS_APID),
SenderReceiverApid::new(DeviceId::StarTracker, DeviceId::OBC, STR_APID),
SenderReceiverApid::new(DeviceId::StarTracker, DeviceId::OBC, STR_APID),
SenderReceiverApid::new(DeviceId::StarTracker, DeviceId::OBC, STR_APID),
SenderReceiverApid::new(DeviceId::StarTracker, DeviceId::OBC, STR_APID),
SenderReceiverApid::new(DeviceId::OBC, DeviceId::Camera, PLD_APID),
SenderReceiverApid::new(DeviceId::Camera, DeviceId::OBC, PLD_APID),
SenderReceiverApid::new(DeviceId::Camera, DeviceId::OBC, PLD_APID),
SenderReceiverApid::new(DeviceId::Camera, DeviceId::OBC, PLD_APID),
];
let mut i = 0;
for id in PackageId::iter() {
let value = properties.get(i).unwrap();
package_map.insert(id, *value);
i += 1;
}
return package_map;
}
/* /*
pub fn load_device_ids() { pub fn load_device_ids() {
let mut package_map: HashMap<DeviceId, DeviceProperties> = HashMap::new(); let mut package_map: HashMap<DeviceId, DeviceProperties> = HashMap::new();

@ -1,5 +1,8 @@
use crate::tmtc::{
MpscStoreAndSendError, PusTcSource, AOCS_APID, AOCS_HK_APID, CSS_APID, MGM_APID, MGT_APID,
PLD_APID, PUS_APID, PWR_APID, RWL_APID, STR_APID,
};
use log::warn; use log::warn;
use crate::tmtc::{MpscStoreAndSendError, PusTcSource, PUS_APID};
use satrs_core::spacepackets::{CcsdsPacket, SpHeader}; use satrs_core::spacepackets::{CcsdsPacket, SpHeader};
use satrs_core::tmtc::{CcsdsPacketHandler, ReceivesCcsdsTc}; use satrs_core::tmtc::{CcsdsPacketHandler, ReceivesCcsdsTc};
@ -11,7 +14,18 @@ impl CcsdsPacketHandler for CcsdsReceiver {
type Error = MpscStoreAndSendError; type Error = MpscStoreAndSendError;
fn valid_apids(&self) -> &'static [u16] { fn valid_apids(&self) -> &'static [u16] {
&[PUS_APID] &[
PUS_APID,
PLD_APID,
PWR_APID,
AOCS_APID,
AOCS_HK_APID,
MGM_APID,
CSS_APID,
STR_APID,
MGT_APID,
RWL_APID,
]
} }
fn handle_known_apid( fn handle_known_apid(
@ -21,6 +35,8 @@ impl CcsdsPacketHandler for CcsdsReceiver {
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
if sp_header.apid() == PUS_APID { if sp_header.apid() == PUS_APID {
return self.tc_source.pass_ccsds(sp_header, tc_raw); return self.tc_source.pass_ccsds(sp_header, tc_raw);
} else {
return self.tc_source.pass_ccsds(sp_header, tc_raw);
} }
Ok(()) Ok(())
} }

src/hk.rs

@ -1,13 +1,19 @@
use std::collections::HashMap;
use strum::IntoEnumIterator;
use crate::aocs::{CSSData, MGMData}; use crate::aocs::{CSSData, MGMData};
use crate::hk::AocsDataType::float_value;
use crate::requests::Request; use crate::requests::Request;
use crate::requests::RequestWithToken; use crate::requests::RequestWithToken;
use crate::tmtc::TmStore; use crate::tmtc::{TmStore, AOCS_HK_APID};
use eurosim_obsw::{hk_err}; use byteorder::{ByteOrder, LittleEndian};
use eurosim_obsw::hk_err;
use num_enum::FromPrimitive;
use satrs_core::hk::{HkRequest, UniqueId};
use satrs_core::pool::StoreAddr; use satrs_core::pool::StoreAddr;
use satrs_core::pus::hk::Subservice; use satrs_core::pus::hk::Subservice;
use satrs_core::pus::verification::{ use satrs_core::pus::verification::{FailParams, VerificationReporterWithSender};
FailParams, StdVerifSenderError, VerificationReporterWithSender, use satrs_core::pus::MpscPusInStoreSendError;
};
use satrs_core::seq_count::{SeqCountProviderSyncClonable, SequenceCountProviderCore}; use satrs_core::seq_count::{SeqCountProviderSyncClonable, SequenceCountProviderCore};
use satrs_core::spacepackets::time::cds::TimeProvider; use satrs_core::spacepackets::time::cds::TimeProvider;
use satrs_core::spacepackets::time::TimeWriter; use satrs_core::spacepackets::time::TimeWriter;
@ -15,24 +21,82 @@ use satrs_core::spacepackets::tm::{PusTm, PusTmSecondaryHeader};
use satrs_core::spacepackets::SpHeader; use satrs_core::spacepackets::SpHeader;
use satrs_core::tmtc::AddressableId; use satrs_core::tmtc::AddressableId;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::ops::{Deref}; use std::ops::Deref;
use std::sync::mpsc::{Receiver, Sender}; use std::sync::mpsc::{Receiver, Sender};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use satrs_core::hk::HkRequest; use strum_macros::EnumIter;
pub type CollectionIntervalFactor = u32; pub type CollectionIntervalFactor = u32;
pub const MGM_VOLTAGE_1: UniqueId = 1;
pub const MGM_VOLTAGE_2: UniqueId = 2;
pub const MGM_VOLTAGE_3: UniqueId = 3;
pub const CSS_VOLTAGE_1: UniqueId = 4;
pub const CSS_VOLTAGE_2: UniqueId = 5;
pub const CSS_VOLTAGE_3: UniqueId = 6;
pub const CSS_VOLTAGE_4: UniqueId = 7;
pub const CSS_VOLTAGE_5: UniqueId = 8;
pub const CSS_VOLTAGE_6: UniqueId = 9;
pub const STR_QUATERNION_1: UniqueId = 10;
pub const STR_QUATERNION_2: UniqueId = 11;
pub const STR_QUATERNION_3: UniqueId = 12;
pub const STR_QUATERNION_4: UniqueId = 13;
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum AocsHkIds { pub enum AocsHkIds {
TestAocsSet = 1, TestAocsSet = 1,
TestMgmSet = 2, TestMgmSet = 2,
} }
#[derive(Clone)]
pub struct AocsDataMap {
map: HashMap<UniqueId, AocsDataType>,
}
impl AocsDataMap {
pub fn new() -> Self {
let mut data_map = HashMap::new();
data_map.insert(MGM_VOLTAGE_1, float_value(0.0));
data_map.insert(MGM_VOLTAGE_2, float_value(0.0));
data_map.insert(MGM_VOLTAGE_3, float_value(0.0));
data_map.insert(CSS_VOLTAGE_1, float_value(0.0));
data_map.insert(CSS_VOLTAGE_2, float_value(0.0));
data_map.insert(CSS_VOLTAGE_3, float_value(0.0));
data_map.insert(CSS_VOLTAGE_4, float_value(0.0));
data_map.insert(CSS_VOLTAGE_5, float_value(0.0));
data_map.insert(CSS_VOLTAGE_6, float_value(0.0));
data_map.insert(STR_QUATERNION_1, float_value(0.0));
data_map.insert(STR_QUATERNION_2, float_value(0.0));
data_map.insert(STR_QUATERNION_3, float_value(0.0));
data_map.insert(STR_QUATERNION_4, float_value(0.0));
Self { map: data_map }
}
pub fn update_value(&mut self, id: UniqueId, val: AocsDataType) -> Result<(), ()> {
if self.map.contains_key(&id) {
self.map.insert(id, val);
return Ok(());
}
Err(())
}
pub fn get_value(&self, id: UniqueId) -> Option<&AocsDataType> {
self.map.get(&id)
}
}
#[derive(Debug, EnumIter, PartialEq, Copy, Clone)]
pub enum AocsDataType {
float_value(f64),
}
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct AOCSSensorData { pub struct AOCSSensorData {
mgm_data: MGMData, // Voltage for 3 axis mgm_data: MGMData, // Voltage for 3 axis
css_data: CSSData, // Voltage for 18 sun sensors css_data: CSSData, // Voltage for 18 sun sensors
str_data: [f64; 4], // Quaternion for position of satellite str_data: [f64; 4], // Quaternion for position of satellite
} }
impl AOCSSensorData { impl AOCSSensorData {
@ -74,35 +138,51 @@ impl AOCSSensorData {
} }
} }
pub struct AOCSHousekeeper { pub struct AocsHousekeeper {
sensor_data_pool: Arc<Mutex<AOCSSensorData>>, data_map: Arc<Mutex<AocsDataMap>>,
id_list: Vec<UniqueId>,
request_rx: Receiver<RequestWithToken>, request_rx: Receiver<RequestWithToken>,
seq_count_provider: SeqCountProviderSyncClonable, seq_count_provider: SeqCountProviderSyncClonable,
aocs_tm_store: TmStore, aocs_tm_store: TmStore,
aocs_tm_funnel_tx: Sender<StoreAddr>, aocs_tm_funnel_tx: Sender<StoreAddr>,
verif_reporter: VerificationReporterWithSender<StdVerifSenderError>, verif_reporter: VerificationReporterWithSender<MpscPusInStoreSendError>,
periodic_hk_enabled: bool, periodic_hk_ids: Option<Vec<UniqueId>>,
periodic_hk_id: Option<AddressableId>,
} }
impl AOCSHousekeeper { impl AocsHousekeeper {
pub fn new( pub fn new(
sensor_data_pool: Arc<Mutex<AOCSSensorData>>, sensor_data_pool: Arc<Mutex<AocsDataMap>>,
request_rx: Receiver<RequestWithToken>, request_rx: Receiver<RequestWithToken>,
seq_count_provider: SeqCountProviderSyncClonable, seq_count_provider: SeqCountProviderSyncClonable,
aocs_tm_store: TmStore, aocs_tm_store: TmStore,
aocs_tm_funnel_tx: Sender<StoreAddr>, aocs_tm_funnel_tx: Sender<StoreAddr>,
verif_reporter: VerificationReporterWithSender<StdVerifSenderError>, verif_reporter: VerificationReporterWithSender<MpscPusInStoreSendError>,
) -> AOCSHousekeeper { ) -> AocsHousekeeper {
AOCSHousekeeper { let id_list = vec![
sensor_data_pool, MGM_VOLTAGE_1,
MGM_VOLTAGE_2,
MGM_VOLTAGE_3,
CSS_VOLTAGE_1,
CSS_VOLTAGE_2,
CSS_VOLTAGE_3,
CSS_VOLTAGE_4,
CSS_VOLTAGE_5,
CSS_VOLTAGE_6,
STR_QUATERNION_1,
STR_QUATERNION_2,
STR_QUATERNION_3,
STR_QUATERNION_4,
];
AocsHousekeeper {
data_map: sensor_data_pool,
id_list,
request_rx, request_rx,
seq_count_provider, seq_count_provider,
aocs_tm_store, aocs_tm_store,
aocs_tm_funnel_tx, aocs_tm_funnel_tx,
verif_reporter, verif_reporter,
periodic_hk_enabled: false, periodic_hk_ids: None,
periodic_hk_id: None,
} }
} }
@ -115,7 +195,7 @@ impl AOCSHousekeeper {
cds_stamp.write_to_bytes(&mut time_stamp_buf).unwrap(); cds_stamp.write_to_bytes(&mut time_stamp_buf).unwrap();
let start_token = self //implement this for verification let start_token = self //implement this for verification
.verif_reporter .verif_reporter
.start_success(request_with_token.1, Some(&time_stamp_buf)) .start_success(request_with_token.1.unwrap(), Some(&time_stamp_buf))
.expect("Error sending start success"); .expect("Error sending start success");
if let Ok(()) = match hk_req { if let Ok(()) = match hk_req {
HkRequest::OneShot(id) => self.one_shot_hk(id), HkRequest::OneShot(id) => self.one_shot_hk(id),
@ -147,38 +227,84 @@ impl AOCSHousekeeper {
} }
pub fn periodic_hk(&mut self) { pub fn periodic_hk(&mut self) {
if self.periodic_hk_enabled { //let json_string = self.aocs_data_to_str();
let json_string = self.aocs_data_to_str(); let data = self.data_map.lock().unwrap();
if let Some(id) = self.periodic_hk_id { let data_copy = data.clone();
self.send_hk_packet(id, &json_string); drop(data);
let mut buf: [u8; 8] = [0; 8];
if let Some(ids) = &self.periodic_hk_ids {
for id in ids.clone() {
if let Some(float_value(field)) = data_copy.get_value(id) {
let data_str = LittleEndian::write_f64(&mut buf, *field);
&self.send_hk_packet(id, &buf);
}
} }
} }
} }
pub fn one_shot_hk(&mut self, id: AddressableId) -> Result<(), ()> { pub fn one_shot_hk(&mut self, id: UniqueId) -> Result<(), ()> {
let json_string = self.aocs_data_to_str(); let data = self.data_map.lock().unwrap();
self.send_hk_packet(id, &json_string); let data_copy = data.clone();
drop(data);
let mut buf: [u8; 8] = [0; 8];
if let Some(float_value(field)) = data_copy.get_value(id) {
let data_str = LittleEndian::write_f64(&mut buf, *field);
self.send_hk_packet(id, &buf);
return Ok(());
}
Err(())
}
pub fn enable_hk(&mut self, id: UniqueId) -> Result<(), ()> {
if !self.id_list.contains(&id) {
return Err(());
}
let periodic_hk_ids = self.periodic_hk_ids.clone();
match periodic_hk_ids {
None => self.periodic_hk_ids = Some(vec![id]),
Some(mut ids) => {
ids.push(id);
self.periodic_hk_ids = Some(ids);
}
}
Ok(()) Ok(())
} }
pub fn enable_hk(&mut self, id: AddressableId) -> Result<(), ()> { pub fn disable_hk(&mut self, id: UniqueId) -> Result<(), ()> {
self.periodic_hk_enabled = true; if !self.id_list.contains(&id) {
self.periodic_hk_id = Some(id); return Err(());
return Ok(()) }
}
let periodic_hk_ids = self.periodic_hk_ids.clone();
pub fn disable_hk(&mut self, _id: AddressableId) -> Result<(), ()> { match periodic_hk_ids {
self.periodic_hk_enabled = false; None => return Err(()),
self.periodic_hk_id = None; Some(mut ids) => {
return Ok(()) if !ids.contains(&id) {
return Err(());
}
// .unwrap is allowed, since existence check is performed
let index = ids.iter().position(|x| *x == id).unwrap();
ids.remove(index);
if ids.len() < 1 {
self.periodic_hk_ids = None;
} else {
self.periodic_hk_ids = Some(ids);
}
}
}
Ok(())
} }
/*
pub fn aocs_data_to_str(&mut self) -> String { pub fn aocs_data_to_str(&mut self) -> String {
let pool = self.sensor_data_pool.lock().unwrap(); let pool = self.data_map.lock().unwrap();
serde_json::to_string(pool.deref()).unwrap() serde_json::to_string(pool.deref()).unwrap()
} }
pub fn send_hk_packet(&mut self, id: AddressableId, data: &str) { */
pub fn send_hk_packet_from_str(&mut self, id: UniqueId, data: &str) {
let mut time_stamp_buf: [u8; 7] = [0; 7]; let mut time_stamp_buf: [u8; 7] = [0; 7];
let mut huge_buf: [u8; 8192] = [0; 8192]; let mut huge_buf: [u8; 8192] = [0; 8192];
@ -186,7 +312,8 @@ impl AOCSHousekeeper {
SpHeader::tm_unseg(0x02, self.seq_count_provider.get_and_increment(), 0).unwrap(); SpHeader::tm_unseg(0x02, self.seq_count_provider.get_and_increment(), 0).unwrap();
let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap(); let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap();
cds_stamp.write_to_bytes(&mut time_stamp_buf).unwrap(); cds_stamp.write_to_bytes(&mut time_stamp_buf).unwrap();
let mut len = id.write_to_be_bytes(&mut huge_buf).unwrap(); huge_buf[0..4].copy_from_slice(&id.to_be_bytes());
let mut len = 4;
huge_buf[8..data.len() + 8].copy_from_slice(data.as_bytes()); huge_buf[8..data.len() + 8].copy_from_slice(data.as_bytes());
len += data.len(); len += data.len();
let tm_sec_header = let tm_sec_header =
@ -195,4 +322,24 @@ impl AOCSHousekeeper {
let addr = self.aocs_tm_store.add_pus_tm(&hk_tm); let addr = self.aocs_tm_store.add_pus_tm(&hk_tm);
self.aocs_tm_funnel_tx.send(addr).expect("sending failed"); self.aocs_tm_funnel_tx.send(addr).expect("sending failed");
} }
pub fn send_hk_packet(&mut self, id: UniqueId, data: &[u8]) {
let mut time_stamp_buf: [u8; 7] = [0; 7];
let mut huge_buf: [u8; 8192] = [0; 8192];
let mut sp_header =
SpHeader::tm_unseg(AOCS_HK_APID, self.seq_count_provider.get_and_increment(), 0)
.unwrap();
let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap();
cds_stamp.write_to_bytes(&mut time_stamp_buf).unwrap();
huge_buf[0..4].copy_from_slice(&id.to_be_bytes());
let mut len = 4;
huge_buf[8..data.len() + 8].copy_from_slice(data);
len += data.len();
let tm_sec_header =
PusTmSecondaryHeader::new_simple(3, Subservice::TmHkPacket as u8, &time_stamp_buf);
let hk_tm = PusTm::new(&mut sp_header, tm_sec_header, Some(&huge_buf[0..len]), true);
let addr = self.aocs_tm_store.add_pus_tm(&hk_tm);
self.aocs_tm_funnel_tx.send(addr).expect("sending failed");
}
} }

@ -1,6 +1,6 @@
use std::net::Ipv4Addr;
use num_enum::{IntoPrimitive, TryFromPrimitive}; use num_enum::{IntoPrimitive, TryFromPrimitive};
use satrs_core::events::{EventU32TypedSev, SeverityInfo}; use satrs_core::events::{EventU32TypedSev, SeverityInfo};
use std::net::Ipv4Addr;
use satrs_mib::res_code::{ResultU16, ResultU16Info}; use satrs_mib::res_code::{ResultU16, ResultU16Info};
use satrs_mib::resultcode; use satrs_mib::resultcode;
@ -41,7 +41,7 @@ pub mod tmtc_err {
pub const PUS_SERVICE_NOT_IMPLEMENTED: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 2); pub const PUS_SERVICE_NOT_IMPLEMENTED: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 2);
#[resultcode( #[resultcode(
info = "Not enough data inside the TC application data field. Optionally includes: \ info = "Not enough data inside the TC application data field. Optionally includes: \
8 bytes of failure data containing 2 failure parameters, \ 8 bytes of failure data containing 2 failure parameters, \
P1 (u32 big endian): Expected data length, P2: Found data length" P1 (u32 big endian): Expected data length, P2: Found data length"
)] )]

@ -11,16 +11,17 @@ mod can_ids;
mod ccsds; mod ccsds;
mod hk; mod hk;
mod logger; mod logger;
mod messages;
mod pld_handler; mod pld_handler;
mod power_handler; mod power_handler;
mod pus; mod pus;
mod requests; mod requests;
mod tmtc; mod tmtc;
mod messages;
use crate::requests::{RequestWithToken}; use crate::requests::RequestWithToken;
use crate::tmtc::{ use crate::tmtc::{
core_tmtc_task, OtherArgs, PusTcSource, TcArgs, TcStore, TmArgs, TmFunnel, TmStore, PUS_APID, core_tmtc_task, OtherArgs, PusTcSource, TcArgs, TcStore, TmArgs, TmFunnel, TmStore, AOCS_APID,
AOCS_HK_APID, CSS_APID, MGM_APID, MGT_APID, PLD_APID, PUS_APID, PWR_APID, RWL_APID, STR_APID,
}; };
use eurosim_obsw::{RequestTargetId, OBSW_SERVER_ADDR, SERVER_PORT}; use eurosim_obsw::{RequestTargetId, OBSW_SERVER_ADDR, SERVER_PORT};
use satrs_core::event_man::{ use satrs_core::event_man::{
@ -36,13 +37,13 @@ use satrs_core::pus::verification::{
}; };
use satrs_core::pus::{EcssTmErrorWithSend, EcssTmSenderCore}; use satrs_core::pus::{EcssTmErrorWithSend, EcssTmSenderCore};
use satrs_core::seq_count::SeqCountProviderSyncClonable; use satrs_core::seq_count::SeqCountProviderSyncClonable;
use satrs_core::{spacepackets::tm::PusTm}; use satrs_core::spacepackets::tm::PusTm;
use strum::IntoEnumIterator; use strum::IntoEnumIterator;
use crate::can_ids::{ use crate::can_ids::{
load_package_ids, DeviceId, PackageModel, ThreadId, load_package_id_to_apids, load_package_id_to_threads, DeviceId, PackageModel, ThreadId,
}; };
use log::{info}; use log::info;
use satrs_core::power::{SwitchId, SwitchState}; use satrs_core::power::{SwitchId, SwitchState};
use std::collections::HashMap; use std::collections::HashMap;
use std::net::{IpAddr, SocketAddr}; use std::net::{IpAddr, SocketAddr};
@ -50,39 +51,13 @@ use std::sync::mpsc::channel;
use std::sync::{mpsc, Arc, Mutex, RwLock}; use std::sync::{mpsc, Arc, Mutex, RwLock};
use std::thread; use std::thread;
//use libc::time64_t; //use libc::time64_t;
use crate::aocs::{MGMData, MGMHandler}; use crate::aocs::{core_aocs_loop, MGMData, MGMHandler};
#[cfg(feature = "can")] #[cfg(feature = "can")]
use crate::can::CanTxHandler; use crate::can::CanTxHandler;
use crate::hk::{AOCSHousekeeper, AOCSSensorData}; use crate::hk::{AOCSSensorData, AocsDataMap, AocsHousekeeper};
use crate::pld_handler::{core_pld_task}; use crate::pld_handler::core_pld_task;
use crate::power_handler::{core_power_task, PowerSwitcher}; use crate::power_handler::{core_power_task, PowerSwitcher};
#[derive(Clone)]
struct EventTmSender {
store_helper: TmStore,
sender: mpsc::Sender<StoreAddr>,
}
impl EventTmSender {
fn new(store_helper: TmStore, sender: mpsc::Sender<StoreAddr>) -> Self {
Self {
store_helper,
sender,
}
}
}
impl EcssTmSenderCore for EventTmSender {
type Error = mpsc::SendError<StoreAddr>;
fn send_tm(&mut self, tm: PusTm) -> Result<(), EcssTmErrorWithSend<Self::Error>> {
let addr = self.store_helper.add_pus_tm(&tm);
self.sender
.send(addr)
.map_err(EcssTmErrorWithSend::SendError)
}
}
fn main() { fn main() {
logger::setup_logger().unwrap(); logger::setup_logger().unwrap();
info!("Running DemoSat OBSW!"); info!("Running DemoSat OBSW!");
@ -111,17 +86,20 @@ fn main() {
}; };
let seq_count_provider = SeqCountProviderSyncClonable::default(); let seq_count_provider = SeqCountProviderSyncClonable::default();
let msg_count_provider = SeqCountProviderSyncClonable::default();
let aocs_seq_count_provider = seq_count_provider.clone(); let aocs_seq_count_provider = seq_count_provider.clone();
let verif_seq_count_provider = seq_count_provider.clone();
let tmtc_seq_count_provider = seq_count_provider.clone();
let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT); let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
let (tc_source_tx, tc_source_rx) = channel(); let (tc_source_tx, tc_source_rx) = channel();
let (tm_funnel_tx, tm_funnel_rx) = channel(); let (tm_funnel_tx, tm_funnel_rx) = channel();
let (tm_server_tx, tm_server_rx) = channel(); let (tm_server_tx, tm_server_rx) = channel();
let verif_sender = MpscVerifSender::new(tm_store.pool.clone(), tm_funnel_tx.clone()); let verif_sender = MpscVerifSender::new(1, "name", tm_store.pool.clone(), tm_funnel_tx.clone());
let verif_cfg = VerificationReporterCfg::new( let verif_cfg = VerificationReporterCfg::new(
PUS_APID, PUS_APID,
#[allow(clippy::box_default)] Box::new(verif_seq_count_provider.clone()),
Box::new(seq_count_provider.clone()), Box::new(msg_count_provider),
1, 1,
2, 2,
8, 8,
@ -137,8 +115,7 @@ fn main() {
let mut event_man = EventManagerWithMpscQueue::new(Box::new(event_recv)); let mut event_man = EventManagerWithMpscQueue::new(Box::new(event_recv));
let event_reporter = EventReporter::new(PUS_APID, 128).unwrap(); let event_reporter = EventReporter::new(PUS_APID, 128).unwrap();
let pus_tm_backend = DefaultPusMgmtBackendProvider::<EventU32>::default(); let pus_tm_backend = DefaultPusMgmtBackendProvider::<EventU32>::default();
let pus_event_dispatcher = let pus_event_dispatcher = PusEventDispatcher::new(event_reporter, Box::new(pus_tm_backend));
PusEventDispatcher::new(event_reporter, Box::new(pus_tm_backend));
let (pus_event_man_tx, pus_event_man_rx) = channel(); let (pus_event_man_tx, pus_event_man_rx) = channel();
let pus_event_man_send_provider = MpscEventU32SendProvider::new(1, pus_event_man_tx); let pus_event_man_send_provider = MpscEventU32SendProvider::new(1, pus_event_man_tx);
let reporter_event_handler = verif_reporter.clone(); let reporter_event_handler = verif_reporter.clone();
@ -146,11 +123,19 @@ fn main() {
let mut reporter_pld = verif_reporter.clone(); let mut reporter_pld = verif_reporter.clone();
event_man.subscribe_all(pus_event_man_send_provider.id()); event_man.subscribe_all(pus_event_man_send_provider.id());
// possibly deprecated, as the apid should replace the target id
let mut request_map = HashMap::new(); let mut request_map = HashMap::new();
let (aocs_thread_tx, aocs_thread_rx) = channel::<RequestWithToken>(); let (aocs_thread_tx, aocs_thread_rx) = channel::<RequestWithToken>();
let (pld_thread_tx, pld_thread_rx) = channel::<RequestWithToken>(); let (pld_thread_tx, pld_thread_rx) = channel::<RequestWithToken>();
request_map.insert(RequestTargetId::AcsSubsystem as u32, aocs_thread_tx); let (pwr_thread_tx, pwr_thread_rx) = channel::<RequestWithToken>();
request_map.insert(RequestTargetId::PldSubsystem as u32, pld_thread_tx); let (aocs_hk_tx, aocs_hk_rx) = channel::<RequestWithToken>();
let (mgm_tx, mgm_rx) = channel::<RequestWithToken>();
let (css_tx, css_rx) = channel::<RequestWithToken>();
let (str_tx, str_rx) = channel::<RequestWithToken>();
let (mgt_tx, mgt_rx) = channel::<RequestWithToken>();
let (rwl_tx, rwl_rx) = channel::<RequestWithToken>();
request_map.insert(RequestTargetId::AcsSubsystem as u32, aocs_thread_tx.clone());
request_map.insert(RequestTargetId::PldSubsystem as u32, pld_thread_tx.clone());
//add here receivers for tmtc task to send requests to //add here receivers for tmtc task to send requests to
//request_map.insert(RequestTargetId::CanTask as u32, can_thread_tx); //request_map.insert(RequestTargetId::CanTask as u32, can_thread_tx);
@ -159,6 +144,18 @@ fn main() {
tc_source: tc_source_tx, tc_source: tc_source_tx,
}; };
let mut apid_map = HashMap::new();
apid_map.insert(PLD_APID, pld_thread_tx.clone());
apid_map.insert(PWR_APID, pwr_thread_tx.clone());
apid_map.insert(AOCS_APID, aocs_thread_tx.clone());
apid_map.insert(AOCS_HK_APID, aocs_hk_tx.clone());
apid_map.insert(MGM_APID, mgm_tx.clone());
apid_map.insert(CSS_APID, css_tx.clone());
apid_map.insert(STR_APID, str_tx.clone());
apid_map.insert(MGT_APID, mgt_tx.clone());
apid_map.insert(RWL_APID, rwl_tx.clone());
// Create clones here to allow moving the values // Create clones here to allow moving the values
let core_args = OtherArgs { let core_args = OtherArgs {
sock_addr, sock_addr,
@ -166,6 +163,8 @@ fn main() {
event_sender, event_sender,
event_request_tx, event_request_tx,
request_map, request_map,
apid_map,
seq_count_provider: verif_seq_count_provider,
}; };
let tc_args = TcArgs { let tc_args = TcArgs {
tc_source, tc_source,
@ -177,18 +176,26 @@ fn main() {
tm_server_rx, tm_server_rx,
}; };
let (aocs_can_tx, aocs_can_rx) = mpsc::channel::<PackageModel>(); let (mgm_can_tx, mgm_can_rx) = mpsc::channel::<PackageModel>();
let (mgt_can_tx, mgt_can_rx) = mpsc::channel::<PackageModel>();
let (css_can_tx, css_can_rx) = mpsc::channel::<PackageModel>();
let (rwl_can_tx, rwl_can_rx) = mpsc::channel::<PackageModel>();
let (str_can_tx, str_can_rx) = mpsc::channel::<PackageModel>();
let (power_can_tx, power_can_rx) = mpsc::channel::<PackageModel>(); let (power_can_tx, power_can_rx) = mpsc::channel::<PackageModel>();
let (pld_can_tx, pld_can_rx) = mpsc::channel::<PackageModel>(); let (pld_can_tx, pld_can_rx) = mpsc::channel::<PackageModel>();
// make tx thread id hashmap // make tx thread id hashmap
let mut can_senders = HashMap::new(); let mut can_senders = HashMap::new();
can_senders.insert(ThreadId::AOCSThread, aocs_can_tx); can_senders.insert(PWR_APID, power_can_tx);
can_senders.insert(ThreadId::PowerThread, power_can_tx); can_senders.insert(PLD_APID, pld_can_tx);
can_senders.insert(ThreadId::PLDThread, pld_can_tx); can_senders.insert(MGM_APID, mgm_can_tx);
can_senders.insert(MGT_APID, mgt_can_tx);
can_senders.insert(RWL_APID, rwl_can_tx);
can_senders.insert(CSS_APID, css_can_tx);
can_senders.insert(STR_APID, str_can_tx);
// get package id hashmap // get package id hashmap
let package_ids_rx = load_package_ids(); let package_ids_rx = load_package_id_to_apids();
info!("Starting TMTC task"); info!("Starting TMTC task");
let builder0 = thread::Builder::new().name("TMTCThread".into()); let builder0 = thread::Builder::new().name("TMTCThread".into());
@ -199,7 +206,8 @@ fn main() {
let (can_tx_sender, can_tx_receiver) = channel(); let (can_tx_sender, can_tx_receiver) = channel();
#[cfg(feature = "can")] #[cfg(feature = "can")]
let mut can_rx_socket = can::CanRxHandler::new_socket("can0", can_senders.clone(), package_ids_rx.clone()).unwrap(); let mut can_rx_socket =
can::CanRxHandler::new_socket("can0", can_senders.clone(), package_ids_rx.clone()).unwrap();
#[cfg(feature = "can")] #[cfg(feature = "can")]
info!("Starting CAN Socket listening task"); info!("Starting CAN Socket listening task");
@ -210,19 +218,19 @@ fn main() {
}); });
#[cfg(feature = "can")] #[cfg(feature = "can")]
let mut can_tx_socket = CanTxHandler::new_socket("can0", package_ids_rx.clone(), can_tx_receiver).unwrap(); let mut can_tx_socket =
CanTxHandler::new_socket("can0", package_ids_rx.clone(), can_tx_receiver).unwrap();
#[cfg(feature = "can")] #[cfg(feature = "can")]
info!("Starting CAN Socket writing task"); info!("Starting CAN Socket writing task");
let builder_can_tx = thread::Builder::new().name("TxHandler".into()); let builder_can_tx = thread::Builder::new().name("TxHandler".into());
let jh_can_tx = builder_can_tx.spawn( move || loop { let jh_can_tx = builder_can_tx.spawn(move || loop {
#[cfg(feature = "can")] #[cfg(feature = "can")]
can_tx_socket.process_incoming(); can_tx_socket.process_incoming();
}); });
let (pcdu_tx, pcdu_rx) = mpsc::channel::<(SwitchId, SwitchState)>(); let (pcdu_tx, pcdu_rx) = mpsc::channel::<(SwitchId, SwitchState)>();
let pcdu_can_tx_sender = let pcdu_can_tx_sender = can_tx_sender.clone();
can_tx_sender.clone();
let mut device_state_map = HashMap::new(); let mut device_state_map = HashMap::new();
for id in DeviceId::iter() { for id in DeviceId::iter() {
@ -243,12 +251,11 @@ fn main() {
); );
}); });
let package_map_pld_tx = load_package_ids(); let package_map_pld_tx = load_package_id_to_threads();
let pld_tm_funnel_tx = tm_funnel_tx.clone(); let pld_tm_funnel_tx = tm_funnel_tx.clone();
let pld_tm_store = tm_store.clone(); let pld_tm_store = tm_store.clone();
let pld_can_tx_sender = let pld_can_tx_sender = can_tx_sender.clone();
can_tx_sender.clone();
let power_switcher_pld = power_switcher.clone(); let power_switcher_pld = power_switcher.clone();
@ -282,24 +289,42 @@ fn main() {
} }
}); });
let package_map_aocs_tx = load_package_ids(); let package_map_aocs_tx = load_package_id_to_threads();
let aocs_tm_funnel_tx = tm_funnel_tx.clone(); let aocs_tm_funnel_tx = tm_funnel_tx.clone();
let aocs_tm_store = tm_store.clone(); let aocs_tm_store = tm_store.clone();
let mgm_shared_data: Arc<Mutex<MGMData>> = Arc::default(); let mgm_shared_data: Arc<Mutex<MGMData>> = Arc::default();
let aocs_sensor_data = Arc::new(Mutex::new(AOCSSensorData::new())); let aocs_data = Arc::new(Mutex::new(AocsDataMap::new()));
let (mgm_action_tx, mgm_action_rx) = channel(); let (mgm_action_tx, mgm_action_rx) = channel::<RequestWithToken>();
let power_switcher_aocs = power_switcher.clone(); let power_switcher_aocs = power_switcher.clone();
info!("Starting AOCS task"); info!("Starting AOCS task");
let builder5 = thread::Builder::new().name("AOCSThread".into()); let builder5 = thread::Builder::new().name("AOCSThread".into());
let jh5 = builder5.spawn(move || { let jh5 = builder5.spawn(move || {
let mut mgm_handler = MGMHandler::new(power_switcher_aocs.clone(), DeviceId::MGM1, can_tx_sender.clone(), aocs_can_rx, mgm_action_rx); let mut aocs_housekeeper = AocsHousekeeper::new(
aocs_data,
aocs_hk_rx,
aocs_seq_count_provider,
aocs_tm_store,
aocs_tm_funnel_tx,
reporter_aocs,
);
let aocs_sensor_data = Arc::new(Mutex::new(AOCSSensorData::new())); loop {
let mut aocs_housekeeper = AOCSHousekeeper::new( aocs_housekeeper.handle_hk_request();
}
/*let mut mgm_handler = MGMHandler::new(
power_switcher_aocs.clone(),
DeviceId::MGM1,
can_tx_sender.clone(),
mgm_can_rx,
mgm_action_rx,
);
let aocs_sensor_data = Arc::new(Mutex::new(AocsDataMap::new()));
let mut aocs_housekeeper = AocsHousekeeper::new(
aocs_sensor_data.clone(), aocs_sensor_data.clone(),
aocs_thread_rx, aocs_thread_rx,
aocs_seq_count_provider, aocs_seq_count_provider,
@ -314,17 +339,21 @@ fn main() {
drop(locked_sensor_data); drop(locked_sensor_data);
aocs_housekeeper.handle_hk_request(); aocs_housekeeper.handle_hk_request();
} }
*/
core_aocs_loop();
}); });
jh0.unwrap() jh0.unwrap()
.join() .join()
.expect("Joining UDP TMTC server thread failed"); .expect("Joining UDP TMTC server thread failed");
jh1.unwrap() jh1.unwrap()
.join() .join()
.expect("Joining CAN Bus Listening thread failed"); .expect("Joining CAN Bus Listening thread failed");
jh_can_tx.unwrap() jh_can_tx
.unwrap()
.join() .join()
.expect("Joining CAN Bus Writing thread failed"); .expect("Joining CAN Bus Writing thread failed");
jh2.unwrap().join().expect("Joining power thread failed"); jh2.unwrap().join().expect("Joining power thread failed");

@ -1,5 +1,5 @@
use std::sync::mpsc::Sender;
use crate::requests::RequestWithToken; use crate::requests::RequestWithToken;
use std::sync::mpsc::Sender;
pub struct InternalMessage { pub struct InternalMessage {
response_sender: Option<Sender<ResponseDataTypes>>, response_sender: Option<Sender<ResponseDataTypes>>,
@ -10,6 +10,4 @@ pub enum MessageDataTypes {
RequestWithToken(RequestWithToken), RequestWithToken(RequestWithToken),
} }
pub enum ResponseDataTypes { pub enum ResponseDataTypes {}
}

@ -2,14 +2,17 @@ use crate::action::ActionRequest;
use crate::can_ids::{DeviceId, PackageId, PackageModel}; use crate::can_ids::{DeviceId, PackageId, PackageModel};
use crate::power_handler::{DeviceState, PowerSwitcher}; use crate::power_handler::{DeviceState, PowerSwitcher};
use crate::requests::{Request, RequestWithToken}; use crate::requests::{Request, RequestWithToken};
use eurosim_obsw::RequestTargetId; use eurosim_obsw::{tmtc_err, RequestTargetId};
use log::{debug}; use log::debug;
use satrs_core::power::{PowerSwitchInfo, PowerSwitcherCommandSender, SwitchId}; use satrs_core::power::{PowerSwitchInfo, PowerSwitcherCommandSender, SwitchId};
use satrs_core::pus::verification::{StdVerifSenderError, VerificationReporterWithSender}; use satrs_core::pus::verification::{FailParams, VerificationReporterWithSender};
use satrs_core::pus::MpscPusInStoreSendError;
use satrs_core::spacepackets::time::cds::TimeProvider; use satrs_core::spacepackets::time::cds::TimeProvider;
use satrs_core::spacepackets::time::TimeWriter; use satrs_core::spacepackets::time::TimeWriter;
use std::sync::mpsc; use std::sync::mpsc;
use std::sync::mpsc::{Receiver, Sender}; use std::sync::mpsc::{Receiver, Sender};
use std::thread::sleep;
use std::time::Duration;
#[derive(Debug, PartialEq, Copy, Clone)] #[derive(Debug, PartialEq, Copy, Clone)]
pub enum CameraMode { pub enum CameraMode {
@ -18,6 +21,7 @@ pub enum CameraMode {
Verification, Verification,
Start, Start,
End, End,
Broken,
} }
pub struct CameraHandler { pub struct CameraHandler {
@ -127,6 +131,7 @@ impl CameraHandler {
// Camera Device Handler State Machine // Camera Device Handler State Machine
match self.mode { match self.mode {
CameraMode::Broken => {}
CameraMode::Idle => {} CameraMode::Idle => {}
CameraMode::PictureRequest => { CameraMode::PictureRequest => {
if self.device_state == DeviceState::Off { if self.device_state == DeviceState::Off {
@ -137,17 +142,23 @@ impl CameraHandler {
debug!("switching power"); debug!("switching power");
} }
if self.device_state == DeviceState::SwitchingPower { if self.device_state == DeviceState::SwitchingPower {
if self let allowed_cycles = 10;
.power_switcher for i in 0..allowed_cycles {
.get_is_switch_on(self.camera_switch_id) if self
.expect("reading switch state failed") .power_switcher
{ .get_is_switch_on(self.camera_switch_id)
self.device_state = DeviceState::On; .expect("reading switch state failed")
debug!("device on"); {
self.device_state = DeviceState::On;
debug!("device on");
}
} }
self.mode = CameraMode::Broken;
} }
if self.device_state == DeviceState::On { if self.device_state == DeviceState::On {
self.can_tx.send(PackageModel::new(PackageId::CameraImageRequest, &[1]).unwrap()).unwrap(); self.can_tx
.send(PackageModel::new(PackageId::CameraImageRequest, &[1]).unwrap())
.unwrap();
debug!("sent camera request"); debug!("sent camera request");
self.mode = CameraMode::Verification; self.mode = CameraMode::Verification;
} }
@ -208,7 +219,7 @@ pub fn core_pld_task(
pld_thread_rx: Receiver<RequestWithToken>, pld_thread_rx: Receiver<RequestWithToken>,
pld_can_rx: Receiver<PackageModel>, pld_can_rx: Receiver<PackageModel>,
pld_can_tx: Sender<PackageModel>, pld_can_tx: Sender<PackageModel>,
reporter_pld: &mut VerificationReporterWithSender<StdVerifSenderError>, reporter_pld: &mut VerificationReporterWithSender<MpscPusInStoreSendError>,
) { ) {
let (_camera_mode_tx, camera_mode_rx) = mpsc::channel(); let (_camera_mode_tx, camera_mode_rx) = mpsc::channel();
let (_action_tx, action_rx) = mpsc::channel(); let (_action_tx, action_rx) = mpsc::channel();
@ -223,7 +234,7 @@ pub fn core_pld_task(
); );
let mut time_stamp_buf: [u8; 7] = [0; 7]; let mut time_stamp_buf: [u8; 7] = [0; 7];
loop { 'outer: loop {
match pld_thread_rx.try_recv() { match pld_thread_rx.try_recv() {
Ok(request_with_token) => { Ok(request_with_token) => {
match request_with_token.0 { match request_with_token.0 {
@ -237,7 +248,10 @@ pub fn core_pld_task(
cds_stamp.write_to_bytes(&mut time_stamp_buf).unwrap(); cds_stamp.write_to_bytes(&mut time_stamp_buf).unwrap();
// send start verification and get token // send start verification and get token
let start_token = reporter_pld let start_token = reporter_pld
.start_success(request_with_token.1, Some(&time_stamp_buf)) .start_success(
request_with_token.1.unwrap(),
Some(&time_stamp_buf),
)
.expect("Error sending start success."); .expect("Error sending start success.");
debug!("{:?}", camera_handler.get_mode()); debug!("{:?}", camera_handler.get_mode());

@ -1,8 +1,6 @@
use crate::can_ids::{DeviceId, PackageId, PackageModel}; use crate::can_ids::{DeviceId, PackageId, PackageModel};
use log::{debug}; use log::debug;
use satrs_core::power::{ use satrs_core::power::{PowerSwitchInfo, PowerSwitcherCommandSender, SwitchId, SwitchState};
PowerSwitchInfo, PowerSwitchProvider, PowerSwitcherCommandSender, SwitchId, SwitchState,
};
use std::collections::HashMap; use std::collections::HashMap;
use std::convert::TryFrom; use std::convert::TryFrom;
use std::sync::mpsc::{Receiver, Sender}; use std::sync::mpsc::{Receiver, Sender};
@ -85,10 +83,6 @@ impl PowerSwitchInfo for PowerSwitcher {
} }
} }
impl PowerSwitchProvider for PowerSwitcher {
type Error = ();
}
impl PCDU { impl PCDU {
pub fn new( pub fn new(
switch_rx: Receiver<(SwitchId, SwitchState)>, switch_rx: Receiver<(SwitchId, SwitchState)>,
@ -108,7 +102,9 @@ impl PCDU {
return if let Ok(dev_id) = DeviceId::try_from(switch_id) { return if let Ok(dev_id) = DeviceId::try_from(switch_id) {
let dev_id_bytes = dev_id as u8; let dev_id_bytes = dev_id as u8;
let buf: &[u8] = &dev_id_bytes.to_be_bytes(); let buf: &[u8] = &dev_id_bytes.to_be_bytes();
self.can_tx.send(PackageModel::new(PackageId::DevicePowerOnRequest, buf).unwrap()).unwrap(); self.can_tx
.send(PackageModel::new(PackageId::DevicePowerOnRequest, buf).unwrap())
.unwrap();
let mut map_lock = self.device_state_map.lock().unwrap(); let mut map_lock = self.device_state_map.lock().unwrap();
*map_lock.get_mut(&dev_id).unwrap() = SwitchState::Unknown; *map_lock.get_mut(&dev_id).unwrap() = SwitchState::Unknown;
// TODO: potentially change bus logic -> remove acceptance and verification of power off/on, since status is simply called in next step anyway // TODO: potentially change bus logic -> remove acceptance and verification of power off/on, since status is simply called in next step anyway
@ -124,7 +120,9 @@ impl PCDU {
return if let Ok(dev_id) = DeviceId::try_from(switch_id) { return if let Ok(dev_id) = DeviceId::try_from(switch_id) {
let dev_id_bytes = dev_id as u8; let dev_id_bytes = dev_id as u8;
let buf: &[u8] = &dev_id_bytes.to_be_bytes(); let buf: &[u8] = &dev_id_bytes.to_be_bytes();
self.can_tx.send(PackageModel::new(PackageId::DevicePowerOffRequest, buf).unwrap()).unwrap(); self.can_tx
.send(PackageModel::new(PackageId::DevicePowerOffRequest, buf).unwrap())
.unwrap();
let mut map_lock = self.device_state_map.lock().unwrap(); let mut map_lock = self.device_state_map.lock().unwrap();
*map_lock.get_mut(&dev_id).unwrap() = SwitchState::Unknown; *map_lock.get_mut(&dev_id).unwrap() = SwitchState::Unknown;
self.can_rx.recv().unwrap(); self.can_rx.recv().unwrap();
@ -139,7 +137,9 @@ impl PCDU {
let _switch_id: SwitchId = *dev_id as u16; let _switch_id: SwitchId = *dev_id as u16;
let dev_id_bytes = *dev_id as u8; let dev_id_bytes = *dev_id as u8;
let buf: &[u8] = &dev_id_bytes.to_be_bytes(); let buf: &[u8] = &dev_id_bytes.to_be_bytes();
self.can_tx.send(PackageModel::new(PackageId::DevicePowerStatusRequest, buf).unwrap()).unwrap(); self.can_tx
.send(PackageModel::new(PackageId::DevicePowerStatusRequest, buf).unwrap())
.unwrap();
match self.can_rx.recv_timeout(Duration::from_secs(10)) { match self.can_rx.recv_timeout(Duration::from_secs(10)) {
Ok(msg) => { Ok(msg) => {
if msg.package_id() == PackageId::DevicePowerStatusResponse if msg.package_id() == PackageId::DevicePowerStatusResponse
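A hedged sketch (not part of the commit) of the CAN request/response round trip the PCDU methods above perform; the package IDs, channel types and the ten second timeout are taken from the surrounding hunks, the free function itself is hypothetical.

use std::sync::mpsc::{Receiver, Sender};
use std::time::Duration;

fn query_power_status(
    can_tx: &Sender<PackageModel>,
    can_rx: &Receiver<PackageModel>,
    dev_id: DeviceId,
) -> bool {
    // Encode the device ID as a single big-endian byte, as done above.
    let dev_id_byte = dev_id as u8;
    let buf: &[u8] = &dev_id_byte.to_be_bytes();
    can_tx
        .send(PackageModel::new(PackageId::DevicePowerStatusRequest, buf).unwrap())
        .unwrap();
    // Wait up to ten seconds for the matching status response on the bus.
    match can_rx.recv_timeout(Duration::from_secs(10)) {
        Ok(msg) => msg.package_id() == PackageId::DevicePowerStatusResponse,
        Err(_) => false,
    }
}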

View File

@ -1,8 +1,12 @@
use crate::action;
use crate::action::ActionRequest;
use crate::requests::{Request, RequestWithToken}; use crate::requests::{Request, RequestWithToken};
use crate::tmtc::{PusTcSource, TmStore}; use crate::tmtc::{PusTcSource, TmStore};
use eurosim_obsw::RequestTargetId::{AcsSubsystem, PldSubsystem};
use eurosim_obsw::{hk_err, tmtc_err, CustomPusServiceId, TEST_EVENT};
use log::{info, warn}; use log::{info, warn};
use satrs_core::events::EventU32; use satrs_core::events::EventU32;
use satrs_core::hk::{CollectionIntervalFactor, HkRequest}; use satrs_core::hk::{CollectionIntervalFactor, HkRequest, UniqueId};
use satrs_core::mode::{ModeAndSubmode, ModeCommand, ModeRequest}; use satrs_core::mode::{ModeAndSubmode, ModeCommand, ModeRequest};
use satrs_core::params::Params; use satrs_core::params::Params;
use satrs_core::pool::StoreAddr; use satrs_core::pool::StoreAddr;
@ -18,21 +22,20 @@ use satrs_core::pus::verification::{
use satrs_core::pus::{event, GenericTcCheckError}; use satrs_core::pus::{event, GenericTcCheckError};
use satrs_core::res_code::ResultU16; use satrs_core::res_code::ResultU16;
use satrs_core::spacepackets::ecss::{scheduling, PusServiceId}; use satrs_core::spacepackets::ecss::{scheduling, PusServiceId};
use satrs_core::spacepackets::CcsdsPacket;
use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper; use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper;
use satrs_core::tmtc::{AddressableId, PusServiceProvider, TargetId}; use satrs_core::tmtc::{AddressableId, PusServiceProvider, TargetId};
use satrs_core::{ use satrs_core::{
spacepackets::ecss::PusPacket, spacepackets::tc::PusTc, spacepackets::time::cds::TimeProvider, spacepackets::ecss::PusPacket, spacepackets::tc::PusTc, spacepackets::time::cds::TimeProvider,
spacepackets::time::TimeWriter, spacepackets::SpHeader, spacepackets::time::TimeWriter, spacepackets::SpHeader,
}; };
use eurosim_obsw::{hk_err, tmtc_err, CustomPusServiceId, TEST_EVENT};
use std::cell::RefCell; use std::cell::RefCell;
use std::collections::HashMap; use std::collections::HashMap;
use std::convert::TryFrom; use std::convert::TryFrom;
use std::hash::Hash;
use std::rc::Rc; use std::rc::Rc;
use std::sync::mpsc::Sender; use std::sync::mpsc::Sender;
use eurosim_obsw::RequestTargetId::{AcsSubsystem, PldSubsystem}; use satrs_core::seq_count::{SeqCountProviderSyncClonable, SequenceCountProviderCore};
use crate::action;
use crate::action::ActionRequest;
pub struct PusReceiver { pub struct PusReceiver {
pub tm_helper: PusTmWithCdsShortHelper, pub tm_helper: PusTmWithCdsShortHelper,
@ -48,6 +51,8 @@ pub struct PusTmArgs {
pub tm_store: TmStore, pub tm_store: TmStore,
/// All verification reporting is done with this reporter. /// All verification reporting is done with this reporter.
pub verif_reporter: StdVerifReporterWithSender, pub verif_reporter: StdVerifReporterWithSender,
/// Sequence count provider for TMs sent from within the PUS demultiplexer
pub seq_count_provider: SeqCountProviderSyncClonable,
} }
impl PusTmArgs { impl PusTmArgs {
@ -64,6 +69,7 @@ pub struct PusTcArgs {
pub tc_source: PusTcSource, pub tc_source: PusTcSource,
pub event_sender: Sender<(EventU32, Option<Params>)>, pub event_sender: Sender<(EventU32, Option<Params>)>,
pub scheduler: Rc<RefCell<PusScheduler>>, pub scheduler: Rc<RefCell<PusScheduler>>,
pub apid_map: HashMap<u16, Sender<RequestWithToken>>,
} }
struct TimeStampHelper { struct TimeStampHelper {
@ -113,6 +119,8 @@ impl PusServiceProvider for PusReceiver {
_header: &SpHeader, _header: &SpHeader,
pus_tc: &PusTc, pus_tc: &PusTc,
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let apid = pus_tc.apid();
println!("{:?}", apid);
let init_token = self.tm_args.verif_reporter.add_tc(pus_tc); let init_token = self.tm_args.verif_reporter.add_tc(pus_tc);
self.stamp_helper.update_from_now(); self.stamp_helper.update_from_now();
let accepted_token = self let accepted_token = self
@ -121,25 +129,29 @@ impl PusServiceProvider for PusReceiver {
.acceptance_success(init_token, Some(self.stamp_helper.stamp())) .acceptance_success(init_token, Some(self.stamp_helper.stamp()))
.expect("Acceptance success failure"); .expect("Acceptance success failure");
let service = PusServiceId::try_from(service); let service = PusServiceId::try_from(service);
match service {
// Services handled directly by the PUS handler return None; otherwise the request is returned and forwarded via the APID map
if let Some(request) = match service {
Ok(standard_service) => match standard_service { Ok(standard_service) => match standard_service {
PusServiceId::Test => self.handle_test_service(pus_tc, accepted_token), PusServiceId::Test => self.handle_test_service(pus_tc, accepted_token),
PusServiceId::Housekeeping => self.handle_hk_request(pus_tc, accepted_token), PusServiceId::Housekeeping => self.handle_hk_request(pus_tc, accepted_token),
PusServiceId::Event => self.handle_event_request(pus_tc, accepted_token), PusServiceId::Event => self.handle_event_request(pus_tc, accepted_token),
PusServiceId::Scheduling => self.handle_scheduled_tc(pus_tc, accepted_token), PusServiceId::Scheduling => self.handle_scheduled_tc(pus_tc, accepted_token),
PusServiceId::Action => self.handle_action_request(pus_tc, accepted_token), PusServiceId::Action => self.handle_function_request(pus_tc, accepted_token),
_ => self _ => {
.tm_args self.tm_args
.verif_reporter .verif_reporter
.start_failure( .start_failure(
accepted_token, accepted_token,
FailParams::new( FailParams::new(
Some(self.stamp_helper.stamp()), Some(self.stamp_helper.stamp()),
&tmtc_err::PUS_SERVICE_NOT_IMPLEMENTED, &tmtc_err::PUS_SERVICE_NOT_IMPLEMENTED,
Some(&[standard_service as u8]), Some(&[standard_service as u8]),
), ),
) )
.expect("Start failure verification failed"), .expect("Start failure verification failed");
None
}
}, },
Err(e) => { Err(e) => {
if let Ok(custom_service) = CustomPusServiceId::try_from(e.number) { if let Ok(custom_service) = CustomPusServiceId::try_from(e.number) {
@ -147,7 +159,7 @@ impl PusServiceProvider for PusReceiver {
CustomPusServiceId::Mode => { CustomPusServiceId::Mode => {
self.handle_mode_service(pus_tc, accepted_token) self.handle_mode_service(pus_tc, accepted_token)
} }
CustomPusServiceId::Health => {} CustomPusServiceId::Health => None,
} }
} else { } else {
self.tm_args self.tm_args
@ -160,16 +172,25 @@ impl PusServiceProvider for PusReceiver {
Some(&[e.number]), Some(&[e.number]),
), ),
) )
.expect("Start failure verification failed") .expect("Start failure verification failed");
None
} }
} }
} {
if let Some(sender) = self.tc_args.apid_map.get(&apid) {
sender.send(request).expect("error sending request");
}
} }
Ok(()) Ok(())
} }
} }
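To summarize the new dispatch flow above as a compact sketch (not in the diff): service handlers now return Option<RequestWithToken>, and any request not handled inside the PUS receiver is forwarded to the subsystem channel registered for the telecommand's APID. The helper name is hypothetical.

use std::collections::HashMap;
use std::sync::mpsc::Sender;

fn forward_unhandled_request(
    apid: u16,
    maybe_request: Option<RequestWithToken>,
    apid_map: &HashMap<u16, Sender<RequestWithToken>>,
) {
    if let Some(request) = maybe_request {
        if let Some(sender) = apid_map.get(&apid) {
            sender.send(request).expect("error sending request");
        }
    }
}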
impl PusReceiver { impl PusReceiver {
fn handle_test_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) { fn handle_test_service(
&mut self,
pus_tc: &PusTc,
token: VerificationToken<TcStateAccepted>,
) -> Option<RequestWithToken> {
match PusPacket::subservice(pus_tc) { match PusPacket::subservice(pus_tc) {
1 => { 1 => {
info!("Received PUS ping command TC[17,1]"); info!("Received PUS ping command TC[17,1]");
@ -179,12 +200,13 @@ impl PusReceiver {
.verif_reporter .verif_reporter
.start_success(token, Some(self.stamp_helper.stamp())) .start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success"); .expect("Error sending start success");
let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(17, 2, None); let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(17, 2, None, self.tm_args.seq_count_provider.get());
let addr = self.tm_args.tm_store.add_pus_tm(&ping_reply); let addr = self.tm_args.tm_store.add_pus_tm(&ping_reply);
self.tm_args self.tm_args
.tm_tx .tm_tx
.send(addr) .send(addr)
.expect("Sending TM to TM funnel failed"); .expect("Sending TM to TM funnel failed");
self.tm_args.seq_count_provider.increment();
self.tm_args self.tm_args
.verif_reporter .verif_reporter
.completion_success(start_token, Some(self.stamp_helper.stamp())) .completion_success(start_token, Some(self.stamp_helper.stamp()))
@ -220,9 +242,14 @@ impl PusReceiver {
.expect("Sending start failure TM failed"); .expect("Sending start failure TM failed");
} }
} }
None
} }
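A condensed sketch (not part of the commit) of the ping-reply pattern above with the shared sequence counter: read the current count when generating the TM[17,2] reply, store the packet, hand the store address to the TM funnel, then increment the counter. Field and method names follow this hunk; the free function is an assumption.

fn send_ping_reply(tm_args: &mut PusTmArgs, tm_helper: &mut PusTmWithCdsShortHelper) {
    // TM[17,2] reply carrying the current sequence count.
    let ping_reply =
        tm_helper.create_pus_tm_timestamp_now(17, 2, None, tm_args.seq_count_provider.get());
    let addr = tm_args.tm_store.add_pus_tm(&ping_reply);
    tm_args
        .tm_tx
        .send(addr)
        .expect("Sending TM to TM funnel failed");
    tm_args.seq_count_provider.increment();
}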
fn handle_hk_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) { fn handle_hk_request(
&mut self,
pus_tc: &PusTc,
token: VerificationToken<TcStateAccepted>,
) -> Option<RequestWithToken> {
if pus_tc.user_data().is_none() { if pus_tc.user_data().is_none() {
self.tm_args self.tm_args
.verif_reporter .verif_reporter
@ -235,15 +262,11 @@ impl PusReceiver {
), ),
) )
.expect("Sending start failure TM failed"); .expect("Sending start failure TM failed");
return; return None;
} }
let user_data = pus_tc.user_data().unwrap(); let user_data = pus_tc.user_data().unwrap();
if user_data.len() < 8 { if user_data.len() < 4 {
let err = if user_data.len() < 4 { let err = &hk_err::UNIQUE_ID_MISSING;
&hk_err::TARGET_ID_MISSING
} else {
&hk_err::UNIQUE_ID_MISSING
};
self.tm_args self.tm_args
.verif_reporter .verif_reporter
.start_failure( .start_failure(
@ -251,27 +274,19 @@ impl PusReceiver {
FailParams::new(Some(self.stamp_helper.stamp()), err, None), FailParams::new(Some(self.stamp_helper.stamp()), err, None),
) )
.expect("Sending start failure TM failed"); .expect("Sending start failure TM failed");
return; return None;
} }
let addressable_id = AddressableId::from_raw_be(user_data).unwrap();
if !self let apid = pus_tc.sp_header().apid();
.tc_args
.request_map match apid {
.contains_key(&addressable_id.target_id) _ => {}
{
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&hk_err::UNKNOWN_TARGET_ID,
None,
),
)
.expect("Sending start failure TM failed");
return;
} }
let unique_id: UniqueId =
u32::from_be_bytes(user_data[0..4].try_into().unwrap()) as UniqueId;
/*
let send_request = |request: HkRequest| { let send_request = |request: HkRequest| {
let sender = self let sender = self
.tc_args .tc_args
@ -279,40 +294,58 @@ impl PusReceiver {
.get(&addressable_id.target_id) .get(&addressable_id.target_id)
.unwrap(); .unwrap();
sender sender
.send(RequestWithToken(Request::HkRequest(request), token)) .send(RequestWithToken(Request::HkRequest(request), Some(token)))
.unwrap_or_else(|_| panic!("Sending HK request {request:?} failed")); .unwrap_or_else(|_| panic!("Sending HK request {request:?} failed"));
}; };
if PusPacket::subservice(pus_tc) == hk::Subservice::TcEnableHkGeneration as u8 {
send_request(HkRequest::Enable(addressable_id)); */
} else if PusPacket::subservice(pus_tc) == hk::Subservice::TcDisableHkGeneration as u8 {
send_request(HkRequest::Disable(addressable_id)); if let Some(request) =
} else if PusPacket::subservice(pus_tc) == hk::Subservice::TcGenerateOneShotHk as u8 { if PusPacket::subservice(pus_tc) == hk::Subservice::TcEnableHkGeneration as u8 {
send_request(HkRequest::OneShot(addressable_id)); //send_request(HkRequest::Enable(addressable_id));
} else if PusPacket::subservice(pus_tc) Some(HkRequest::Enable(unique_id))
== hk::Subservice::TcModifyHkCollectionInterval as u8 } else if PusPacket::subservice(pus_tc) == hk::Subservice::TcDisableHkGeneration as u8 {
{ //send_request(HkRequest::Disable(addressable_id));
if user_data.len() < 12 { Some(HkRequest::Enable(unique_id))
self.tm_args } else if PusPacket::subservice(pus_tc) == hk::Subservice::TcGenerateOneShotHk as u8 {
.verif_reporter //send_request(HkRequest::OneShot(addressable_id));
.start_failure( Some(HkRequest::OneShot(unique_id))
token, } else if PusPacket::subservice(pus_tc)
FailParams::new( == hk::Subservice::TcModifyHkCollectionInterval as u8
Some(self.stamp_helper.stamp()), {
&hk_err::COLLECTION_INTERVAL_MISSING, if user_data.len() < 12 {
None, self.tm_args
), .verif_reporter
) .start_failure(
.expect("Sending start failure TM failed"); token,
return; FailParams::new(
Some(self.stamp_helper.stamp()),
&hk_err::COLLECTION_INTERVAL_MISSING,
None,
),
)
.expect("Sending start failure TM failed");
return None;
}
//send_request(HkRequest::ModifyCollectionInterval(addressable_id,CollectionIntervalFactor::from_be_bytes(user_data[8..12].try_into().unwrap())));
Some(HkRequest::ModifyCollectionInterval(
unique_id,
CollectionIntervalFactor::from_be_bytes(user_data[8..12].try_into().unwrap()),
))
} else {
return None;
} }
send_request(HkRequest::ModifyCollectionInterval( {
addressable_id, return Some(RequestWithToken(Request::HkRequest(request), Some(token)));
CollectionIntervalFactor::from_be_bytes(user_data[8..12].try_into().unwrap()),
));
} }
None
} }
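A worked sketch (not from the commit) of the housekeeping app data layout the handler above assumes: the 4-byte unique HK set ID sits in bytes 0..4 and, for the modify-collection-interval subservice, the interval factor in bytes 8..12, both big endian. The helper function is hypothetical.

fn decode_modify_interval(user_data: &[u8]) -> Option<HkRequest> {
    if user_data.len() < 12 {
        return None;
    }
    let unique_id: UniqueId = u32::from_be_bytes(user_data[0..4].try_into().unwrap()) as UniqueId;
    let factor = CollectionIntervalFactor::from_be_bytes(user_data[8..12].try_into().unwrap());
    Some(HkRequest::ModifyCollectionInterval(unique_id, factor))
}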
fn handle_event_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) { fn handle_event_request(
&mut self,
pus_tc: &PusTc,
token: VerificationToken<TcStateAccepted>,
) -> Option<RequestWithToken> {
let send_start_failure = |vr: &mut StdVerifReporterWithSender, let send_start_failure = |vr: &mut StdVerifReporterWithSender,
timestamp: &[u8], timestamp: &[u8],
failure_code: &ResultU16, failure_code: &ResultU16,
@ -321,7 +354,7 @@ impl PusReceiver {
token, token,
FailParams::new(Some(timestamp), failure_code, failure_data), FailParams::new(Some(timestamp), failure_code, failure_data),
) )
.expect("Sending start failure TM failed"); .expect("Sending start failure TM failed");
}; };
let send_start_acceptance = |vr: &mut StdVerifReporterWithSender, timestamp: &[u8]| { let send_start_acceptance = |vr: &mut StdVerifReporterWithSender, timestamp: &[u8]| {
vr.start_success(token, Some(timestamp)) vr.start_success(token, Some(timestamp))
@ -334,7 +367,7 @@ impl PusReceiver {
&tmtc_err::NOT_ENOUGH_APP_DATA, &tmtc_err::NOT_ENOUGH_APP_DATA,
None, None,
); );
return; return None;
} }
let app_data = pus_tc.user_data().unwrap(); let app_data = pus_tc.user_data().unwrap();
if app_data.len() < 4 { if app_data.len() < 4 {
@ -344,7 +377,7 @@ impl PusReceiver {
&tmtc_err::NOT_ENOUGH_APP_DATA, &tmtc_err::NOT_ENOUGH_APP_DATA,
None, None,
); );
return; return None;
} }
let event_id = EventU32::from(u32::from_be_bytes(app_data.try_into().unwrap())); let event_id = EventU32::from(u32::from_be_bytes(app_data.try_into().unwrap()));
match PusPacket::subservice(pus_tc).try_into() { match PusPacket::subservice(pus_tc).try_into() {
@ -383,9 +416,14 @@ impl PusReceiver {
); );
} }
} }
None
} }
fn handle_scheduled_tc(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) { fn handle_scheduled_tc(
&mut self,
pus_tc: &PusTc,
token: VerificationToken<TcStateAccepted>,
) -> Option<RequestWithToken> {
let subservice = match pus_11_generic_tc_check(pus_tc) { let subservice = match pus_11_generic_tc_check(pus_tc) {
Ok(subservice) => subservice, Ok(subservice) => subservice,
Err(e) => match e { Err(e) => match e {
@ -401,7 +439,7 @@ impl PusReceiver {
), ),
) )
.expect("could not sent verification error"); .expect("could not sent verification error");
return; return None;
} }
GenericTcCheckError::InvalidSubservice => { GenericTcCheckError::InvalidSubservice => {
self.tm_args self.tm_args
@ -415,7 +453,7 @@ impl PusReceiver {
), ),
) )
.expect("could not sent verification error"); .expect("could not sent verification error");
return; return None;
} }
}, },
}; };
@ -509,9 +547,14 @@ impl PusReceiver {
} }
_ => {} _ => {}
} }
None
} }
fn handle_mode_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) { fn handle_mode_service(
&mut self,
pus_tc: &PusTc,
token: VerificationToken<TcStateAccepted>,
) -> Option<RequestWithToken> {
let mut app_data_len = 0; let mut app_data_len = 0;
let app_data = pus_tc.user_data(); let app_data = pus_tc.user_data();
if app_data.is_some() { if app_data.is_some() {
@ -554,7 +597,10 @@ impl PusReceiver {
None => warn!("not mode request recipient for target ID {target_id} found"), None => warn!("not mode request recipient for target ID {target_id} found"),
Some(sender_to_recipient) => { Some(sender_to_recipient) => {
sender_to_recipient sender_to_recipient
.send(RequestWithToken(Request::ModeRequest(mode_request), token)) .send(RequestWithToken(
Request::ModeRequest(mode_request),
Some(token),
))
.expect("sending mode request failed"); .expect("sending mode request failed");
} }
}; };
@ -585,23 +631,20 @@ impl PusReceiver {
.try_into() .try_into()
.unwrap(), .unwrap(),
) )
.unwrap(); .unwrap();
forward_mode_request( forward_mode_request(target_id, ModeRequest::SetMode(mode_submode));
target_id,
ModeRequest::SetMode(ModeCommand::new(target_id, mode_submode)),
);
} }
Subservice::TcReadMode => { Subservice::TcReadMode => {
let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap()); let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
forward_mode_request(target_id, ModeRequest::ReadMode(target_id)); forward_mode_request(target_id, ModeRequest::ReadMode);
} }
Subservice::TcAnnounceMode => { Subservice::TcAnnounceMode => {
let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap()); let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
forward_mode_request(target_id, ModeRequest::AnnounceMode(target_id)); forward_mode_request(target_id, ModeRequest::AnnounceMode);
} }
Subservice::TcAnnounceModeRecursive => { Subservice::TcAnnounceModeRecursive => {
let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap()); let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
forward_mode_request(target_id, ModeRequest::AnnounceModeRecursive(target_id)); forward_mode_request(target_id, ModeRequest::AnnounceModeRecursive);
} }
_ => { _ => {
warn!("Can not process mode request with subservice {subservice:?}"); warn!("Can not process mode request with subservice {subservice:?}");
@ -618,22 +661,28 @@ impl PusReceiver {
} else { } else {
invalid_subservice_handler(); invalid_subservice_handler();
} }
None
} }
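A sketch (not part of the diff) of the simplified mode forwarding above: the target ID now only selects the recipient channel and is no longer embedded in the ModeRequest itself; mode_submode is assumed to be the ModeAndSubmode parsed from the telecommand app data, and the helper name is hypothetical.

use std::collections::HashMap;
use std::sync::mpsc::Sender;

fn forward_set_mode(
    request_map: &HashMap<u32, Sender<RequestWithToken>>,
    target_id: u32,
    mode_submode: ModeAndSubmode,
    token: VerificationToken<TcStateAccepted>,
) {
    match request_map.get(&target_id) {
        None => log::warn!("no mode request recipient found for target ID {target_id}"),
        Some(sender_to_recipient) => sender_to_recipient
            .send(RequestWithToken(
                Request::ModeRequest(ModeRequest::SetMode(mode_submode)),
                Some(token),
            ))
            .expect("sending mode request failed"),
    }
}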
fn handle_action_request( fn handle_function_request(
&mut self, &mut self,
pus_tc: &PusTc, pus_tc: &PusTc,
token: VerificationToken<TcStateAccepted>, token: VerificationToken<TcStateAccepted>,
) { ) -> Option<RequestWithToken> {
if pus_tc.user_data().is_none() { if pus_tc.user_data().is_none() {
self.stamp_helper.stamper.update_from_now().unwrap(); self.stamp_helper.stamper.update_from_now().unwrap();
self.tm_args.verif_reporter self.tm_args
.verif_reporter
.start_failure( .start_failure(
token, token,
FailParams::new(Some(&self.stamp_helper.stamp()), &tmtc_err::NOT_ENOUGH_APP_DATA, None), FailParams::new(
Some(&self.stamp_helper.stamp()),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
),
) )
.expect("Sending start failure TM failed"); .expect("Sending start failure TM failed");
return; return None;
} }
let send_request = |request: ActionRequest| match request { let send_request = |request: ActionRequest| match request {
@ -641,23 +690,34 @@ impl PusReceiver {
let id = target_id as u32; let id = target_id as u32;
let sender = self.tc_args.request_map.get(&id).unwrap(); let sender = self.tc_args.request_map.get(&id).unwrap();
sender sender
.send(RequestWithToken(Request::ActionRequest(request), token)) .send(RequestWithToken(
Request::ActionRequest(request),
Some(token),
))
.unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request)); .unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request));
} }
ActionRequest::OrientationRequest(target_id) => { ActionRequest::OrientationRequest(target_id) => {
let id = target_id as u32; let id = target_id as u32;
let sender = self.tc_args.request_map.get(&id).unwrap(); let sender = self.tc_args.request_map.get(&id).unwrap();
sender sender
.send(RequestWithToken(Request::ActionRequest(request), token)) .send(RequestWithToken(
Request::ActionRequest(request),
Some(token),
))
.unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request)); .unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request));
} }
_ => {} _ => {}
}; };
if PusPacket::subservice(pus_tc) == action::Subservice::ImageRequest as u8 { if PusPacket::subservice(pus_tc) == action::Subservice::ImageRequest as u8 {
send_request(ActionRequest::ImageRequest(PldSubsystem)); //send_request(ActionRequest::ImageRequest(PldSubsystem));
return Some(RequestWithToken(
Request::ActionRequest(ActionRequest::ImageRequest(PldSubsystem)),
Some(token),
));
} else if PusPacket::subservice(pus_tc) == action::Subservice::OrientationRequest as u8 { } else if PusPacket::subservice(pus_tc) == action::Subservice::OrientationRequest as u8 {
send_request(ActionRequest::OrientationRequest(AcsSubsystem)); send_request(ActionRequest::OrientationRequest(AcsSubsystem));
} }
None
} }
} }

View File

@ -1,19 +1,15 @@
use crate::action::ActionRequest;
use satrs_core::hk::HkRequest; use satrs_core::hk::HkRequest;
use satrs_core::mode::ModeRequest; use satrs_core::mode::ModeRequest;
use satrs_core::pus::verification::{TcStateAccepted, VerificationToken}; use satrs_core::pus::verification::{TcStateAccepted, VerificationToken};
use crate::action::ActionRequest;
#[derive(Copy, Clone, Eq, PartialEq, Debug)] #[derive(Copy, Clone, Eq, PartialEq, Debug)]
#[non_exhaustive] #[non_exhaustive]
pub enum Request { pub enum Request {
HkRequest(HkRequest), HkRequest(HkRequest),
ModeRequest(ModeRequest), ModeRequest(ModeRequest),
ActionRequest(ActionRequest) ActionRequest(ActionRequest),
} }
#[derive(Copy, Clone, Eq, PartialEq, Debug)] #[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct RequestWithToken(pub Request, pub VerificationToken<TcStateAccepted>); pub struct RequestWithToken(pub Request, pub Option<VerificationToken<TcStateAccepted>>);
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct RequestWithOptionalToken(pub Request, pub Option<VerificationToken<TcStateAccepted>>);
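Usage sketch (not part of the commit): with the verification token now optional, requests triggered by a ground telecommand carry Some(token), while internally generated requests can pass None. The HkRequest value and the function are only placeholders.

fn wrap_requests(
    hk_request: HkRequest,
    token: VerificationToken<TcStateAccepted>,
) -> (RequestWithToken, RequestWithToken) {
    // Request coming from a ground telecommand: keep the token for verification reporting.
    let from_ground = RequestWithToken(Request::HkRequest(hk_request), Some(token));
    // Request generated on board (e.g. by an autonomous task): no token available.
    let internal = RequestWithToken(Request::HkRequest(hk_request), None);
    (from_ground, internal)
}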

View File

@ -19,12 +19,23 @@ use satrs_core::pool::{SharedPool, StoreAddr, StoreError};
use satrs_core::pus::event_man::EventRequestWithToken; use satrs_core::pus::event_man::EventRequestWithToken;
use satrs_core::pus::scheduling::{PusScheduler, TcInfo}; use satrs_core::pus::scheduling::{PusScheduler, TcInfo};
use satrs_core::pus::verification::StdVerifReporterWithSender; use satrs_core::pus::verification::StdVerifReporterWithSender;
use satrs_core::seq_count::SeqCountProviderSyncClonable;
use satrs_core::spacepackets::tc::PUS_TC_MIN_LEN_WITHOUT_APP_DATA;
use satrs_core::spacepackets::{ecss::PusPacket, tc::PusTc, tm::PusTm, SpHeader}; use satrs_core::spacepackets::{ecss::PusPacket, tc::PusTc, tm::PusTm, SpHeader};
use satrs_core::tmtc::{ use satrs_core::tmtc::{
CcsdsDistributor, CcsdsError, PusServiceProvider, ReceivesCcsdsTc, ReceivesEcssPusTc, CcsdsDistributor, CcsdsError, PusServiceProvider, ReceivesCcsdsTc, ReceivesEcssPusTc,
}; };
pub const PUS_APID: u16 = 0x02; pub const PUS_APID: u16 = 0x02;
pub const PLD_APID: u16 = 0x30;
pub const PWR_APID: u16 = 0x40;
pub const AOCS_APID: u16 = 0x50;
pub const AOCS_HK_APID: u16 = 0x51;
pub const MGM_APID: u16 = 0x52;
pub const CSS_APID: u16 = 0x53;
pub const STR_APID: u16 = 0x54;
pub const MGT_APID: u16 = 0x55;
pub const RWL_APID: u16 = 0x56;
pub struct OtherArgs { pub struct OtherArgs {
pub sock_addr: SocketAddr, pub sock_addr: SocketAddr,
@ -32,6 +43,8 @@ pub struct OtherArgs {
pub event_sender: Sender<(EventU32, Option<Params>)>, pub event_sender: Sender<(EventU32, Option<Params>)>,
pub event_request_tx: Sender<EventRequestWithToken>, pub event_request_tx: Sender<EventRequestWithToken>,
pub request_map: HashMap<u32, Sender<RequestWithToken>>, pub request_map: HashMap<u32, Sender<RequestWithToken>>,
pub apid_map: HashMap<u16, Sender<RequestWithToken>>,
pub seq_count_provider: SeqCountProviderSyncClonable,
} }
pub struct TmArgs { pub struct TmArgs {
@ -173,6 +186,7 @@ pub fn core_tmtc_task(args: OtherArgs, mut tc_args: TcArgs, tm_args: TmArgs) {
tm_tx: tm_args.tm_sink_sender, tm_tx: tm_args.tm_sink_sender,
tm_store: tm_args.tm_store.clone(), tm_store: tm_args.tm_store.clone(),
verif_reporter: args.verif_reporter, verif_reporter: args.verif_reporter,
seq_count_provider: args.seq_count_provider.clone(),
}; };
let pus_tc_args = PusTcArgs { let pus_tc_args = PusTcArgs {
event_request_tx: args.event_request_tx, event_request_tx: args.event_request_tx,
@ -180,6 +194,7 @@ pub fn core_tmtc_task(args: OtherArgs, mut tc_args: TcArgs, tm_args: TmArgs) {
tc_source: tc_args.tc_source.clone(), tc_source: tc_args.tc_source.clone(),
event_sender: args.event_sender, event_sender: args.event_sender,
scheduler: sched_clone, scheduler: sched_clone,
apid_map: args.apid_map,
}; };
let mut pus_receiver = PusReceiver::new(PUS_APID, pus_tm_args, pus_tc_args); let mut pus_receiver = PusReceiver::new(PUS_APID, pus_tm_args, pus_tc_args);
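Finally, a sketch (not shown in this commit) of how the new apid_map could be assembled on the main thread before being passed in via OtherArgs; the channel variable names and the subsystems wired up here are assumptions.

use std::collections::HashMap;
use std::sync::mpsc;

fn build_apid_map() -> (
    HashMap<u16, mpsc::Sender<RequestWithToken>>,
    mpsc::Receiver<RequestWithToken>,
    mpsc::Receiver<RequestWithToken>,
) {
    let (pld_request_tx, pld_request_rx) = mpsc::channel();
    let (aocs_request_tx, aocs_request_rx) = mpsc::channel();
    let mut apid_map = HashMap::new();
    // Telecommands whose APID matches a subsystem are forwarded on these channels.
    apid_map.insert(PLD_APID, pld_request_tx);
    apid_map.insert(AOCS_APID, aocs_request_tx);
    // The receivers are moved into the corresponding subsystem threads, e.g. core_pld_task.
    (apid_map, pld_request_rx, aocs_request_rx)
}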