added CAN bus handling, HK requests and request routing, preparing for change of package ids

This commit is contained in:
lkoester
2023-01-18 10:04:36 +01:00
parent 154e4ef4ce
commit 7ae4701e5f
20 changed files with 1907 additions and 307 deletions

src/can.rs

@@ -1,33 +1,218 @@
use std::sync::Receiver;
use std::sync::Sender;
use satrs_core::pool::StoreAddr;
#[cfg(feature = "can")]
use socketcan;
use crate::tmtc::TmStore;
struct CanThreadArgs {
rx_can_request: Receiver<HkRequest>,
tm_sender: Sender<StoreAddr>,
tm_store: TmStore,
}
pub type CollInterval = u32;
pub enum HkIds {
AcsHk = 0,
}
pub enum HkRequest {
OneShot(HkIds),
Enable(HkIds, CollInterval), // periodic
Disable(HkIds, CollInterval),
}
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::device_handler::DeviceState;
use crate::can_ids::{
can_id_to_package_id, package_id_to_can_id, value_to_package_id, DeviceId, PackageId,
PackageModel, SenderReceiverThread, ThreadId,
};
use embedded_can::{self, Frame};
use log::{debug, error, info, trace, warn};
use serde::{Deserialize, Serialize};
use serde_json;
use socketcan::{errors, frame, socket, CanFrame, Socket};
use std::collections::HashMap;
use std::ffi::c_void;
use std::io;
use std::mem::size_of;
use std::sync::mpsc::Sender;
pub struct CanRxHandler {
interface: &'static str,
socket: socket::CanSocket,
//frame_id_to_sender_id: HashMap<embedded_can::Id, u32>, // double hash map: frame id -> receiver id -> sender handle
can_senders: HashMap<ThreadId, Sender<PackageModel>>,
//dismissed_ids: Vec<embedded_can::Id>,
package_map: HashMap<PackageId, SenderReceiverThread>,
//packet_id_to_sender_id: HashMap<PackageId, ThreadId>,
}
impl CanRxHandler {
pub fn new_socket(
interface: &'static str,
//frame_id_to_sender_id: HashMap<embedded_can::Id, u32>,
can_senders: HashMap<ThreadId, Sender<PackageModel>>,
package_map: HashMap<PackageId, SenderReceiverThread>,
) -> Result<CanRxHandler, ()> {
let socket = socket::CanSocket::open(&interface);
if let Ok(socket) = socket {
Ok(CanRxHandler {
interface,
socket,
can_senders,
package_map,
})
} else {
Err(())
}
}
/*
pub fn new_socket_with_filter(
interface: &'static str,
can_id_to_sender_id: HashMap<embedded_can::Id, u32>,
can_senders: HashMap<u32, Sender<Vec<u8>>>,
can_filters: &[socket::CanFilter],
) -> Result<CanRxHandler, ()> {
let can_wrapper = Self::new_socket(interface, can_id_to_sender_id, can_senders)?;
let filter_result = can_wrapper.socket.set_filters(can_filters);
if let Err(e) = filter_result {
warn!("Can Bus set filter error: {}", e);
}
Ok(can_wrapper)
}
*/
pub fn read_frame(&self) -> io::Result<frame::CanFrame> {
let frame = self.socket.read_frame();
if let Err(e) = &frame {
warn!("CAN bus read error: {}", e);
}
frame
}
pub fn rx_socket(&self) -> Option<CanFrame> {
let frame = self.socket.read_frame().ok()?;
info!("Can Frame read: {:?}.", frame);
return Some(frame);
/*
if let Ok(frame) = frame {
println!("Frame received: {:?}", frame);
return Some(frame);
}
None
*/
}
pub fn forward_frame(&self, frame: CanFrame) {
let frame_id = can_id_to_package_id(frame.id());
info!("Frame forwarding with id: {:?}", frame_id);
if let Some(frame_id) = frame_id {
if self.package_map.contains_key(&frame_id) {
let value = self.package_map.get(&frame_id).unwrap();
if value.get_sender() != DeviceId::OBC {
let message_sender = self.can_senders.get(&value.get_thread()).unwrap();
let data = frame.data();
let message =
PackageModel::new(frame_id, data).expect("Error generating message.");
message_sender.send(message).expect(&*format!(
"Failure sending can bus frame to thread{:?}, frame id {:?}",
value.get_thread(),
frame_id
));
}
}
}
}
/*
pub fn set_filter(&self, filters: &[socket::CanFilter]) -> io::Result<()> {
info!("Setting filter with filter {:?}", filters);
let result = self.socket.set_filters(filters);
if let Err(e) = result {
warn!("Can bus socket filter set error: {}", e);
}
Ok(())
}
*/
}
// impl CanThreadArgs {
// fn operation(&self) {
//
// }
// }
fn can_thread(canthreadargs: CanThreadArgs) {
// handle tc requests
// request from devices
// send tms
}
pub struct CanTxHandler {
interface: &'static str,
socket: socket::CanSocket,
thread_id: ThreadId,
package_map: HashMap<PackageId, SenderReceiverThread>,
}
impl CanTxHandler {
pub fn new_socket(
interface: &'static str,
thread_id: ThreadId,
package_map: HashMap<PackageId, SenderReceiverThread>,
) -> Result<CanTxHandler, ()> {
let socket = socket::CanSocket::open(&interface);
if let Ok(socket) = socket {
socket.filter_drop_all().unwrap(); // tx nodes cannot receive data
Ok(CanTxHandler {
interface,
socket,
thread_id,
package_map,
})
} else {
Err(())
}
}
pub fn tx_socket(&self, package_id: PackageId, data: &[u8]) {
if self.package_map.contains_key(&package_id) {
let value = self.package_map.get(&package_id).unwrap();
if value.get_sender() == DeviceId::OBC {
if value.get_thread() == self.thread_id {
if data.len() <= 8 {
let frame_id = package_id_to_can_id(&package_id);
let frame = CanFrame::new(frame_id, data);
if let Some(frame) = frame {
self.socket
.write_frame(&frame)
.expect("Error writing frame.");
}
} else {
warn!(
"Message dismissed, data length ({:?}) exceeds 8 bytes",
data.len()
);
}
} else {
warn!(
"Message dismissed, mismatched thread id: {:?}",
value.get_thread()
);
}
} else {
warn!(
"Message dismissed, wrong sender id: {:?}",
value.get_sender()
);
}
} else {
warn!("Message dismissed, wrong package id: {:?}", package_id);
}
}
/*
pub fn tx_socket_from_frame(&self, frame: frame::CanFrame) -> io::Result<()> {
let frame_id = frame.id();
if !self.allowed_frame_ids.contains(&frame_id) {
warn!(
"Requested frame Id {:?} not allowed for current thread",
frame.id()
);
} else if let Err(e) = self.socket.write_frame(&frame) {
warn!("CAN bus write error: {}", e);
}
return Ok(());
}
pub fn tx_socket_from_data(
&self,
frame_id: embedded_can::Id,
frame_data: &[u8],
) -> io::Result<()> {
let frame = frame::CanFrame::new(frame_id, &frame_data).expect(&*format!(
"Failure sending can bus frame with id {:?}",
frame_id
));
self.tx_socket_from_frame(frame)
}
*/
}
pub fn open_socket(interface: &str) -> Result<socket::CanSocket, errors::CanSocketOpenError> {
let socket = socket::CanSocket::open(&interface);
return socket;
}
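A minimal TX-path sketch under the mapping built by load_package_ids() from src/can_ids.rs below; the interface name and payload byte are illustrative:
use crate::can_ids::{load_package_ids, PackageId, ThreadId};
fn request_pcdu_status() {
// PCDUStatusRequest is mapped to sender OBC on ThreadId::TMThread,
// so tx_socket() accepts it from this handler.
let tx = CanTxHandler::new_socket("can0", ThreadId::TMThread, load_package_ids())
.expect("opening CAN TX socket failed");
tx.tx_socket(PackageId::PCDUStatusRequest, &[0x00]);
}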

258
src/can_ids.rs Normal file

@@ -0,0 +1,258 @@
use embedded_can::{Id, StandardId};
use log::warn;
use std::collections::HashMap;
use std::fs;
use std::sync::mpsc::Sender;
use std::thread::Thread;
use crate::device_handler::{DeviceProperties, DeviceState, DeviceType};
use num_derive::{FromPrimitive, ToPrimitive};
use num_traits::{FromPrimitive, ToPrimitive};
use strum::IntoEnumIterator; // 0.17.1
use strum_macros::EnumIter; // 0.17.1
#[derive(Debug, EnumIter, Eq, Hash, PartialEq, Copy, Clone, FromPrimitive)]
pub enum ThreadId {
AOCSThread,
TMThread,
}
#[derive(Debug, EnumIter, Eq, Hash, PartialEq, Copy, Clone, FromPrimitive)]
pub enum PackageId {
PCDUStatusRequest = 10,
DevicePowerOnRequest = 11,
DevicePowerOffRequest = 12,
DevicePowerStatusRequest = 13,
PCDUStatusResponse = 20,
DevicePowerStatusResponse = 21,
DevicePowerOnRequestConfirmation = 22,
DevicePowerOffRequestConfirmation = 23,
DevicePowerOnConfirmation = 24,
DevicePowerOffConfirmation = 25,
AOCSControlMGT1 = 31,
AOCSControlMGT2 = 32,
AOCSControlMGT3 = 33,
AOCSControlRWL1 = 34,
AOCSControlRWL2 = 35,
AOCSControlRWL3 = 36,
AOCSDataMGM1 = 41,
AOCSDataMGM2 = 42,
AOCSDataMGM3 = 43,
AOCSDataSunSensor1 = 44,
AOCSDataSunSensor2 = 45,
AOCSDataSunSensor3 = 46,
AOCSDataStarTracker = 47,
HousekeepingRequest = 61,
HousekeepingMGM1 = 62,
HousekeepingMGM2 = 63,
HousekeepingMGM3 = 64,
HousekeepingSunSensor1 = 65,
HousekeepingSunSensor2 = 66,
HousekeepingSunSensor3 = 67,
HousekeepingStarTracker = 68,
HousekeepingMGT1 = 69,
HousekeepingMGT2 = 70,
HousekeepingMGT3 = 71,
HousekeepingRWL1 = 72,
HousekeepingRWL2 = 73,
HousekeepingRWL3 = 74,
CameraImageRequest = 101,
CameraImageRequestConfirmation = 102,
CameraImageExecutionStart = 103,
CameraImageExectutionEnd = 104,
}
#[derive(Debug, EnumIter, Eq, Hash, PartialEq, Copy, Clone, FromPrimitive)]
pub enum DeviceId {
OBC = 1,
PCDU = 2,
MGM1 = 3,
MGM2 = 4,
MGM3 = 5,
SunSensor1 = 6,
SunSensor2 = 7,
SunSensor3 = 8,
StarTracker = 9,
MGT1 = 10,
MGT2 = 11,
MGT3 = 12,
RWL1 = 13,
RWL2 = 14,
RWL3 = 15,
Camera = 16,
All = 17,
}
#[derive(Debug)]
pub struct PackageModel {
package_id: PackageId,
data: Vec<u8>,
}
impl PackageModel {
pub fn new(package_id: PackageId, data: &[u8]) -> Result<PackageModel, ()> {
if data.len() > 8 {
warn!("Data exceeds maximum length.");
return Err(());
}
let vec = Vec::from(data);
return Ok(PackageModel {
package_id,
data: vec,
});
}
pub fn package_id(&self) -> PackageId {
self.package_id
}
pub fn data(&self) -> &Vec<u8> {
&self.data
}
}
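PackageModel::new enforces the classic CAN payload limit of 8 bytes; a quick sketch:
let ok = PackageModel::new(PackageId::AOCSDataMGM1, &[1, 2, 3]);
assert!(ok.is_ok());
let too_long = PackageModel::new(PackageId::AOCSDataMGM1, &[0u8; 9]);
assert!(too_long.is_err()); // warns "Data exceeds maximum length."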
#[derive(Debug, Copy, Clone)]
pub struct SenderReceiverThread {
sender: DeviceId,
receiver: DeviceId,
thread: ThreadId,
}
impl SenderReceiverThread {
pub fn new(sender: DeviceId, receiver: DeviceId, thread: ThreadId) -> SenderReceiverThread {
SenderReceiverThread {
sender,
receiver,
thread,
}
}
pub fn get_sender(&self) -> DeviceId {
self.sender
}
pub fn get_receiver(&self) -> DeviceId {
self.receiver
}
pub fn get_thread(&self) -> ThreadId {
self.thread
}
}
pub fn load_package_ids() -> HashMap<PackageId, SenderReceiverThread> {
let mut package_map: HashMap<PackageId, SenderReceiverThread> = HashMap::new();
let properties = vec![
SenderReceiverThread::new(DeviceId::OBC, DeviceId::PCDU, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::PCDU, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::PCDU, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::PCDU, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::PCDU, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::PCDU, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::PCDU, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::PCDU, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::PCDU, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::PCDU, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::MGT1, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::MGT2, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::MGT3, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::RWL1, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::RWL2, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::RWL3, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::MGM1, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::MGM2, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::MGM3, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::SunSensor1, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::SunSensor2, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::SunSensor3, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::StarTracker, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::All, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::MGM1, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::MGM2, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::MGM3, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::SunSensor1, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::SunSensor2, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::SunSensor3, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::StarTracker, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::MGT1, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::MGT2, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::MGT3, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::RWL1, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::RWL2, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::RWL3, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::Camera, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::Camera, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::Camera, DeviceId::OBC, ThreadId::TMThread),
SenderReceiverThread::new(DeviceId::Camera, DeviceId::OBC, ThreadId::TMThread),
];
let mut i = 0;
for id in PackageId::iter() {
let value = properties.get(i).unwrap();
package_map.insert(id, *value);
i += 1;
}
return package_map;
}
pub fn load_device_ids() {
let mut package_map: HashMap<DeviceId, DeviceProperties> = HashMap::new();
let properties = vec![
DeviceProperties::new(DeviceType::OBC, DeviceState::Unknown),
DeviceProperties::new(DeviceType::PCDU, DeviceState::Unknown),
DeviceProperties::new(DeviceType::MGM, DeviceState::Unknown),
DeviceProperties::new(DeviceType::MGM, DeviceState::Unknown),
DeviceProperties::new(DeviceType::MGM, DeviceState::Unknown),
DeviceProperties::new(DeviceType::CRS, DeviceState::Unknown),
DeviceProperties::new(DeviceType::CRS, DeviceState::Unknown),
DeviceProperties::new(DeviceType::CRS, DeviceState::Unknown),
DeviceProperties::new(DeviceType::STR, DeviceState::Unknown),
DeviceProperties::new(DeviceType::MGT, DeviceState::Unknown),
DeviceProperties::new(DeviceType::MGT, DeviceState::Unknown),
DeviceProperties::new(DeviceType::MGT, DeviceState::Unknown),
DeviceProperties::new(DeviceType::RWL, DeviceState::Unknown),
DeviceProperties::new(DeviceType::RWL, DeviceState::Unknown),
DeviceProperties::new(DeviceType::RWL, DeviceState::Unknown),
DeviceProperties::new(DeviceType::CAM, DeviceState::Unknown),
];
let mut i = 0;
for id in DeviceId::iter() {
let value = properties.get(i).unwrap();
package_map.insert(id, *value);
i += 1;
}
}
//TODO: change ids from u32 to embeddedcan ids
pub fn package_id_to_value(packageid: &PackageId) -> u32 {
*packageid as u32
}
pub fn package_id_to_can_id(packageid: &PackageId) -> Id {
let x = *packageid as u16;
Id::Standard(StandardId::new(x).unwrap())
}
pub fn device_id_to_value(deviceid: &DeviceId) -> u32 {
*deviceid as u32
}
pub fn value_to_package_id(value: u32) -> Option<PackageId> {
let element = FromPrimitive::from_u32(value);
return element;
}
pub fn can_id_to_package_id(value: Id) -> Option<PackageId> {
let buf = match value {
Id::Standard(id) => id.as_raw() as u32,
Id::Extended(id) => id.as_raw(),
};
let element = FromPrimitive::from_u32(buf);
return element;
}
pub fn value_to_device_id(value: u32) -> Option<DeviceId> {
let element = FromPrimitive::from_u32(value);
return element;
}
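A round-trip sketch for the conversion helpers above; HousekeepingRequest (61) fits an 11-bit standard id, so both directions succeed:
use embedded_can::Id;
fn id_round_trip() {
let can_id: Id = package_id_to_can_id(&PackageId::HousekeepingRequest);
assert_eq!(can_id_to_package_id(can_id), Some(PackageId::HousekeepingRequest));
}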

src/ccsds.rs

@@ -1,13 +1,13 @@
use crate::tmtc::PUS_APID;
use satrs_core::tmtc::{CcsdsPacketHandler, PusDistributor, ReceivesCcsdsTc};
use spacepackets::{CcsdsPacket, SpHeader};
use crate::tmtc::{MpscStoreAndSendError, PusTcSource, PUS_APID};
use satrs_core::tmtc::{CcsdsPacketHandler, ReceivesCcsdsTc};
use satrs_core::spacepackets::{CcsdsPacket, SpHeader};
pub struct CcsdsReceiver {
pub pus_handler: PusDistributor<()>,
pub tc_source: PusTcSource,
}
impl CcsdsPacketHandler for CcsdsReceiver {
type Error = ();
type Error = MpscStoreAndSendError;
fn valid_apids(&self) -> &'static [u16] {
&[PUS_APID]
@@ -19,9 +19,7 @@ impl CcsdsPacketHandler for CcsdsReceiver {
tc_raw: &[u8],
) -> Result<(), Self::Error> {
if sp_header.apid() == PUS_APID {
self.pus_handler
.pass_ccsds(sp_header, tc_raw)
.expect("Handling PUS packet failed");
return self.tc_source.pass_ccsds(sp_header, tc_raw);
}
Ok(())
}

37
src/device_handler.rs Normal file

@@ -0,0 +1,37 @@
use socketcan::{errors, frame, socket, CanFrame, Socket};
use crate::can_ids::DeviceId;
#[derive(Copy, Clone)]
pub enum DeviceState {
On,
Off,
Broken,
Unknown,
}
#[derive(Copy, Clone)]
pub enum DeviceType {
MGM,
MGT,
RWL,
STR,
CRS,
OBC,
PCDU,
CAM,
}
#[derive(Copy, Clone)]
pub struct DeviceProperties {
device_type: DeviceType,
device_state: DeviceState,
}
impl DeviceProperties {
pub fn new(device_type: DeviceType, device_state: DeviceState) -> DeviceProperties {
return DeviceProperties {
device_type,
device_state,
};
}
}

243
src/example_main.rs Normal file

@@ -0,0 +1,243 @@
use crate::hk::{AcsHkIds, HkRequest};
use crate::requests::{Request, RequestWithToken};
use crate::tmtc::{
core_tmtc_task, OtherArgs, PusTcSource, TcArgs, TcStore, TmArgs, TmFunnel, TmStore, PUS_APID,
};
use satrs_core::event_man::{
EventManagerWithMpscQueue, MpscEventReceiver, MpscEventU32SendProvider, SendEventProvider,
};
use satrs_core::events::EventU32;
use satrs_core::pool::{LocalPool, PoolCfg, StoreAddr};
use satrs_core::pus::event_man::{
DefaultPusMgmtBackendProvider, EventReporter, EventRequest, EventRequestWithToken,
PusEventDispatcher,
};
use satrs_core::pus::hk::Subservice;
use satrs_core::pus::verification::{
MpscVerifSender, VerificationReporterCfg, VerificationReporterWithSender,
};
use satrs_core::pus::{EcssTmError, EcssTmSenderCore};
use satrs_core::seq_count::{SeqCountProviderSyncClonable, SequenceCountProvider};
use eurosim_obsw::{RequestTargetId, OBSW_SERVER_ADDR, SERVER_PORT};
use satrs_core::{
spacepackets::time::cds::TimeProvider,
spacepackets::time::TimeWriter,
spacepackets::tm::{PusTm, PusTmSecondaryHeader},
spacepackets::{SequenceFlags, SpHeader}
};
use std::collections::HashMap;
use std::net::{IpAddr, SocketAddr};
use std::sync::mpsc::{channel, TryRecvError};
use std::sync::{mpsc, Arc, RwLock};
use std::thread;
use std::time::Duration;
use crate::EventTmSender;
fn example_main() {
let tm_pool = LocalPool::new(PoolCfg::new(vec![
(30, 32),
(15, 64),
(15, 128),
(15, 256),
(15, 1024),
(15, 2048),
]));
let tm_store = TmStore {
pool: Arc::new(RwLock::new(Box::new(tm_pool))),
};
let tc_pool = LocalPool::new(PoolCfg::new(vec![
(30, 32),
(15, 64),
(15, 128),
(15, 256),
(15, 1024),
(15, 2048),
]));
let tc_store = TcStore {
pool: Arc::new(RwLock::new(Box::new(tc_pool))),
};
let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
let (tc_source_tx, tc_source_rx) = channel();
let (tm_funnel_tx, tm_funnel_rx) = channel();
let (tm_server_tx, tm_server_rx) = channel();
let verif_sender = MpscVerifSender::new(tm_store.pool.clone(), tm_funnel_tx.clone());
let verif_cfg = VerificationReporterCfg::new(
PUS_APID,
#[allow(clippy::box_default)]
Box::new(SeqCountProviderSyncClonable::default()),
1,
2,
8,
)
.unwrap();
let verif_reporter = VerificationReporterWithSender::new(&verif_cfg, Box::new(verif_sender));
// Create event handling components
let (event_request_tx, event_request_rx) = channel::<EventRequestWithToken>();
let (event_sender, event_man_rx) = channel();
let event_recv = MpscEventReceiver::<EventU32>::new(event_man_rx);
let mut event_man = EventManagerWithMpscQueue::new(Box::new(event_recv));
let event_reporter = EventReporter::new(PUS_APID, 128).unwrap();
let pus_tm_backend = DefaultPusMgmtBackendProvider::<EventU32>::default();
let mut pus_event_dispatcher =
PusEventDispatcher::new(event_reporter, Box::new(pus_tm_backend));
let (pus_event_man_tx, pus_event_man_rx) = channel();
let pus_event_man_send_provider = MpscEventU32SendProvider::new(1, pus_event_man_tx);
let mut reporter_event_handler = verif_reporter.clone();
let mut reporter_aocs = verif_reporter.clone();
event_man.subscribe_all(pus_event_man_send_provider.id());
let mut request_map = HashMap::new();
let (acs_thread_tx, acs_thread_rx) = channel::<RequestWithToken>();
request_map.insert(RequestTargetId::AcsSubsystem as u32, acs_thread_tx);
let tc_source = PusTcSource {
tc_store,
tc_source: tc_source_tx,
};
// Create clones here to allow moving the values
let core_args = OtherArgs {
sock_addr,
verif_reporter,
event_sender,
event_request_tx,
request_map,
};
let tc_args = TcArgs {
tc_source,
tc_receiver: tc_source_rx,
};
let tm_args = TmArgs {
tm_store: tm_store.clone(),
tm_sink_sender: tm_funnel_tx.clone(),
tm_server_rx,
};
let aocs_to_funnel = tm_funnel_tx.clone();
let mut aocs_tm_store = tm_store.clone();
println!("Starting TMTC task");
let jh0 = thread::spawn(move || {
core_tmtc_task(core_args, tc_args, tm_args);
});
println!("Starting TM funnel task");
let jh1 = thread::spawn(move || {
let tm_funnel = TmFunnel {
tm_server_tx,
tm_funnel_rx,
};
loop {
if let Ok(addr) = tm_funnel.tm_funnel_rx.recv() {
tm_funnel
.tm_server_tx
.send(addr)
.expect("Sending TM to server failed");
}
}
});
println!("Starting event handling task");
let jh2 = thread::spawn(move || {
let mut timestamp: [u8; 7] = [0; 7];
let mut sender = EventTmSender::new(tm_store, tm_funnel_tx);
let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
let mut report_completion = |event_req: EventRequestWithToken, timestamp: &[u8]| {
reporter_event_handler
.completion_success(event_req.token, timestamp)
.expect("Sending completion success failed");
};
loop {
if let Ok(event_req) = event_request_rx.try_recv() {
match event_req.request {
EventRequest::Enable(event) => {
pus_event_dispatcher
.enable_tm_for_event(&event)
.expect("Enabling TM failed");
update_time(&mut time_provider, &mut timestamp);
report_completion(event_req, &timestamp);
}
EventRequest::Disable(event) => {
pus_event_dispatcher
.disable_tm_for_event(&event)
.expect("Disabling TM failed");
update_time(&mut time_provider, &mut timestamp);
report_completion(event_req, &timestamp);
}
}
}
if let Ok((event, _param)) = pus_event_man_rx.try_recv() {
update_time(&mut time_provider, &mut timestamp);
pus_event_dispatcher
.generate_pus_event_tm_generic(&mut sender, &timestamp, event, None)
.expect("Sending TM as event failed");
}
thread::sleep(Duration::from_millis(400));
}
});
println!("Starting AOCS thread");
let jh3 = thread::spawn(move || {
let mut timestamp: [u8; 7] = [0; 7];
let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
loop {
match acs_thread_rx.try_recv() {
Ok(request) => {
println!("ACS thread: Received HK request {:?}", request.0);
update_time(&mut time_provider, &mut timestamp);
match request.0 {
Request::HkRequest(hk_req) => match hk_req {
HkRequest::OneShot(address) => {
assert_eq!(address.target_id, RequestTargetId::AcsSubsystem as u32);
if address.unique_id == AcsHkIds::TestMgmSet as u32 {
let mut sp_header =
SpHeader::tm(PUS_APID, SequenceFlags::Unsegmented, 0, 0)
.unwrap();
let sec_header = PusTmSecondaryHeader::new_simple(
3,
Subservice::TmHkPacket as u8,
&timestamp,
);
let mut buf: [u8; 8] = [0; 8];
address.write_to_be_bytes(&mut buf).unwrap();
let pus_tm =
PusTm::new(&mut sp_header, sec_header, Some(&buf), true);
let addr = aocs_tm_store.add_pus_tm(&pus_tm);
aocs_to_funnel.send(addr).expect("Sending HK TM failed");
}
}
HkRequest::Enable(_) => {}
HkRequest::Disable(_) => {}
HkRequest::ModifyCollectionInterval(_, _) => {}
},
}
let started_token = reporter_aocs
.start_success(request.1, &timestamp)
.expect("Sending start success failed");
reporter_aocs
.completion_success(started_token, &timestamp)
.expect("Sending completion success failed");
}
Err(e) => match e {
TryRecvError::Empty => {}
TryRecvError::Disconnected => {
println!("ACS thread: Message Queue TX disconnected!")
}
},
}
thread::sleep(Duration::from_millis(500));
}
});
}
pub fn update_time(time_provider: &mut TimeProvider, timestamp: &mut [u8]) {
time_provider
.update_from_now()
.expect("Could not get current time");
time_provider
.write_to_bytes(timestamp)
.expect("Writing timestamp failed");
}
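update_time() refreshes the 7-byte CDS short timestamp in place; the calling pattern used by the worker loops above, as a sketch:
let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
let mut timestamp: [u8; 7] = [0; 7];
update_time(&mut time_provider, &mut timestamp);
// timestamp now holds the current CDS short time and can be fed to
// PusTmSecondaryHeader::new_simple() or the verification reporter.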

16
src/hk.rs Normal file

@@ -0,0 +1,16 @@
use satrs_core::tmtc::AddressableId;
pub type CollectionIntervalFactor = u32;
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum AcsHkIds {
TestMgmSet = 1,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum HkRequest {
OneShot(AddressableId),
Enable(AddressableId),
Disable(AddressableId),
ModifyCollectionInterval(AddressableId, CollectionIntervalFactor),
}
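A sketch of building a one-shot request, assuming AddressableId exposes public target_id/unique_id fields as the field accesses elsewhere in this commit suggest; the ids mirror RequestTargetId::AcsSubsystem and AcsHkIds::TestMgmSet:
fn one_shot_mgm_request() -> HkRequest {
HkRequest::OneShot(AddressableId {
target_id: 1, // RequestTargetId::AcsSubsystem
unique_id: AcsHkIds::TestMgmSet as u32,
})
}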

src/lib.rs

@@ -1,4 +1,50 @@
use std::net::Ipv4Addr;
pub const OBSW_SERVER_ADDR: Ipv4Addr = Ipv4Addr::new(0, 0, 0, 0);
use satrs_mib::res_code::{ResultU16, ResultU16Info};
use satrs_mib::resultcode;
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum RequestTargetId {
AcsSubsystem = 1,
CanTask = 2,
}
#[derive(Debug)]
pub enum GroupId {
Tmtc = 0,
Hk = 1,
}
pub const OBSW_SERVER_ADDR: Ipv4Addr = Ipv4Addr::UNSPECIFIED;
pub const SERVER_PORT: u16 = 7301;
pub mod tmtc_err {
use super::*;
#[resultcode]
pub const INVALID_PUS_SERVICE: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 0);
#[resultcode]
pub const INVALID_PUS_SUBSERVICE: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 1);
#[resultcode(info = "Not enough data inside the TC application data field")]
pub const NOT_ENOUGH_APP_DATA: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 2);
pub const TMTC_RESULTS: &[ResultU16Info] = &[
INVALID_PUS_SERVICE_EXT,
INVALID_PUS_SUBSERVICE_EXT,
NOT_ENOUGH_APP_DATA_EXT,
];
}
pub mod hk_err {
use super::*;
#[resultcode]
pub const TARGET_ID_MISSING: ResultU16 = ResultU16::const_new(GroupId::Hk as u8, 0);
#[resultcode]
pub const UNIQUE_ID_MISSING: ResultU16 = ResultU16::const_new(GroupId::Hk as u8, 1);
#[resultcode]
pub const UNKNOWN_TARGET_ID: ResultU16 = ResultU16::const_new(GroupId::Hk as u8, 2);
#[resultcode]
pub const COLLECTION_INTERVAL_MISSING: ResultU16 = ResultU16::const_new(GroupId::Hk as u8, 3);
}

22
src/logger.rs Normal file

@@ -0,0 +1,22 @@
#![allow(unused_imports)]
use chrono;
use log::{debug, error, info, trace, warn};
pub fn setup_logger() -> Result<(), fern::InitError> {
fern::Dispatch::new()
.format(|out, message, record| {
out.finish(format_args!(
"{}[{}][{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
std::thread::current().name().unwrap_or("unnamed_thread"),
record.level(),
message
))
})
.level(log::LevelFilter::Debug)
.chain(std::io::stdout())
.chain(fern::log_file("output.log")?)
.apply()?;
Ok(())
}
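Initialization is a single call at startup, as main() does below; a usage sketch:
fn init() {
setup_logger().expect("logger setup failed");
// Subsequent log macros go to stdout and output.log, with the
// thread name baked into every line.
log::info!("OBSW starting");
}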

src/main.rs

@@ -1,52 +1,52 @@
#![allow(dead_code)]
mod ccsds;
mod hk;
mod pus;
mod requests;
mod tmtc;
#[cfg(unix)]
mod example_main;
mod can;
mod can_ids;
mod device_handler;
mod logger;
use crate::tmtc::{core_tmtc_task, CoreTmtcArgs, TmStore, PUS_APID};
use crate::hk::{AcsHkIds, HkRequest};
use crate::requests::{Request, RequestWithToken};
use crate::tmtc::{
core_tmtc_task, OtherArgs, PusTcSource, TcArgs, TcStore, TmArgs, TmFunnel, TmStore, PUS_APID,
};
use satrs_core::event_man::{
EventManagerWithMpscQueue, MpscEventReceiver, MpscEventU32SendProvider, SendEventProvider,
};
use satrs_core::events::EventU32;
use satrs_core::hal::host::udp_server::UdpTcServer;
use satrs_core::pool::{LocalPool, PoolCfg, SharedPool, StoreAddr};
use satrs_core::pool::{LocalPool, PoolCfg, StoreAddr};
use satrs_core::pus::event_man::{
DefaultPusMgmtBackendProvider, EventReporter, EventRequest, EventRequestWithToken,
PusEventDispatcher,
};
use satrs_core::pus::hk::Subservice;
use satrs_core::pus::verification::{
MpscVerifSender, VerificationReporterCfg, VerificationReporterWithSender,
};
use satrs_core::pus::{EcssTmError, EcssTmSender};
use satrs_core::seq_count::SimpleSeqCountProvider;
use satrs_core::tmtc::CcsdsError;
use eurosim_obsw::{OBSW_SERVER_ADDR, SERVER_PORT};
use spacepackets::time::cds::TimeProvider;
use spacepackets::time::TimeWriter;
use spacepackets::tm::PusTm;
use satrs_core::pus::{EcssTmError, EcssTmErrorWithSend, EcssTmSenderCore};
use satrs_core::seq_count::{SeqCountProviderSyncClonable, SequenceCountProvider, SequenceCountProviderCore};
use eurosim_obsw::{RequestTargetId, OBSW_SERVER_ADDR, SERVER_PORT};
use satrs_core::{
spacepackets::time::cds::TimeProvider,
spacepackets::time::TimeWriter,
spacepackets::tm::{PusTm, PusTmSecondaryHeader},
spacepackets::{SequenceFlags, SpHeader},
};
use std::collections::HashMap;
use std::net::{IpAddr, SocketAddr};
use std::sync::mpsc::channel;
use std::sync::mpsc::{channel, TryRecvError};
use std::sync::{mpsc, Arc, RwLock};
use std::thread;
struct TmFunnel {
tm_funnel_rx: mpsc::Receiver<StoreAddr>,
tm_server_tx: mpsc::Sender<StoreAddr>,
}
struct UdpTmtcServer {
udp_tc_server: UdpTcServer<CcsdsError<()>>,
tm_rx: mpsc::Receiver<StoreAddr>,
tm_store: SharedPool,
}
#[cfg(feature = "can")]
unsafe impl Send for UdpTmtcServer {}
use std::time::Duration;
use embedded_can::{Id, StandardId};
use log::{info, warn};
use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper;
use crate::can_ids::{can_id_to_package_id, load_package_ids, PackageId, PackageModel, ThreadId};
#[derive(Clone)]
struct EventTmSender {
@@ -63,35 +63,63 @@ impl EventTmSender {
}
}
impl EcssTmSender for EventTmSender {
impl EcssTmSenderCore for EventTmSender {
type Error = mpsc::SendError<StoreAddr>;
fn send_tm(&mut self, tm: PusTm) -> Result<(), EcssTmError<Self::Error>> {
fn send_tm(&mut self, tm: PusTm) -> Result<(), EcssTmErrorWithSend<Self::Error>> {
let addr = self.store_helper.add_pus_tm(&tm);
self.sender.send(addr).map_err(EcssTmError::SendError)
self.sender.send(addr).map_err(EcssTmErrorWithSend::SendError)
}
}
fn main() {
println!("Running OBSW example");
let pool_cfg = PoolCfg::new(vec![(8, 32), (4, 64), (2, 128)]);
let tm_pool = LocalPool::new(pool_cfg);
let tm_store: SharedPool = Arc::new(RwLock::new(Box::new(tm_pool)));
let tm_store_helper = TmStore {
pool: tm_store.clone(),
println!("Running SESPSat OBSW");
logger::setup_logger().unwrap();
let tm_pool = LocalPool::new(PoolCfg::new(vec![
(30, 32),
(15, 64),
(15, 128),
(15, 256),
(15, 1024),
(15, 2048),
]));
let tm_store = TmStore {
pool: Arc::new(RwLock::new(Box::new(tm_pool))),
};
let addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
let tc_pool = LocalPool::new(PoolCfg::new(vec![
(30, 32),
(15, 64),
(15, 128),
(15, 256),
(15, 1024),
(15, 2048),
]));
let tc_store = TcStore {
pool: Arc::new(RwLock::new(Box::new(tc_pool))),
};
let seq_count_provider = SeqCountProviderSyncClonable::default();
let aocs_seq_count_provider = seq_count_provider.clone();
let sock_addr = SocketAddr::new(IpAddr::V4(OBSW_SERVER_ADDR), SERVER_PORT);
let (tc_source_tx, tc_source_rx) = channel();
let (tm_funnel_tx, tm_funnel_rx) = channel();
let (tm_server_tx, tm_server_rx) = channel();
let sender = MpscVerifSender::new(tm_store.clone(), tm_funnel_tx.clone());
let verif_sender = MpscVerifSender::new(tm_store.pool.clone(), tm_funnel_tx.clone());
let verif_cfg = VerificationReporterCfg::new(
PUS_APID,
Box::new(SimpleSeqCountProvider::default()),
#[allow(clippy::box_default)]
Box::new(seq_count_provider.clone()),
1,
2,
8,
)
.unwrap();
let reporter_with_sender_0 = VerificationReporterWithSender::new(&verif_cfg, Box::new(sender));
.unwrap();
let verif_reporter = VerificationReporterWithSender::new(&verif_cfg, Box::new(verif_sender));
// Create event handling components
let (event_request_tx, event_request_rx) = channel::<EventRequestWithToken>();
@@ -104,26 +132,163 @@ fn main() {
PusEventDispatcher::new(event_reporter, Box::new(pus_tm_backend));
let (pus_event_man_tx, pus_event_man_rx) = channel();
let pus_event_man_send_provider = MpscEventU32SendProvider::new(1, pus_event_man_tx);
let mut reporter1 = reporter_with_sender_0.clone();
let mut reporter_event_handler = verif_reporter.clone();
let mut reporter_aocs = verif_reporter.clone();
event_man.subscribe_all(pus_event_man_send_provider.id());
// Create clones here to allow move for thread 0
let core_args = CoreTmtcArgs {
tm_store: tm_store_helper.clone(),
tm_sender: tm_funnel_tx.clone(),
// can_tm_sender: tm_funnel_tx.clone(),
// can_tm_store: tm_store_helper.clone(),
event_sender,
event_request_tx,
let mut request_map = HashMap::new();
let (acs_thread_tx, acs_thread_rx) = channel::<RequestWithToken>();
let (can_thread_tx, can_thread_rx) = channel::<RequestWithToken>();
request_map.insert(RequestTargetId::AcsSubsystem as u32, acs_thread_tx);
//request_map.insert(RequestTargetId::CanTask as u32, can_thread_tx);
let tc_source = PusTcSource {
tc_store,
tc_source: tc_source_tx,
};
println!("Starting TMTC task");
// Create clones here to allow moving the values
let core_args = OtherArgs {
sock_addr,
verif_reporter,
event_sender,
event_request_tx,
request_map,
};
let tc_args = TcArgs {
tc_source,
tc_receiver: tc_source_rx,
};
let tm_args = TmArgs {
tm_store: tm_store.clone(),
tm_sink_sender: tm_funnel_tx.clone(),
tm_server_rx,
};
let (aocs_can_tx, aocs_can_rx) = mpsc::channel::<PackageModel>();
let (tm_can_tx, tm_can_rx) = mpsc::channel::<PackageModel>();
// make tx thread id hashmap
let mut can_senders = HashMap::new();
can_senders.insert(ThreadId::AOCSThread, aocs_can_tx);
can_senders.insert(ThreadId::TMThread, tm_can_tx);
// get package id hashmap
let package_ids_rx = load_package_ids();
// checks for packet ids
println!("{:?}", package_ids_rx[&PackageId::PCDUStatusRequest]);
println!(
"{:?}",
package_ids_rx[&PackageId::CameraImageRequestConfirmation]
);
let test = can_id_to_package_id(Id::Standard(StandardId::new(65).expect("Invalid Id")));
if let Some(id) = test {
println!("{:?}", package_ids_rx[&id]);
}
let socket0 = can::CanRxHandler::new_socket("can0", can_senders, package_ids_rx).unwrap();
info!("Starting TMTC task");
let jh0 = thread::spawn(move || {
core_tmtc_task(core_args, tm_server_rx, addr, reporter_with_sender_0);
core_tmtc_task(core_args, tc_args, tm_args);
});
info!("Starting CAN Socket listening task");
let builder1 = thread::Builder::new().name("CanRxHandler".into());
let jh1 = builder1.spawn(move || loop {
let frame = socket0.rx_socket();
if let Some(frame) = frame {
let forward = socket0.forward_frame(frame);
}
});
let allowed_ids_range = 101..110;
let mut allowed_ids_aocs = Vec::new();
for id in allowed_ids_range {
allowed_ids_aocs.push(Id::Standard(StandardId::new(id).unwrap()));
}
let package_map_aocs_tx = load_package_ids();
let aocs_tm_funnel_tx = tm_funnel_tx.clone();
let mut aocs_tm_store = tm_store.clone();
let socket1 =
can::CanTxHandler::new_socket("can0", ThreadId::AOCSThread, package_map_aocs_tx).unwrap();
info!("Starting AOCS receiving thread");
let builder2 = thread::Builder::new().name("AOCSThread".into());
let jh2 = builder2.spawn(move || {
let mut time_stamp_buf: [u8; 7] = [0; 7];
let mut huge_buf: [u8; 8192] = [0; 8192];
let data: [u8; 3] = [1, 2, 3];
let current_mgm_data = MgmData::default();
//let current_mgt_data = MgtData::default();
//let current_
loop {
// device handling
//info!("Sending {:?}", PackageId::AOCSControlMGT1);
//socket1.tx_socket(PackageId::AOCSControlMGT1, &data);
//info!("Waiting for {:?}", PackageId::AOCSDataMGM1);
let msg = aocs_can_rx.try_recv();
//current_mgm_data.x = new_data
match aocs_can_rx.try_recv() {
Ok(package) => {
match package.package_id() {
_ => warn!("Incorrect Id"),
}
}
Err(_) => {}
}
// telecommand handling
match acs_thread_rx.try_recv() {
Ok(request_with_token) => {
match request_with_token.0 {
Request::HkRequest(hk_req) => {
match hk_req {
HkRequest::OneShot(id) => {
assert_eq!(id.target_id, RequestTargetId::AcsSubsystem as u32);
if id.unique_id == 0 {
let mut sp_header = SpHeader::tm_unseg(0x02, aocs_seq_count_provider.get_and_increment(), 0).unwrap();
let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap();
cds_stamp.write_to_bytes(&mut time_stamp_buf).expect("writing timestamp failed");
let mut len = id.write_to_be_bytes(&mut huge_buf).unwrap();
let json_string = "asdf";
huge_buf[8..json_string.len()+8].copy_from_slice(json_string.as_bytes());
len += json_string.len();
let tm_sec_header = PusTmSecondaryHeader::new_simple(3, Subservice::TmHkPacket as u8, &time_stamp_buf);
let hk_tm = PusTm::new(&mut sp_header, tm_sec_header, Some(&huge_buf[0..len]), true);
let addr = aocs_tm_store.add_pus_tm(&hk_tm);
aocs_tm_funnel_tx.send(addr).expect("sending failed");
/* let start_token = self //implement this for verification
.verif_reporter
.start_success(token, &self.time_stamp)
.expect("Error sending start success");
self.tm_tx
.send(addr)
.expect("Sending TM to TM funnel failed");
self.verif_reporter
.completion_success(start_token, &self.time_stamp)
.expect("Error sending completion success");
*/
}
}
HkRequest::Enable(_) => {}
HkRequest::Disable(_) => {}
HkRequest::ModifyCollectionInterval(_, _) => {}
}
}
}
}
Err(_) => {}
}
}
}
);
println!("Starting TM funnel task");
let jh1 = thread::spawn(move || {
let jh3 = thread::spawn(move || {
let tm_funnel = TmFunnel {
tm_server_tx,
tm_funnel_rx,
@@ -138,59 +303,20 @@ fn main() {
}
});
println!("Starting event handling task");
let jh2 = thread::spawn(move || {
let mut timestamp: [u8; 7] = [0; 7];
let mut sender = EventTmSender::new(tm_store_helper, tm_funnel_tx);
let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
let mut report_completion = |event_req: EventRequestWithToken, timestamp: &[u8]| {
reporter1
.completion_success(event_req.token, timestamp)
.expect("Sending completion success failed");
};
loop {
if let Ok(event_req) = event_request_rx.try_recv() {
match event_req.request {
EventRequest::Enable(event) => {
pus_event_dispatcher
.enable_tm_for_event(&event)
.expect("Enabling TM failed");
update_time(&mut time_provider, &mut timestamp);
report_completion(event_req, &timestamp);
}
EventRequest::Disable(event) => {
pus_event_dispatcher
.disable_tm_for_event(&event)
.expect("Disabling TM failed");
update_time(&mut time_provider, &mut timestamp);
report_completion(event_req, &timestamp);
}
}
}
if let Ok((event, _param)) = pus_event_man_rx.try_recv() {
update_time(&mut time_provider, &mut timestamp);
pus_event_dispatcher
.generate_pus_event_tm_generic(&mut sender, &timestamp, event, None)
.expect("Sending TM as event failed");
}
}
});
own_main();
jh0.join().expect("Joining UDP TMTC server thread failed");
jh1.unwrap().join().expect("Joining CAN Bus Listening thread failed");
jh2.unwrap().join().expect("Joining AOCS thread failed");
jh3.join().expect("Joining TM funnel thread failed");
/*
jh1.join().expect("Joining TM Funnel thread failed");
jh2.join().expect("Joining Event Manager thread failed");
jh3.join().expect("Joining AOCS thread failed");
*/
}
pub fn update_time(time_provider: &mut TimeProvider, timestamp: &mut [u8]) {
time_provider
.update_from_now()
.expect("Could not get current time");
time_provider
.write_to_bytes(timestamp)
.expect("Writing timestamp failed");
}
fn own_main() {
}
#[derive(Default)]
struct MgmData {
x: i16,
y: i16,
z: i16
}

src/pus.rs

@@ -1,26 +1,38 @@
use crate::tmtc::TmStore;
use crate::hk::{CollectionIntervalFactor, HkRequest};
use crate::requests::{Request, RequestWithToken};
use crate::tmtc::{PusTcSource, TmStore};
use satrs_core::events::EventU32;
use satrs_core::pool::StoreAddr;
use satrs_core::pus::event::Subservices;
use satrs_core::pus::event_man::{EventRequest, EventRequestWithToken};
use satrs_core::pus::hk;
use satrs_core::pus::verification::{
FailParams, StdVerifReporterWithSender, TcStateAccepted, VerificationToken,
};
use satrs_core::res_code::ResultU16;
use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper;
use satrs_core::tmtc::PusServiceProvider;
use spacepackets::ecss::{EcssEnumU16, PusPacket};
use spacepackets::tc::PusTc;
use spacepackets::time::cds::TimeProvider;
use spacepackets::time::TimeWriter;
use spacepackets::SpHeader;
use std::sync::mpsc;
use satrs_core::tmtc::{AddressableId, PusServiceProvider};
use eurosim_obsw::{hk_err, tmtc_err};
use satrs_core::{
spacepackets::ecss::PusPacket,
spacepackets::tc::PusTc,
spacepackets::time::cds::TimeProvider,
spacepackets::time::TimeWriter,
spacepackets::SpHeader,
};
use std::collections::HashMap;
use std::sync::mpsc::Sender;
pub struct PusReceiver {
pub tm_helper: PusTmWithCdsShortHelper,
pub tm_tx: mpsc::Sender<StoreAddr>,
pub tm_tx: Sender<StoreAddr>,
pub tm_store: TmStore,
pub verif_reporter: StdVerifReporterWithSender,
event_request_tx: mpsc::Sender<EventRequestWithToken>,
#[allow(dead_code)]
tc_source: PusTcSource,
event_request_tx: Sender<EventRequestWithToken>,
request_map: HashMap<u32, Sender<RequestWithToken>>,
stamper: TimeProvider,
time_stamp: [u8; 7],
}
@@ -28,18 +40,22 @@ pub struct PusReceiver {
impl PusReceiver {
pub fn new(
apid: u16,
tm_tx: mpsc::Sender<StoreAddr>,
tm_tx: Sender<StoreAddr>,
tm_store: TmStore,
verif_reporter: StdVerifReporterWithSender,
event_request_tx: mpsc::Sender<EventRequestWithToken>,
tc_source: PusTcSource,
event_request_tx: Sender<EventRequestWithToken>,
request_map: HashMap<u32, Sender<RequestWithToken>>,
) -> Self {
Self {
tm_helper: PusTmWithCdsShortHelper::new(apid),
tm_tx,
tm_store,
verif_reporter,
tc_source,
event_request_tx,
stamper: TimeProvider::default(),
request_map,
stamper: TimeProvider::new_with_u16_days(0, 0),
time_stamp: [0; 7],
}
}
@@ -63,27 +79,25 @@ impl PusServiceProvider for PusReceiver {
if service == 17 {
self.handle_test_service(pus_tc, accepted_token);
} else if service == 5 {
self.handle_event_service(pus_tc, accepted_token);
self.handle_event_request(pus_tc, accepted_token);
} else if service == 3 {
self.handle_hk_request(pus_tc, accepted_token);
} else {
// TODO: Unknown service verification failure
// TODO: Unknown service returncode
self.update_time_stamp();
self.verif_reporter
.start_failure(
accepted_token,
FailParams::new(&self.time_stamp, &tmtc_err::INVALID_PUS_SERVICE, None),
)
.expect("Start failure verification failed")
}
Ok(())
}
}
pub type CollInterval = u32;
pub enum HkIds {
AcsHk = 0
}
pub enum HkRequest {
OneShot(HkIds),
Enable(HkIds, CollInterval), // periodic
Disable(HkIds, CollInterval)
}
impl PusReceiver {
fn handle_test_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
if pus_tc.subservice() == 1 {
if PusPacket::subservice(pus_tc) == 1 {
println!("Received PUS ping command TC[17,1]");
println!("Sending ping reply PUS TM[17,2]");
let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(17, 2, None);
@@ -99,12 +113,11 @@ impl PusReceiver {
.completion_success(start_token, &self.time_stamp)
.expect("Error sending completion success");
} else {
// TODO: Unknown Subservice returncode
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(&self.time_stamp, &EcssEnumU16::new(2), None),
FailParams::new(&self.time_stamp, &tmtc_err::INVALID_PUS_SUBSERVICE, None),
)
.expect("Sending start failure TM failed");
}
@@ -119,15 +132,84 @@ impl PusReceiver {
.expect("Writing timestamp failed");
}
fn handle_event_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
fn handle_hk_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
if pus_tc.user_data().is_none() {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(&self.time_stamp, &tmtc_err::NOT_ENOUGH_APP_DATA, None),
)
.expect("Sending start failure TM failed");
return;
}
let user_data = pus_tc.user_data().unwrap();
if user_data.len() < 8 {
let err = if user_data.len() < 4 {
&hk_err::TARGET_ID_MISSING
} else {
&hk_err::UNIQUE_ID_MISSING
};
self.update_time_stamp();
self.verif_reporter
.start_failure(token, FailParams::new(&self.time_stamp, err, None))
.expect("Sending start failure TM failed");
return;
}
let addressable_id = AddressableId::from_raw_be(user_data).unwrap();
if !self.request_map.contains_key(&addressable_id.target_id) {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(&self.time_stamp, &hk_err::UNKNOWN_TARGET_ID, None),
)
.expect("Sending start failure TM failed");
return;
}
let send_request = |request: HkRequest| {
let sender = self.request_map.get(&addressable_id.target_id).unwrap();
sender
.send(RequestWithToken(Request::HkRequest(request), token))
.unwrap_or_else(|_| panic!("Sending HK request {:?} failed", request));
};
if PusPacket::subservice(pus_tc) == hk::Subservice::TcEnableGeneration as u8 {
send_request(HkRequest::Enable(addressable_id));
} else if PusPacket::subservice(pus_tc) == hk::Subservice::TcDisableGeneration as u8 {
send_request(HkRequest::Disable(addressable_id));
} else if PusPacket::subservice(pus_tc) == hk::Subservice::TcGenerateOneShotHk as u8 {
send_request(HkRequest::OneShot(addressable_id));
} else if PusPacket::subservice(pus_tc) == hk::Subservice::TcModifyCollectionInterval as u8
{
if user_data.len() < 12 {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(
&self.time_stamp,
&hk_err::COLLECTION_INTERVAL_MISSING,
None,
),
)
.expect("Sending start failure TM failed");
return;
}
send_request(HkRequest::ModifyCollectionInterval(
addressable_id,
CollectionIntervalFactor::from_be_bytes(user_data[8..12].try_into().unwrap()),
));
}
}
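The app data layout parsed above, as a worked example: bytes 0..4 carry the big-endian target id, 4..8 the unique id, and 8..12 the collection interval factor, which is only read for TcModifyCollectionInterval. The ids shown are illustrative:
fn encode_modify_interval_app_data() -> [u8; 12] {
let mut app_data = [0u8; 12];
app_data[0..4].copy_from_slice(&1u32.to_be_bytes()); // target_id: AcsSubsystem
app_data[4..8].copy_from_slice(&1u32.to_be_bytes()); // unique_id: TestMgmSet
app_data[8..12].copy_from_slice(&4u32.to_be_bytes()); // interval factor
app_data
}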
fn handle_event_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
let send_start_failure = |verif_reporter: &mut StdVerifReporterWithSender,
timestamp: &[u8; 7],
failure_code: EcssEnumU16,
failure_code: &ResultU16,
failure_data: Option<&[u8]>| {
verif_reporter
.start_failure(
token,
FailParams::new(timestamp, &failure_code, failure_data),
FailParams::new(timestamp, failure_code, failure_data),
)
.expect("Sending start failure TM failed");
};
@@ -142,7 +224,7 @@ impl PusReceiver {
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
EcssEnumU16::new(1),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
);
return;
@@ -153,7 +235,7 @@ impl PusReceiver {
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
EcssEnumU16::new(1),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
);
return;
@@ -181,12 +263,11 @@ impl PusReceiver {
.expect("Sending event request failed");
}
_ => {
// TODO: Unknown Subservice returncode
self.update_time_stamp();
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
EcssEnumU16::new(2),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
);
}

10
src/requests.rs Normal file

@@ -0,0 +1,10 @@
use crate::hk::HkRequest;
use satrs_core::pus::verification::{TcStateAccepted, VerificationToken};
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum Request {
HkRequest(HkRequest),
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct RequestWithToken(pub Request, pub VerificationToken<TcStateAccepted>);
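Consumers in this commit read the tuple positionally (request.0 for the request, request.1 for the verification token); a handling sketch, with the println standing in for real work:
fn handle(request: RequestWithToken) {
match request.0 {
Request::HkRequest(hk_req) => {
// Service hk_req, then report start/completion success against
// request.1, mirroring the AOCS thread in src/main.rs.
println!("got {:?}", hk_req);
}
}
}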

src/tmtc.rs

@@ -1,28 +1,91 @@
use satrs_core::events::EventU32;
use satrs_core::hal::host::udp_server::{ReceiveResult, UdpTcServer};
use satrs_core::params::Params;
use std::collections::HashMap;
use std::error::Error;
use std::fmt::{Display, Formatter};
use std::net::SocketAddr;
use std::sync::mpsc;
use std::sync::mpsc::Sender;
use std::sync::mpsc::{Receiver, SendError, Sender, TryRecvError};
use std::thread;
use std::time::Duration;
use crate::ccsds::CcsdsReceiver;
use crate::pus::PusReceiver;
use crate::UdpTmtcServer;
use satrs_core::pool::{SharedPool, StoreAddr};
use crate::requests::RequestWithToken;
use satrs_core::pool::{SharedPool, StoreAddr, StoreError};
use satrs_core::pus::event_man::EventRequestWithToken;
use satrs_core::pus::verification::StdVerifReporterWithSender;
use satrs_core::tmtc::{CcsdsDistributor, CcsdsError, PusDistributor};
use spacepackets::tm::PusTm;
use satrs_core::tmtc::{
CcsdsDistributor, CcsdsError, PusServiceProvider, ReceivesCcsdsTc, ReceivesEcssPusTc,
};
use satrs_core::{
spacepackets::ecss::PusPacket,
spacepackets::tc::PusTc,
spacepackets::tm::PusTm,
spacepackets::SpHeader,
};
pub const PUS_APID: u16 = 0x02;
pub struct CoreTmtcArgs {
pub tm_store: TmStore,
pub tm_sender: Sender<StoreAddr>,
pub struct OtherArgs {
pub sock_addr: SocketAddr,
pub verif_reporter: StdVerifReporterWithSender,
pub event_sender: Sender<(EventU32, Option<Params>)>,
pub event_request_tx: Sender<EventRequestWithToken>,
pub request_map: HashMap<u32, Sender<RequestWithToken>>,
}
pub struct TmArgs {
pub tm_store: TmStore,
pub tm_sink_sender: Sender<StoreAddr>,
pub tm_server_rx: Receiver<StoreAddr>,
}
pub struct TcArgs {
pub tc_source: PusTcSource,
pub tc_receiver: Receiver<StoreAddr>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MpscStoreAndSendError {
StoreError(StoreError),
SendError(SendError<StoreAddr>),
}
impl Display for MpscStoreAndSendError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
MpscStoreAndSendError::StoreError(s) => {
write!(f, "store error {}", s)
}
MpscStoreAndSendError::SendError(s) => {
write!(f, "send error {}", s)
}
}
}
}
impl Error for MpscStoreAndSendError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
MpscStoreAndSendError::StoreError(s) => Some(s),
MpscStoreAndSendError::SendError(s) => Some(s),
}
}
}
impl From<StoreError> for MpscStoreAndSendError {
fn from(value: StoreError) -> Self {
Self::StoreError(value)
}
}
impl From<SendError<StoreAddr>> for MpscStoreAndSendError {
fn from(value: SendError<StoreAddr>) -> Self {
Self::SendError(value)
}
}
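The two From impls are what let the TC path below use ? for both failure modes; an illustrative sketch mirroring pass_ccsds (the function name is hypothetical):
fn store_then_forward(src: &mut PusTcSource, tc_raw: &[u8]) -> Result<(), MpscStoreAndSendError> {
let mut pool = src.tc_store.pool.write().expect("locking pool failed");
let addr = pool.add(tc_raw)?; // StoreError -> MpscStoreAndSendError
drop(pool);
src.tc_source.send(addr)?; // SendError<StoreAddr> -> MpscStoreAndSendError
Ok(())
}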
#[derive(Clone)]
@@ -30,57 +93,138 @@ pub struct TmStore {
pub pool: SharedPool,
}
#[derive(Clone)]
pub struct TcStore {
pub pool: SharedPool,
}
impl TmStore {
pub fn add_pus_tm(&mut self, pus_tm: &PusTm) -> StoreAddr {
let mut pg = self.pool.write().expect("Error locking TM store");
let mut pg = self.pool.write().expect("error locking TM store");
let (addr, buf) = pg.free_element(pus_tm.len_packed()).expect("Store error");
pus_tm
.write_to_bytes(buf)
.expect("Writing PUS TM to store failed");
.expect("writing PUS TM to store failed");
addr
}
}
pub fn core_tmtc_task(
args: CoreTmtcArgs,
tm_server_rx: mpsc::Receiver<StoreAddr>,
addr: SocketAddr,
verif_reporter: StdVerifReporterWithSender,
) {
let pus_receiver = PusReceiver::new(
impl TcStore {
pub fn add_pus_tc(&mut self, pus_tc: &PusTc) -> Result<StoreAddr, StoreError> {
let mut pg = self.pool.write().expect("error locking TC store");
let (addr, buf) = pg.free_element(pus_tc.len_packed())?;
pus_tc
.write_to_bytes(buf)
.expect("writing PUS TC to store failed");
Ok(addr)
}
}
pub struct TmFunnel {
pub tm_funnel_rx: Receiver<StoreAddr>,
pub tm_server_tx: Sender<StoreAddr>,
}
pub struct UdpTmtcServer {
udp_tc_server: UdpTcServer<CcsdsError<MpscStoreAndSendError>>,
tm_rx: Receiver<StoreAddr>,
tm_store: SharedPool,
}
#[derive(Clone)]
pub struct PusTcSource {
pub tc_source: Sender<StoreAddr>,
pub tc_store: TcStore,
}
impl ReceivesEcssPusTc for PusTcSource {
type Error = MpscStoreAndSendError;
fn pass_pus_tc(&mut self, _: &SpHeader, pus_tc: &PusTc) -> Result<(), Self::Error> {
let addr = self.tc_store.add_pus_tc(pus_tc)?;
self.tc_source.send(addr)?;
Ok(())
}
}
impl ReceivesCcsdsTc for PusTcSource {
type Error = MpscStoreAndSendError;
fn pass_ccsds(&mut self, _: &SpHeader, tc_raw: &[u8]) -> Result<(), Self::Error> {
let mut pool = self.tc_store.pool.write().expect("locking pool failed");
let addr = pool.add(tc_raw)?;
drop(pool);
self.tc_source.send(addr)?;
Ok(())
}
}
pub fn core_tmtc_task(args: OtherArgs, mut tc_args: TcArgs, tm_args: TmArgs) {
let mut pus_receiver = PusReceiver::new(
PUS_APID,
args.tm_sender,
args.tm_store.clone(),
verif_reporter,
tm_args.tm_sink_sender,
tm_args.tm_store.clone(),
args.verif_reporter,
tc_args.tc_source.clone(),
args.event_request_tx,
args.request_map,
);
let pus_distributor = PusDistributor::new(Box::new(pus_receiver));
let ccsds_receiver = CcsdsReceiver {
pus_handler: pus_distributor,
tc_source: tc_args.tc_source.clone(),
};
let ccsds_distributor = CcsdsDistributor::new(Box::new(ccsds_receiver));
let udp_tc_server = UdpTcServer::new(addr, 2048, Box::new(ccsds_distributor))
let udp_tc_server = UdpTcServer::new(args.sock_addr, 2048, Box::new(ccsds_distributor))
.expect("Creating UDP TMTC server failed");
let mut udp_tmtc_server = UdpTmtcServer {
udp_tc_server,
tm_rx: tm_server_rx,
tm_store: args.tm_store.pool.clone(),
tm_rx: tm_args.tm_server_rx,
tm_store: tm_args.tm_store.pool.clone(),
};
loop {
core_tmtc_loop(&mut udp_tmtc_server);
core_tmtc_loop(&mut udp_tmtc_server, &mut tc_args, &mut pus_receiver);
thread::sleep(Duration::from_millis(400));
}
}
fn core_tmtc_loop(udp_tmtc_server: &mut UdpTmtcServer) {
while core_tc_handling(udp_tmtc_server) {}
fn core_tmtc_loop(
udp_tmtc_server: &mut UdpTmtcServer,
tc_args: &mut TcArgs,
pus_receiver: &mut PusReceiver,
) {
while poll_tc_server(udp_tmtc_server) {}
match tc_args.tc_receiver.try_recv() {
Ok(addr) => {
let pool = tc_args
.tc_source
.tc_store
.pool
.read()
.expect("locking tc pool failed");
let data = pool.read(&addr).expect("reading pool failed");
match PusTc::from_bytes(data) {
Ok((pus_tc, _)) => {
pus_receiver
.handle_pus_tc_packet(pus_tc.service(), pus_tc.sp_header(), &pus_tc)
.ok();
}
Err(e) => {
println!("error creating PUS TC from raw data: {}", e);
println!("raw data: {:x?}", data);
}
}
}
Err(e) => {
if let TryRecvError::Disconnected = e {
println!("tmtc thread: sender disconnected")
}
}
}
if let Some(recv_addr) = udp_tmtc_server.udp_tc_server.last_sender() {
core_tm_handling(udp_tmtc_server, &recv_addr);
}
}
fn core_tc_handling(udp_tmtc_server: &mut UdpTmtcServer) -> bool {
fn poll_tc_server(udp_tmtc_server: &mut UdpTmtcServer) -> bool {
match udp_tmtc_server.udp_tc_server.try_recv_tc() {
Ok(_) => true,
Err(e) => match e {
@@ -108,9 +252,9 @@ fn core_tm_handling(udp_tmtc_server: &mut UdpTmtcServer, recv_addr: &SocketAddr)
let mut store_lock = udp_tmtc_server
.tm_store
.write()
.expect("Locking TM store failed");
.expect("locking TM store failed");
let pg = store_lock.read_with_guard(addr);
let buf = pg.read().expect("Error reading TM pool data");
let buf = pg.read().expect("error reading TM pool data");
if buf.len() > 9 {
let service = buf[7];
let subservice = buf[8];
@@ -122,6 +266,6 @@ fn core_tm_handling(udp_tmtc_server: &mut UdpTmtcServer, recv_addr: &SocketAddr)
.udp_tc_server
.socket
.send_to(buf, recv_addr)
.expect("Sending TM failed");
.expect("sending TM failed");
}
}