added scheduling implementation from example

lkoester
2023-02-01 11:05:57 +01:00
parent 62784b32cc
commit 27790242b0
15 changed files with 614 additions and 132 deletions
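For orientation before the per-file diffs: this commit shares one PusScheduler between the PUS receiver and the TMTC loop. Below is a condensed sketch of that flow, using only the satrs_core calls visible in the hunks that follow (pool locking elided), so read it as the shape of the change rather than a drop-in:

```rust
use std::cell::RefCell;
use std::rc::Rc;
use std::time::Duration;

use satrs_core::pus::scheduling::PusScheduler;

// Shape of the scheduling flow added by this commit: one scheduler owned
// by the single-threaded TMTC task, shared with the PUS receiver through
// Rc<RefCell<..>>.
fn scheduling_shape() {
    let scheduler = Rc::new(RefCell::new(
        // 5 s time margin, as in core_tmtc_task below.
        PusScheduler::new_with_current_init_time(Duration::from_secs(5)).unwrap(),
    ));
    // Handed to PusReceiver::new(..); handle_scheduled_tc borrows it to
    // enable/disable/reset the schedule and to insert wrapped TCs.
    let _for_pus_receiver = scheduler.clone();

    // Each loop iteration then runs, with the shared TC pool write-locked:
    //   scheduler.borrow_mut().update_time_from_now().unwrap();
    //   scheduler.borrow_mut().release_telecommands(releaser, pool.as_mut());
    // where `releaser` re-queues every due StoreAddr into the TC source.
}
```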

View File

@@ -1,5 +1,5 @@
use satrs_core::tmtc::AddressableId;
use eurosim_obsw::RequestTargetId;
use satrs_core::tmtc::AddressableId;
pub type CollectionIntervalFactor = u32;
@@ -19,4 +19,4 @@ pub enum Subservice {
ImageRequest = 1,
OrientationRequest = 2,
PointingRequest = 3,
}
}

View File

@@ -0,0 +1 @@

View File

@@ -4,7 +4,6 @@ pub type CollectionIntervalFactor = u32;
//#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum CameraRequest {
OneShot(AddressableId),

View File

@@ -152,20 +152,20 @@ impl CanTxHandler {
let value = self.package_map.get(&package_id).unwrap();
if value.get_sender() == DeviceId::OBC {
//if value.get_thread() == self.thread_id {
if data.len() <= 8 {
let frame_id = package_id_to_can_id(&package_id);
let frame = CanFrame::new(frame_id, data);
if let Some(frame) = frame {
self.socket
.write_frame(&frame)
.expect("Error writing frame.");
}
} else {
warn!(
"Message dismissed, data length ({:?}) exceeds 8 bytes",
data.len()
);
if data.len() <= 8 {
let frame_id = package_id_to_can_id(&package_id);
let frame = CanFrame::new(frame_id, data);
if let Some(frame) = frame {
self.socket
.write_frame(&frame)
.expect("Error writing frame.");
}
} else {
warn!(
"Message dismissed, data length ({:?}) exceeds 8 bytes",
data.len()
);
}
/*} else {
warn!(
"Message dismissed, mismatched thread id: {:?}",

View File

@@ -5,7 +5,6 @@ use std::fs;
use std::sync::mpsc::Sender;
use std::thread::Thread;
pub use num_derive::{FromPrimitive, ToPrimitive};
pub use num_traits::{FromPrimitive, ToPrimitive};
pub use strum::IntoEnumIterator; // 0.17.1
@@ -61,7 +60,7 @@ pub enum PackageId {
AOCSDataSunSensor5 = 69,
AOCSDataSunSensor6 = 70,
AOCSDataStarTracker = 71,
CameraImageRequest = 101 ,
CameraImageRequest = 101,
CameraImageRequestConfirmation = 102,
CameraImageExecutionStart = 103,
CameraImageExectutionEnd = 104,
@@ -193,7 +192,7 @@ pub fn load_package_ids() -> HashMap<PackageId, SenderReceiverThread> {
SenderReceiverThread::new(DeviceId::SunSensor5, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::SunSensor6, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::StarTracker, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId:: OBC , DeviceId:: Camera , ThreadId:: PLDThread ),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::Camera, ThreadId::PLDThread),
SenderReceiverThread::new(DeviceId::Camera, DeviceId::OBC, ThreadId::PLDThread),
SenderReceiverThread::new(DeviceId::Camera, DeviceId::OBC, ThreadId::PLDThread),
SenderReceiverThread::new(DeviceId::Camera, DeviceId::OBC, ThreadId::PLDThread),

View File

@@ -1,15 +1,15 @@
use crate::can_ids::{DeviceId, PackageId, PackageModel, ThreadId};
use log::{info, warn};
use socketcan::{errors, frame, socket, CanFrame, Socket};
use std::collections::HashMap;
use std::hash::Hash;
use std::sync::mpsc::{Receiver, RecvError, Sender};
use log::{info, warn};
use crate::can_ids::{DeviceId, PackageId, PackageModel, ThreadId};
use socketcan::{errors, frame, socket, CanFrame, Socket};
use crate::can::{CanRxHandler, CanTxHandler};
pub use num_derive::{FromPrimitive, ToPrimitive};
pub use num_traits::{FromPrimitive, ToPrimitive};
pub use strum::IntoEnumIterator; // 0.17.1
pub use strum_macros::EnumIter;
use crate::can::{CanRxHandler, CanTxHandler}; // 0.17.1
pub use strum_macros::EnumIter; // 0.17.1
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DeviceState {
@@ -26,23 +26,31 @@ pub struct CanDeviceHandler {
}
impl CanDeviceHandler {
pub fn new(can_tx_handler: CanTxHandler, can_rx_receiver: Receiver<PackageModel>) -> CanDeviceHandler {
let mut device_state_map:HashMap<DeviceId, DeviceState> = HashMap::new();
pub fn new(
can_tx_handler: CanTxHandler,
can_rx_receiver: Receiver<PackageModel>,
) -> CanDeviceHandler {
let mut device_state_map: HashMap<DeviceId, DeviceState> = HashMap::new();
for id in DeviceId::iter() {
device_state_map.insert(id, DeviceState::Unknown);
}
CanDeviceHandler { device_state_map, can_tx_handler, can_rx_receiver }
CanDeviceHandler {
device_state_map,
can_tx_handler,
can_rx_receiver,
}
}
pub fn power_on(&mut self, id: DeviceId) -> Result<(),()> {
pub fn power_on(&mut self, id: DeviceId) -> Result<(), ()> {
if !self.device_state_map.contains_key(&id) {
return Err(());
}
info!("Powering on device {:?}", id);
let msg_data: [u8; 1] = [id as u8];
self.can_tx_handler.tx_socket(PackageId::DevicePowerOnRequest, &msg_data);
self.can_tx_handler
.tx_socket(PackageId::DevicePowerOnRequest, &msg_data);
let request_confirmation = self.can_rx_receiver.recv();
match request_confirmation {
@@ -93,14 +101,15 @@ impl CanDeviceHandler {
Ok(())
}
pub fn power_off(&mut self, id: DeviceId) -> Result<(),()> {
pub fn power_off(&mut self, id: DeviceId) -> Result<(), ()> {
if !self.device_state_map.contains_key(&id) {
return Err(());
}
info!("Powering on device {:?}", id);
let msg_data: [u8; 1] = [id as u8];
self.can_tx_handler.tx_socket(PackageId::DevicePowerOffRequest, &msg_data);
self.can_tx_handler
.tx_socket(PackageId::DevicePowerOffRequest, &msg_data);
let request_confirmation = self.can_rx_receiver.recv();
match request_confirmation {
@@ -153,23 +162,26 @@ impl CanDeviceHandler {
pub fn get_power_states(&mut self) -> HashMap<DeviceId, DeviceState> {
for id in DeviceId::iter() {
self.update_power_state(id).expect("Error updating power state.");
self.update_power_state(id)
.expect("Error updating power state.");
}
self.device_state_map.clone()
}
pub fn get_power_state(&mut self, id: DeviceId) -> Option<&DeviceState> {
self.update_power_state(id).expect("Error updating power state.");
self.update_power_state(id)
.expect("Error updating power state.");
self.device_state_map.get(&id)
}
pub fn update_power_state(&mut self, id: DeviceId) -> Result<(),()> {
pub fn update_power_state(&mut self, id: DeviceId) -> Result<(), ()> {
if !self.device_state_map.contains_key(&id) {
return Err(());
}
let msg_data: [u8; 1] = [id as u8];
self.can_tx_handler.tx_socket(PackageId::DevicePowerStatusRequest, &msg_data);
self.can_tx_handler
.tx_socket(PackageId::DevicePowerStatusRequest, &msg_data);
let response = self.can_rx_receiver.recv();
if let Ok(response) = response {
@@ -193,15 +205,18 @@ impl CanDeviceHandler {
pub fn power_up_sequence(mut device_handler: CanDeviceHandler) -> HashMap<DeviceId, DeviceState> {
for id in DeviceId::iter() {
device_handler.power_on(id).expect("Error powering on device.");
device_handler
.power_on(id)
.expect("Error powering on device.");
}
device_handler.get_power_states()
}
pub fn power_down_sequence(mut device_handler: CanDeviceHandler) -> HashMap<DeviceId, DeviceState> {
for id in DeviceId::iter() {
device_handler.power_off(id).expect("Error powering off device.");
device_handler
.power_off(id)
.expect("Error powering off device.");
}
device_handler.get_power_states()
}
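As a usage note for the handler above, a minimal bring-up sketch. It assumes it lives in the same module as CanDeviceHandler and power_up_sequence; the CanTxHandler construction and the sender side of the channel are elided:

```rust
use std::sync::mpsc::Receiver;

use crate::can::CanTxHandler;
use crate::can_ids::PackageModel;

// Hypothetical bring-up: power every known device on via the CAN bus,
// then print the resulting state map. power_up_sequence takes the
// handler by value, since it drives the whole request/confirm exchange.
fn bring_up(tx: CanTxHandler, rx: Receiver<PackageModel>) {
    let handler = CanDeviceHandler::new(tx, rx);
    for (id, state) in power_up_sequence(handler) {
        println!("{id:?} -> {state:?}");
    }
}
```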

View File

@@ -13,4 +13,4 @@ pub enum HkRequest {
Enable(AddressableId),
Disable(AddressableId),
ModifyCollectionInterval(AddressableId, CollectionIntervalFactor),
}
}

View File

@@ -1,3 +1,6 @@
mod action;
mod aocs;
mod cam;
mod can;
mod can_ids;
mod ccsds;
@@ -7,9 +10,6 @@ mod logger;
mod pus;
mod requests;
mod tmtc;
mod aocs;
mod cam;
mod action;
use crate::hk::{AcsHkIds, HkRequest};
use crate::requests::{Request, RequestWithToken};
@@ -332,8 +332,7 @@ fn main() {
ActionRequest::ImageRequest(target_id) => {
assert_eq!(target_id, RequestTargetId::PldSubsystem);
// get current time stamp
let cds_stamp =
TimeProvider::from_now_with_u16_days().unwrap();
let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap();
cds_stamp.write_to_bytes(&mut time_stamp_buf);
// send start verification and get token
@@ -353,7 +352,10 @@ fn main() {
loop {
match pld_can_rx.recv() {
Ok(msg) => {
if msg.package_id() == PackageId::CameraImageRequestConfirmation && msg.data()[0] == 1 {
if msg.package_id()
== PackageId::CameraImageRequestConfirmation
&& msg.data()[0] == 1
{
break;
}
}
@@ -366,7 +368,10 @@ fn main() {
loop {
match pld_can_rx.recv() {
Ok(msg) => {
if msg.package_id() == PackageId::CameraImageExecutionStart && msg.data()[0] == 1 {
if msg.package_id()
== PackageId::CameraImageExecutionStart
&& msg.data()[0] == 1
{
break;
}
}
@@ -379,14 +384,21 @@ fn main() {
loop {
match pld_can_rx.recv() {
Ok(msg) => {
if msg.package_id() == PackageId::CameraImageExectutionEnd && msg.data()[0] == 1 {
if msg.package_id()
== PackageId::CameraImageExectutionEnd
&& msg.data()[0] == 1
{
let cds_stamp =
TimeProvider::from_now_with_u16_days().unwrap();
TimeProvider::from_now_with_u16_days()
.unwrap();
cds_stamp.write_to_bytes(&mut time_stamp_buf);
// send end verification with token
reporter_pld
.completion_success(start_token, Some(&time_stamp_buf))
.completion_success(
start_token,
Some(&time_stamp_buf),
)
.expect("Error sending start success.");
break;
}
@@ -409,7 +421,6 @@ fn main() {
}
});
println!("Starting TM funnel task");
let builder4 = thread::Builder::new().name("TMFunnelThread".into());
let jh4 = builder4.spawn(move || {
@@ -427,7 +438,9 @@ fn main() {
}
});
jh0.unwrap().join().expect("Joining UDP TMTC server thread failed");
jh0.unwrap()
.join()
.expect("Joining UDP TMTC server thread failed");
jh1.unwrap()
.join()
.expect("Joining CAN Bus Listening thread failed");
@@ -446,4 +459,4 @@ struct MgmData {
x: i16,
y: i16,
z: i16,
}
}
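The three camera loops in main.rs above repeat one wait pattern; a hedged helper sketch of it (the package_id()/data() accessors and the data()[0] == 1 success flag are taken from those loops; the helper itself is not part of the commit):

```rust
use std::sync::mpsc::Receiver;

use crate::can_ids::{PackageId, PackageModel};

// Blocks on the PLD CAN receiver until `expected` arrives with its first
// data byte set to 1 (the confirmation flag checked in the loops above).
// A closed channel ends the wait with false.
fn wait_for(rx: &Receiver<PackageModel>, expected: PackageId) -> bool {
    loop {
        match rx.recv() {
            Ok(msg) if msg.package_id() == expected && msg.data()[0] == 1 => {
                return true;
            }
            Ok(_) => continue,
            Err(_) => return false,
        }
    }
}
```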

View File

@@ -1,7 +1,7 @@
use crate::hk::{CollectionIntervalFactor, HkRequest};
use crate::requests::{Request, RequestWithToken};
use crate::tmtc::{PusTcSource, TmStore};
use eurosim_obsw::{hk_err, RequestTargetId, tmtc_err};
use eurosim_obsw::{hk_err, tmtc_err, RequestTargetId};
use satrs_core::events::EventU32;
use satrs_core::pool::StoreAddr;
use satrs_core::pus::event::Subservices;
@@ -14,16 +14,19 @@ use satrs_core::res_code::ResultU16;
use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper;
use satrs_core::tmtc::{AddressableId, PusServiceProvider};
use satrs_core::{
spacepackets::ecss::PusPacket, spacepackets::tc::PusTc, spacepackets::time::cds::TimeProvider,
spacepackets::time::TimeWriter, spacepackets::SpHeader,
spacepackets, spacepackets::ecss::PusPacket, spacepackets::tc::PusTc,
spacepackets::time::cds::TimeProvider, spacepackets::time::TimeWriter, spacepackets::SpHeader,
};
use std::cell::RefCell;
use std::collections::HashMap;
use std::sync::mpsc::Sender;
use satrs_core::spacepackets::ecss::PusServiceId::Action;
use eurosim_obsw::RequestTargetId::{AcsSubsystem, PldSubsystem};
use crate::action;
use crate::action::ActionRequest;
use eurosim_obsw::RequestTargetId::{AcsSubsystem, PldSubsystem};
use satrs_core::pus::scheduling::PusScheduler;
use satrs_core::spacepackets::ecss::PusServiceId::Action;
use std::collections::HashMap;
use std::rc::Rc;
use std::sync::mpsc::Sender;
pub struct PusReceiver {
pub tm_helper: PusTmWithCdsShortHelper,
@@ -36,6 +39,7 @@ pub struct PusReceiver {
request_map: HashMap<u32, Sender<RequestWithToken>>,
stamper: TimeProvider,
time_stamp: [u8; 7],
scheduler: Rc<RefCell<PusScheduler>>,
}
impl PusReceiver {
@@ -47,6 +51,7 @@ impl PusReceiver {
tc_source: PusTcSource,
event_request_tx: Sender<EventRequestWithToken>,
request_map: HashMap<u32, Sender<RequestWithToken>>,
scheduler: Rc<RefCell<PusScheduler>>,
) -> Self {
Self {
tm_helper: PusTmWithCdsShortHelper::new(apid),
@@ -58,6 +63,7 @@ impl PusReceiver {
request_map,
stamper: TimeProvider::new_with_u16_days(0, 0),
time_stamp: [0; 7],
scheduler,
}
}
}
@@ -85,6 +91,8 @@ impl PusServiceProvider for PusReceiver {
self.handle_hk_request(pus_tc, accepted_token);
} else if service == 8 {
self.handle_function_request(pus_tc, accepted_token);
} else if service == 11 {
self.handle_scheduled_tc(pus_tc, accepted_token);
} else {
self.update_time_stamp();
self.verif_reporter
@@ -120,7 +128,11 @@ impl PusReceiver {
self.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::INVALID_PUS_SUBSERVICE, None),
FailParams::new(
Some(&self.time_stamp),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
),
)
.expect("Sending start failure TM failed");
}
@@ -277,41 +289,38 @@ impl PusReceiver {
}
}
fn handle_function_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
fn handle_function_request(
&mut self,
pus_tc: &PusTc,
token: VerificationToken<TcStateAccepted>,
) {
if pus_tc.user_data().is_none() {
self.update_time_stamp();
/*self.verif_reporter
self.verif_reporter
.start_failure(
token,
FailParams::new(&self.time_stamp, &tmtc_err::NOT_ENOUGH_APP_DATA, None),
FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
)
.expect("Sending start failure TM failed");
*/
return;
}
let send_request = |request: ActionRequest| {
match request {
ActionRequest::ImageRequest(target_id) => {
let id = target_id as u32;
let sender = self.request_map.get(&id).unwrap();
sender
.send(RequestWithToken(Request::ActionRequest(request), token))
.unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request));
}
ActionRequest::OrientationRequest(target_id) => {
let id = target_id as u32;
let sender = self.request_map.get(&id).unwrap();
sender
.send(RequestWithToken(Request::ActionRequest(request), token))
.unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request));
}
_ => {}
let send_request = |request: ActionRequest| match request {
ActionRequest::ImageRequest(target_id) => {
let id = target_id as u32;
let sender = self.request_map.get(&id).unwrap();
sender
.send(RequestWithToken(Request::ActionRequest(request), token))
.unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request));
}
ActionRequest::OrientationRequest(target_id) => {
let id = target_id as u32;
let sender = self.request_map.get(&id).unwrap();
sender
.send(RequestWithToken(Request::ActionRequest(request), token))
.unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request));
}
_ => {}
};
if PusPacket::subservice(pus_tc) == action::Subservice::ImageRequest as u8 {
@@ -320,4 +329,119 @@ impl PusReceiver {
send_request(ActionRequest::OrientationRequest(AcsSubsystem));
}
}
fn handle_scheduled_tc(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
if pus_tc.user_data().is_none() {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
)
.expect("Sending start failure TM failed");
return;
}
self.update_time_stamp();
match pus_tc.subservice() {
1 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut scheduler = self.scheduler.borrow_mut();
scheduler.enable();
if scheduler.is_enabled() {
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
} else {
panic!("Failed to enable scheduler");
}
drop(scheduler);
}
2 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut scheduler = self.scheduler.borrow_mut();
scheduler.disable();
if !scheduler.is_enabled() {
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
} else {
panic!("Failed to disable scheduler");
}
drop(scheduler);
}
3 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut pool = self
.tc_source
.tc_store
.pool
.write()
.expect("Locking pool failed");
let mut scheduler = self.scheduler.borrow_mut();
scheduler
.reset(pool.as_mut())
.expect("Error resetting TC Pool");
drop(scheduler);
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
}
4 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut pool = self
.tc_source
.tc_store
.pool
.write()
.expect("Locking pool failed");
let mut scheduler = self.scheduler.borrow_mut();
scheduler
.insert_wrapped_tc::<TimeProvider>(pus_tc, pool.as_mut())
.expect("TODO: panic message");
let time =
TimeProvider::from_bytes_with_u16_days(&pus_tc.user_data().unwrap()).unwrap();
drop(scheduler);
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
//let addr = self.tc_source.tc_store.add_pus_tc().unwrap();
//let unix_time = UnixTimestamp::new_only_seconds(self.stamper.unix_seconds());
//let worked = self.scheduler.insert_tc(unix_time, );
}
_ => {
self.verif_reporter
.start_failure(
token,
FailParams::new(
Some(&self.time_stamp),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
),
)
.expect("Sending start failure TM failed");
return;
}
}
}
}
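handle_scheduled_tc above matches bare subservice numbers; for readability, here is the mapping as this commit uses them. The enum is purely illustrative, not part of the diff:

```rust
// Illustrative names for the PUS service 11 subservices handled in
// handle_scheduled_tc above.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum SchedSubservice {
    EnableScheduling = 1,  // scheduler.enable(), verified via is_enabled()
    DisableScheduling = 2, // scheduler.disable()
    ResetScheduling = 3,   // scheduler.reset(pool): drop all queued TCs
    InsertActivity = 4,    // scheduler.insert_wrapped_tc::<TimeProvider>(..)
}
```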

View File

@@ -1,7 +1,7 @@
use crate::hk::HkRequest;
use satrs_core::pus::verification::{TcStateAccepted, VerificationToken};
use crate::action::ActionRequest;
use crate::cam::CameraRequest;
use crate::hk::HkRequest;
use satrs_core::pus::verification::{TcStateAccepted, VerificationToken};
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum Request {

View File

@@ -1,11 +1,14 @@
use satrs_core::events::EventU32;
use satrs_core::hal::host::udp_server::{ReceiveResult, UdpTcServer};
use satrs_core::params::Params;
use std::cell::RefCell;
use std::collections::HashMap;
use std::error::Error;
use std::fmt::{Display, Formatter};
use std::net::SocketAddr;
use std::rc::Rc;
use std::sync::mpsc::{Receiver, SendError, Sender, TryRecvError};
use std::sync::{Arc, LockResult, Mutex};
use std::thread;
use std::time::Duration;
@@ -14,16 +17,13 @@ use crate::pus::PusReceiver;
use crate::requests::RequestWithToken;
use satrs_core::pool::{SharedPool, StoreAddr, StoreError};
use satrs_core::pus::event_man::EventRequestWithToken;
use satrs_core::pus::scheduling::PusScheduler;
use satrs_core::pus::verification::StdVerifReporterWithSender;
use satrs_core::spacepackets::{ecss::PusPacket, tc::PusTc, tm::PusTm, SpHeader};
use satrs_core::tmtc::{
CcsdsDistributor, CcsdsError, PusServiceProvider, ReceivesCcsdsTc, ReceivesEcssPusTc,
};
use satrs_core::{
spacepackets::ecss::PusPacket, spacepackets::tc::PusTc, spacepackets::tm::PusTm,
spacepackets::SpHeader,
};
pub const PUS_APID: u16 = 0x02;
pub struct OtherArgs {
@@ -45,6 +45,12 @@ pub struct TcArgs {
pub tc_receiver: Receiver<StoreAddr>,
}
impl TcArgs {
fn split(self) -> (PusTcSource, Receiver<StoreAddr>) {
(self.tc_source, self.tc_receiver)
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MpscStoreAndSendError {
StoreError(StoreError),
@@ -55,10 +61,10 @@ impl Display for MpscStoreAndSendError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
MpscStoreAndSendError::StoreError(s) => {
write!(f, "store error {}", s)
write!(f, "store error {s}")
}
MpscStoreAndSendError::SendError(s) => {
write!(f, "send error {}", s)
write!(f, "send error {s}")
}
}
}
@@ -156,6 +162,11 @@ impl ReceivesCcsdsTc for PusTcSource {
}
}
pub fn core_tmtc_task(args: OtherArgs, mut tc_args: TcArgs, tm_args: TmArgs) {
let mut scheduler = Rc::new(RefCell::new(
PusScheduler::new_with_current_init_time(Duration::from_secs(5)).unwrap(),
));
let mut sched_clone = scheduler.clone();
let mut pus_receiver = PusReceiver::new(
PUS_APID,
tm_args.tm_sink_sender,
@@ -164,11 +175,15 @@ pub fn core_tmtc_task(args: OtherArgs, mut tc_args: TcArgs, tm_args: TmArgs) {
tc_args.tc_source.clone(),
args.event_request_tx,
args.request_map,
sched_clone,
);
let ccsds_receiver = CcsdsReceiver {
tc_source: tc_args.tc_source.clone(),
};
let ccsds_distributor = CcsdsDistributor::new(Box::new(ccsds_receiver));
let udp_tc_server = UdpTcServer::new(args.sock_addr, 2048, Box::new(ccsds_distributor))
.expect("Creating UDP TMTC server failed");
@@ -177,8 +192,21 @@ pub fn core_tmtc_task(args: OtherArgs, mut tc_args: TcArgs, tm_args: TmArgs) {
tm_rx: tm_args.tm_server_rx,
tm_store: tm_args.tm_store.pool.clone(),
};
//let (mut tc_source, mut tc_receiver) = tc_args.split();
let mut tc_buf: [u8; 4096] = [0; 4096];
loop {
core_tmtc_loop(&mut udp_tmtc_server, &mut tc_args, &mut pus_receiver);
let mut tmtc_sched = scheduler.clone();
core_tmtc_loop(
&mut udp_tmtc_server,
&mut tc_args,
&mut tc_buf,
//&mut tc_source,
//&mut tc_receiver,
&mut pus_receiver,
tmtc_sched,
);
thread::sleep(Duration::from_millis(400));
}
}
@@ -186,8 +214,40 @@ pub fn core_tmtc_task(args: OtherArgs, mut tc_args: TcArgs, tm_args: TmArgs) {
fn core_tmtc_loop(
udp_tmtc_server: &mut UdpTmtcServer,
tc_args: &mut TcArgs,
tc_buf: &mut [u8],
//tc_source: &mut PusTcSource,
//tc_receiver: &mut Receiver<StoreAddr>,
pus_receiver: &mut PusReceiver,
scheduler: Rc<RefCell<PusScheduler>>,
) {
let releaser = |enabled: bool, addr: &StoreAddr| -> bool {
match tc_args.tc_source.tc_source.send(*addr) {
Ok(_) => true,
Err(_) => false,
}
};
let mut pool = tc_args
.tc_source
.tc_store
.pool
.write()
.expect("error locking pool");
let mut scheduler = scheduler.borrow_mut();
scheduler.update_time_from_now().unwrap();
match scheduler.release_telecommands(releaser, pool.as_mut()) {
Ok(released_tcs) => {
if released_tcs > 0 {
println!("{} Tc(s) released from scheduler", released_tcs);
}
}
Err(_) => {}
}
//.expect("error releasing tc");
drop(pool);
drop(scheduler);
while poll_tc_server(udp_tmtc_server) {}
match tc_args.tc_receiver.try_recv() {
Ok(addr) => {
@@ -198,15 +258,17 @@ fn core_tmtc_loop(
.read()
.expect("locking tc pool failed");
let data = pool.read(&addr).expect("reading pool failed");
match PusTc::from_bytes(data) {
tc_buf[0..data.len()].copy_from_slice(data);
drop(pool);
match PusTc::from_bytes(tc_buf) {
Ok((pus_tc, _)) => {
pus_receiver
.handle_pus_tc_packet(pus_tc.service(), pus_tc.sp_header(), &pus_tc)
.ok();
}
Err(e) => {
println!("error creating PUS TC from raw data: {}", e);
println!("raw data: {:x?}", data);
println!("error creating PUS TC from raw data: {e}");
println!("raw data: {tc_buf:x?}");
}
}
}
@@ -226,7 +288,7 @@ fn poll_tc_server(udp_tmtc_server: &mut UdpTmtcServer) -> bool {
Ok(_) => true,
Err(e) => match e {
ReceiveResult::ReceiverError(e) => match e {
CcsdsError::PacketError(e) => {
CcsdsError::ByteConversionError(e) => {
println!("Got packet error: {e:?}");
true
}
@@ -255,7 +317,7 @@ fn core_tm_handling(udp_tmtc_server: &mut UdpTmtcServer, recv_addr: &SocketAddr)
if buf.len() > 9 {
let service = buf[7];
let subservice = buf[8];
println!("Sending PUS TM[{},{}]", service, subservice)
println!("Sending PUS TM[{service},{subservice}]")
} else {
println!("Sending PUS TM");
}