switching to main for some tests

lkoester 2023-02-24 07:29:37 +01:00
parent f145ccaf8e
commit f8cf78340c
16 changed files with 1624 additions and 531 deletions

Cargo.lock (generated, 57 lines changed)

@@ -297,6 +297,7 @@ dependencies = [
"num",
"num-derive",
"num-traits",
"num_enum",
"satrs-core",
"satrs-mib",
"serde",
@@ -315,6 +316,12 @@ dependencies = [
"log",
]
[[package]]
name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.13.1"
@@ -369,6 +376,16 @@ dependencies = [
"cxx-build",
]
[[package]]
name = "indexmap"
version = "1.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
dependencies = [
"autocfg",
"hashbrown 0.12.3",
]
[[package]]
name = "itertools"
version = "0.10.5"
@@ -501,6 +518,15 @@ version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
[[package]]
name = "nom8"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8"
dependencies = [
"memchr",
]
[[package]]
name = "num"
version = "0.4.0"
@@ -613,6 +639,7 @@ version = "0.5.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2be1598bf1c313dcdd12092e3f1920f463462525a21b7b4e11b4168353d0123e"
dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"syn",
@@ -649,6 +676,16 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "proc-macro-crate"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34"
dependencies = [
"once_cell",
"toml_edit",
]
[[package]]
name = "proc-macro2"
version = "1.0.47"
@@ -704,8 +741,9 @@ dependencies = [
"downcast-rs",
"dyn-clone",
"embed-doc-image",
"hashbrown",
"hashbrown 0.13.1",
"num-traits",
"num_enum",
"paste",
"serde",
"spacepackets",
@@ -885,6 +923,23 @@ dependencies = [
"winapi",
]
[[package]]
name = "toml_datetime"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5"
[[package]]
name = "toml_edit"
version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b"
dependencies = [
"indexmap",
"nom8",
"toml_datetime",
]
[[package]]
name = "unicode-ident"
version = "1.0.5"


@@ -27,7 +27,7 @@ num = "0.4"
num-derive = "0.3"
num-traits = "0.2"
byteorder = "1.4"
num_enum = "0.5"
[dependencies.socketcan]
git = "https://github.com/socketcan-rs/socketcan-rs.git"
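The num_enum dependency added above backs the new fallible integer-to-enum conversions in this commit (see the CustomPusServiceId enum in the library crate and its use in the PUS service dispatch further down). A minimal self-contained sketch of that pattern, mirroring the enum this commit adds:

```rust
use num_enum::{IntoPrimitive, TryFromPrimitive};

// Mirrors the CustomPusServiceId enum added in this commit.
#[derive(Copy, Clone, PartialEq, Eq, Debug, TryFromPrimitive, IntoPrimitive)]
#[repr(u8)]
pub enum CustomPusServiceId {
    Mode = 200,
    Health = 201,
}

fn main() {
    // Fallible conversion from a raw PUS service number ...
    match CustomPusServiceId::try_from(200u8) {
        Ok(service) => println!("custom service: {service:?}"),
        Err(e) => println!("not a custom service: {e}"),
    }
    // ... and the lossless conversion back to the raw number.
    let raw: u8 = CustomPusServiceId::Mode.into();
    assert_eq!(raw, 200);
}
```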


@@ -41,3 +41,85 @@
[2023-02-16][12:46:02][main][INFO] Starting Payload Handling task
[2023-02-16][12:46:02][main][INFO] Starting TM funnel task
[2023-02-16][12:46:02][main][INFO] Starting AOCS task
[2023-02-16][13:06:46][main][INFO] Running DemoSat OBSW!
[2023-02-16][13:06:46][main][INFO] Starting TMTC task
[2023-02-16][13:06:46][main][INFO] Starting power task
[2023-02-16][13:06:46][main][INFO] Starting Payload Handling task
[2023-02-16][13:06:46][main][INFO] Starting TM funnel task
[2023-02-16][13:06:46][main][INFO] Starting AOCS task
[2023-02-16][13:24:38][main][INFO] Running DemoSat OBSW!
[2023-02-16][13:24:38][main][INFO] Starting TMTC task
[2023-02-16][13:24:38][main][INFO] Starting power task
[2023-02-16][13:24:38][main][INFO] Starting Payload Handling task
[2023-02-16][13:24:38][main][INFO] Starting TM funnel task
[2023-02-16][13:24:38][main][INFO] Starting AOCS task
[2023-02-16][13:27:13][main][INFO] Running DemoSat OBSW!
[2023-02-16][13:27:13][main][INFO] Starting TMTC task
[2023-02-16][13:27:13][main][INFO] Starting power task
[2023-02-16][13:27:13][main][INFO] Starting Payload Handling task
[2023-02-16][13:27:13][main][INFO] Starting TM funnel task
[2023-02-16][13:27:13][main][INFO] Starting AOCS task
[2023-02-16][13:27:17][TMTCThread][INFO] Received PUS ping command TC[17,1]
[2023-02-16][13:27:17][TMTCThread][INFO] Sending ping reply PUS TM[17,2]
[2023-02-16][13:27:17][TMTCThread][INFO] Sending PUS TM[1,1]
[2023-02-16][13:27:17][TMTCThread][INFO] Sending PUS TM[1,3]
[2023-02-16][13:27:17][TMTCThread][INFO] Sending PUS TM[17,2]
[2023-02-16][13:27:17][TMTCThread][INFO] Sending PUS TM[1,7]
[2023-02-16][13:27:25][PLDThread][DEBUG] PictureRequest
[2023-02-16][13:27:25][PLDThread][DEBUG] switching power
[2023-02-16][13:27:25][TMTCThread][INFO] Sending PUS TM[1,1]
[2023-02-16][13:27:25][TMTCThread][INFO] Sending PUS TM[1,3]
[2023-02-16][14:51:51][main][INFO] Running DemoSat OBSW!
[2023-02-16][14:51:51][main][INFO] Starting TMTC task
[2023-02-16][14:51:51][main][INFO] Starting power task
[2023-02-16][14:51:51][main][INFO] Starting Payload Handling task
[2023-02-16][14:51:51][main][INFO] Starting TM funnel task
[2023-02-16][14:51:51][main][INFO] Starting AOCS task
[2023-02-17][13:17:47][main][INFO] Running DemoSat OBSW!
[2023-02-17][13:17:47][main][INFO] Starting TMTC task
[2023-02-17][13:17:47][main][INFO] Starting power task
[2023-02-17][13:17:47][main][INFO] Starting Payload Handling task
[2023-02-17][13:17:47][main][INFO] Starting TM funnel task
[2023-02-17][13:17:47][main][INFO] Starting AOCS task
[2023-02-17][13:18:25][main][INFO] Running DemoSat OBSW!
[2023-02-17][13:18:25][main][INFO] Starting TMTC task
[2023-02-17][13:18:25][main][INFO] Starting Power Handling task
[2023-02-17][13:18:25][main][INFO] Starting Payload Handling task
[2023-02-17][13:18:25][main][INFO] Starting TM funnel task
[2023-02-17][13:18:25][main][INFO] Starting AOCS task
[2023-02-17][13:19:12][TMTCThread][INFO] Sending PUS TM[1,1]
[2023-02-17][13:19:12][TMTCThread][INFO] Sending PUS TM[1,3]
[2023-02-17][13:19:12][TMTCThread][INFO] Sending PUS TM[3,25]
[2023-02-17][13:19:12][TMTCThread][INFO] Sending PUS TM[1,7]
[2023-02-17][13:19:23][TMTCThread][INFO] Received PUS ping command TC[17,1]
[2023-02-17][13:19:23][TMTCThread][INFO] Sending ping reply PUS TM[17,2]
[2023-02-17][13:19:23][TMTCThread][INFO] Sending PUS TM[1,1]
[2023-02-17][13:19:23][TMTCThread][INFO] Sending PUS TM[1,3]
[2023-02-17][13:19:23][TMTCThread][INFO] Sending PUS TM[17,2]
[2023-02-17][13:19:23][TMTCThread][INFO] Sending PUS TM[1,7]
[2023-02-17][14:27:01][TMTCThread][INFO] Sending PUS TM[1,1]
[2023-02-17][14:27:01][TMTCThread][INFO] Sending PUS TM[1,3]
[2023-02-17][14:27:02][TMTCThread][INFO] Sending PUS TM[3,25]
[2023-02-17][14:27:02][TMTCThread][INFO] Sending PUS TM[1,7]
[2023-02-17][14:28:00][TMTCThread][INFO] Sending PUS TM[1,1]
[2023-02-17][14:28:00][TMTCThread][INFO] Sending PUS TM[1,3]
[2023-02-17][14:28:00][TMTCThread][INFO] Sending PUS TM[3,25]
[2023-02-17][14:28:00][TMTCThread][INFO] Sending PUS TM[1,7]
[2023-02-20][14:04:11][main][INFO] Running DemoSat OBSW!
[2023-02-20][14:04:11][main][INFO] Starting TMTC task
[2023-02-20][14:04:11][main][INFO] Starting Power Handling task
[2023-02-20][14:04:11][main][INFO] Starting Payload Handling task
[2023-02-20][14:04:11][main][INFO] Starting TM funnel task
[2023-02-20][14:04:11][main][INFO] Starting AOCS task
[2023-02-20][14:04:46][TMTCThread][INFO] Received PUS ping command TC[17,1]
[2023-02-20][14:04:46][TMTCThread][INFO] Sending ping reply PUS TM[17,2]
[2023-02-20][14:04:46][TMTCThread][INFO] Sending PUS TM[1,1]
[2023-02-20][14:04:46][TMTCThread][INFO] Sending PUS TM[1,3]
[2023-02-20][14:04:46][TMTCThread][INFO] Sending PUS TM[17,2]
[2023-02-20][14:04:46][TMTCThread][INFO] Sending PUS TM[1,7]
[2023-02-21][10:52:36][main][INFO] Running DemoSat OBSW!
[2023-02-21][10:52:36][main][INFO] Starting TMTC task
[2023-02-21][10:52:36][main][INFO] Starting Power Handling task
[2023-02-21][10:52:36][main][INFO] Starting Payload Handling task
[2023-02-21][10:52:36][main][INFO] Starting TM funnel task
[2023-02-21][10:52:36][main][INFO] Starting AOCS task


@@ -223,7 +223,7 @@ def read_addressable_id(data: bytes) -> tuple[int, int]:
return (target_id, set_id)
class CustomServiceList(enum.IntEnum):
class CustomServiceList(enum.StrEnum):
ACS = "acs"
class RequestTargetId(enum.IntEnum):
@@ -331,6 +331,8 @@ class TcHandler(TcHandlerBase):
if service == CoreServiceList.SERVICE_3:
if op_code in HkOpCodes.GENERATE_ONE_SHOT:
q.add_log_cmd("Sending HK one shot request")
tc = generate_one_hk_command(make_addressable_id(RequestTargetId.ACS, AcsHkIds.MGM_SET))
q.add_log_cmd(tc)
q.add_pus_tc(
generate_one_hk_command(
make_addressable_id(RequestTargetId.ACS, AcsHkIds.MGM_SET)


@@ -1,7 +1,7 @@
{
"com_if": "udp",
"tcpip_udp_ip_addr": "192.168.1.116",
"tcpip_udp_ip_addr_windows": "192.168.1.5",
"tcpip_udp_ip_addr_raspi": "192.168.1.116",
"tcpip_udp_ip_addr": "192.168.56.1",
"tcpip_udp_port": 7301,
"tcpip_udp_recv_max_size": 1500
}


@@ -1 +1,442 @@
use std::sync::{Arc, Mutex};
use crate::can_ids::{DeviceId, PackageId, PackageModel};
use crate::power_handler::{DeviceState, PowerSwitcher};
use crate::requests::{Request, RequestWithToken};
use satrs_core::power::SwitchId;
use std::sync::mpsc::{channel, Receiver, Sender};
use std::u32;
use serde::{Deserialize, Serialize};
use byteorder::{LittleEndian, ByteOrder};
use num_derive::ToPrimitive;
use satrs_core::hk::HkRequest;
use satrs_core::mode::ModeRequest;
use satrs_core::pool::StoreAddr;
use satrs_core::pus::verification::{StdVerifSenderError, TcStateAccepted, TcStateNone, VerificationReporterWithSender, VerificationToken};
use satrs_core::seq_count::SeqCountProviderSyncClonable;
use crate::action::ActionRequest;
use crate::hk::{AOCSHousekeeper, AOCSSensorData};
use crate::tmtc::{TmStore};
#[derive(ToPrimitive)]
pub enum AOCSSensorMode {
Idle = 0,
SendingData = 1,
}
pub trait AOCSSensorHandler {
type Error;
fn get_package_id(&mut self) -> Result<PackageId, Self::Error>;
fn send_message(&mut self, id: PackageId, buf: &[u8]) -> Result<(), Self::Error>;
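// The default implementations below encode a one-byte request convention
// on the CAN bus: 0 = disable periodic data, 1 = enable periodic data,
// 2 = one-shot data request.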
fn enable_sensor_data_generation(&mut self) -> Result<(), Self::Error> {
let id = self.get_package_id()?;
self.send_message(id, &[1])
}
fn disable_sensor_data_generation(&mut self) -> Result<(), Self::Error> {
let id = self.get_package_id()?;
self.send_message(id, &[0])
}
fn request_sensor_data_oneshot(&mut self) -> Result<(), Self::Error> {
let id = self.get_package_id()?;
self.send_message(id, &[2])
}
}
#[derive(Debug, Copy, Clone, Default, Serialize, Deserialize)]
pub struct MGMData {
axis_1: f64,
axis_2: f64,
axis_3: f64,
}
impl MGMData {
pub fn from_floats(axis_1: f64,
axis_2: f64,
axis_3: f64,
) -> MGMData {
MGMData{axis_1, axis_2, axis_3}
}
pub fn new() -> MGMData { MGMData{axis_1: 0.0, axis_2: 0.0, axis_3: 0.0} }
pub fn update(&mut self,
axis_1: f64,
axis_2: f64,
axis_3: f64
) {
self.axis_1 = axis_1;
self.axis_2 = axis_2;
self.axis_3 = axis_3;
}
pub fn to_array(&self) -> [f64; 3] {
[self.axis_1, self.axis_2, self.axis_3]
}
pub fn to_tuple(&self) -> (f64, f64, f64) {
(self.axis_1, self.axis_2, self.axis_3)
}
}
#[derive(Debug, Copy, Clone, Default, Serialize, Deserialize)]
pub struct CSSData {
voltage_1: f64,
voltage_2: f64,
voltage_3: f64,
voltage_4: f64,
voltage_5: f64,
voltage_6: f64,
}
impl CSSData {
pub fn from_floats(voltage_1: f64,
voltage_2: f64,
voltage_3: f64,
voltage_4: f64,
voltage_5: f64,
voltage_6: f64,
) -> CSSData {
CSSData{voltage_1, voltage_2, voltage_3, voltage_4, voltage_5, voltage_6}
}
pub fn new() -> CSSData { CSSData{voltage_1: 0.0, voltage_2: 0.0, voltage_3: 0.0, voltage_4: 0.0, voltage_5: 0.0, voltage_6: 0.0} }
pub fn update(&mut self,
voltage_1: f64,
voltage_2: f64,
voltage_3: f64,
voltage_4: f64,
voltage_5: f64,
voltage_6: f64,
) {
self.voltage_1 = voltage_1;
self.voltage_2 = voltage_2;
self.voltage_3 = voltage_3;
self.voltage_4 = voltage_4;
self.voltage_5 = voltage_5;
self.voltage_6 = voltage_6;
}
pub fn to_array(&self) -> [f64; 6] {
[self.voltage_1, self.voltage_2, self.voltage_3, self.voltage_4, self.voltage_5, self.voltage_6]
}
pub fn to_tuple(&self) -> (f64, f64, f64, f64, f64, f64) {
(self.voltage_1, self.voltage_2, self.voltage_3, self.voltage_4, self.voltage_5, self.voltage_6)
}
}
pub struct CSSHandler {
power_switcher: PowerSwitcher,
device_id: DeviceId,
switch_id: SwitchId,
device_state: DeviceState,
device_mode: AOCSSensorMode,
css_data: Arc<Mutex<CSSData>>,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
request_rx: Receiver<RequestWithToken>,
}
impl AOCSSensorHandler for CSSHandler {
type Error = ();
fn get_package_id(&mut self) -> Result<PackageId, Self::Error> {
Ok(PackageId::AOCSDataRequestSunSensor1)
}
fn send_message(&mut self, id: PackageId, buf: &[u8]) -> Result<(), Self::Error> {
self.can_tx.send(PackageModel::new(id, buf).unwrap()).unwrap();
return Ok(());
}
}
impl CSSHandler {
pub fn new(
power_switcher: PowerSwitcher,
device_id: DeviceId,
switch_id: SwitchId,
device_state: DeviceState,
device_mode: AOCSSensorMode,
css_data: Arc<Mutex<CSSData>>,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
request_rx: Receiver<RequestWithToken>,
) -> CSSHandler {
// Use the shared handle passed in by the caller so that get_data_ref()
// hands out the same underlying CSSData instance.
CSSHandler{power_switcher, device_id, switch_id, device_state, device_mode, css_data, can_tx, can_rx, request_rx}
}
pub fn get_data_ref(&mut self) -> Arc<Mutex<CSSData>> {
self.css_data.clone()
}
pub fn css_core_task(&mut self) {
self.handle_request_messages();
self.handle_sensor();
}
fn handle_request_messages(&mut self) {
// try_recv() returns Err when no request is pending; unwrapping it here
// would panic on every idle cycle, so handle the empty case gracefully.
if let Ok(request) = self.request_rx.try_recv() {
let token = request.1;
match request.0 {
Request::HkRequest(_) => {}
Request::ModeRequest(request) => {
self.handle_mode_request(request, token);
}
Request::ActionRequest(request) => {
self.handle_action_request(request, token)
}
}
}
}
// Stub so the handle_sensor() call in css_core_task() resolves; actually
// reading the sun sensor packages from can_rx is still unimplemented.
fn handle_sensor(&mut self) {}
fn handle_mode_request(&mut self, request: ModeRequest, token: VerificationToken<TcStateAccepted>) {
match request {
ModeRequest::SetMode(mode) => {
match mode.mode() {
0 => {}
_ => {}
}
}
ModeRequest::ReadMode(_) => {}
ModeRequest::AnnounceMode(_) => {}
ModeRequest::AnnounceModeRecursive(_) => {}
}
}
fn handle_action_request(&mut self, request: ActionRequest, token: VerificationToken<TcStateAccepted>) {
match request {
ActionRequest::ImageRequest(target_id) => {
}
ActionRequest::OrientationRequest(_) => {}
ActionRequest::PointingRequest(_) => {}
}
}
}
pub struct MGMHandler {
power_switcher: PowerSwitcher,
device_id: DeviceId,
switch_id: SwitchId,
device_state: DeviceState,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
mode: AOCSSensorMode,
request_rx: Receiver<RequestWithToken>,
mgm_data: Arc<Mutex<MGMData>>,
}
impl AOCSSensorHandler for MGMHandler {
type Error = ();
fn get_package_id(&mut self) -> Result<PackageId, Self::Error> {
Ok(PackageId::AOCSDataRequestMGM1)
}
fn send_message(&mut self, id: PackageId, buf: &[u8]) -> Result<(), Self::Error> {
self.can_tx.send(PackageModel::new(id, buf).unwrap()).unwrap();
return Ok(());
}
}
impl MGMHandler {
pub fn new(
power_switcher: PowerSwitcher,
device_id: DeviceId,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
action_rx: Receiver<RequestWithToken>,
) -> MGMHandler {
let switch_id = device_id as u16;
MGMHandler {
power_switcher,
device_id,
switch_id,
device_state: DeviceState::Off,
can_tx,
can_rx,
mode: AOCSSensorMode::Idle,
request_rx: action_rx,
mgm_data: Arc::new(Mutex::new(MGMData::new())),
}
}
pub fn get_data_ref(&mut self) -> Arc<Mutex<MGMData>> {
self.mgm_data.clone()
}
pub fn periodic_op(&mut self) {
self.update_mode();
self.handle_requests();
self.read_sensor_data();
}
pub fn update_mode(&mut self) {
}
pub fn handle_requests(&mut self) {
if self.device_state == DeviceState::On {
if let Ok(request) = self.request_rx.try_recv() {
match request.0 {
Request::HkRequest(hk_req) => {
//self.handle_hk_request(hk_req);
}
Request::ActionRequest(_action_request) => {
//self.handle_action_request(action_request);
}
Request::ModeRequest(_mode_request) => {
//self.handle_mode_request(mode_request);
}
}
}
}
}
pub fn handle_mode_request(&mut self, mode_request: ModeRequest) {
match mode_request{
ModeRequest::SetMode(_) => {}
ModeRequest::ReadMode(_) => {}
ModeRequest::AnnounceMode(_) => {}
ModeRequest::AnnounceModeRecursive(_) => {}
}
}
/*
pub fn handle_hk_request(&mut self, hk_req: HkRequest) {
match hk_req {
HkRequest::OneShot(_) => {
self.can_tx.send(PackageModel::new(PackageId::AOCSDataRequestMGM1, &[2]).unwrap()).unwrap();
}
HkRequest::Enable(_) => {
if !self.sensor_data_enabled {
self.sensor_data_enabled = true;
self.can_tx.send(PackageModel::new(PackageId::AOCSDataRequestMGM1, &[1]).unwrap()).unwrap();
}
}
HkRequest::Disable(_) => {
if self.sensor_data_enabled {
self.sensor_data_enabled = false;
self.can_tx.send(PackageModel::new(PackageId::AOCSDataRequestMGM1, &[0]).unwrap()).unwrap();
}
}
HkRequest::ModifyCollectionInterval(_, _) => {}
}
}
*/
pub fn read_sensor_data(&mut self) {
if let Ok(package) = self.can_rx.try_recv() {
let float_data = self.decode_sensor_data(package.data());
if let Ok(mut mgm_data) = self.mgm_data.lock() {
match package.package_id() {
PackageId::AOCSDataMGM1 => { mgm_data.axis_1 = float_data }
PackageId::AOCSDataMGM2 => { mgm_data.axis_2 = float_data }
PackageId::AOCSDataMGM3 => { mgm_data.axis_3 = float_data }
_ => {}
}
}
}
}
pub fn decode_sensor_data(&mut self, buf: &[u8]) -> f64 {
LittleEndian::read_f64(buf)
}
//pub fn handle_action_request(&mut self, action_request: ActionRequest) {}
}
pub struct AOCSController {
aocs_housekeeper: AOCSHousekeeper,
mgm_handler: MGMHandler,
request_rx: Receiver<RequestWithToken>,
hk_request_tx: Sender<RequestWithToken>,
mgm_request_tx: Sender<RequestWithToken>,
}
impl AOCSController {
pub fn new(
sensor_data_pool: Arc<Mutex<AOCSSensorData>>,
seq_count_provider: SeqCountProviderSyncClonable,
aocs_can_receiver_rx: Receiver<PackageModel>,
aocs_can_sender_tx: Sender<PackageModel>,
mgm_can_receiver_rx: Receiver<PackageModel>,
aocs_tm_store: TmStore,
aocs_tm_funnel_tx: Sender<StoreAddr>,
verif_reporter: VerificationReporterWithSender<StdVerifSenderError>,
power_switcher: PowerSwitcher,
aocs_request_rx: Receiver<RequestWithToken>,
) -> AOCSController {
let mgm_can_sender_tx = aocs_can_sender_tx.clone();
let (mgm_request_tx, mgm_request_rx) = channel();
let (hk_request_tx, hk_request_rx) = channel();
let aocs_housekeeper = AOCSHousekeeper::new(sensor_data_pool, hk_request_rx, seq_count_provider, aocs_tm_store, aocs_tm_funnel_tx, verif_reporter);
let mgm_handler = MGMHandler::new(power_switcher, DeviceId::MGM1, mgm_can_sender_tx, mgm_can_receiver_rx, mgm_request_rx);
AOCSController{aocs_housekeeper, mgm_handler, request_rx: aocs_request_rx, hk_request_tx, mgm_request_tx}
}
pub fn periodic_op(&mut self) {
self.update_sensors();
self.process_requests();
}
pub fn process_requests(&mut self) {
if let Ok(request) = self.request_rx.try_recv() {
match request.0 {
Request::HkRequest(hk_request) => {
self.handle_hk_request(hk_request);
}
Request::ModeRequest(mode_request) => {
self.handle_mode_request(mode_request);
}
Request::ActionRequest(_) => {}
}
}
}
pub fn handle_hk_request(&mut self, request: HkRequest) {
match request {
HkRequest::OneShot(id) => {
self.aocs_housekeeper.one_shot_hk(id);
}
HkRequest::Enable(id) => {
self.aocs_housekeeper.enable_hk(id);
}
HkRequest::Disable(id) => {
self.aocs_housekeeper.disable_hk(id);
}
HkRequest::ModifyCollectionInterval(_, _) => {}
}
}
pub fn handle_mode_request(&mut self, mode_request: ModeRequest) {
match mode_request {
ModeRequest::SetMode(mode_command) => {
// if let mode_command.target_id
match mode_command.mode() {
0 => self.set_mode_off(),
1 => self.set_mode_on(),
_ => {}
}
}
ModeRequest::ReadMode(_) => {}
ModeRequest::AnnounceMode(_) => {}
ModeRequest::AnnounceModeRecursive(_) => {}
}
}
pub fn set_mode_off(&mut self) {}
pub fn set_mode_on(&mut self) {}
pub fn update_sensors(&mut self) {
self.mgm_handler.periodic_op();
}
}
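The handlers above receive each sensor value as an 8-byte little-endian float inside a CAN payload. A tiny round-trip sketch of the byteorder decoding used by decode_sensor_data():

```rust
use byteorder::{ByteOrder, LittleEndian};

fn main() {
    // Encode one f64 the way a sensor node would place it into a payload ...
    let mut payload = [0u8; 8];
    LittleEndian::write_f64(&mut payload, 0.25);
    // ... and decode it exactly like MGMHandler::decode_sensor_data() does.
    assert_eq!(LittleEndian::read_f64(&payload), 0.25);
}
```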


@@ -1,206 +0,0 @@
use std::sync::{Arc, Mutex};
use crate::can_ids::{DeviceId, PackageId, PackageModel};
use crate::hk::HkRequest;
use crate::power_handler::{DeviceState, PowerSwitcher};
use crate::requests::{Request, RequestWithToken};
use satrs_core::power::SwitchId;
use std::sync::mpsc::{Receiver, Sender};
use serde::{Deserialize, Serialize};
use byteorder::{LittleEndian, ByteOrder};
use crate::can_ids::PackageId::AOCSDataRequestMGM1;
pub enum AocsSensorMode {
Idle,
SendingData,
}
pub trait AocsSensorHandler {
type Error;
fn get_package_id(&mut self) -> Result<PackageId, Self::Error>;
fn send_message(&mut self, id: PackageId, buf: &[u8]) -> Result<(), Self::Error>;
fn enable_sensor_data_generation(&mut self) -> Result<(), Self::Error> {
let id = self.get_package_id()?;
self.send_message(id, &[1])
}
fn disable_sensor_data_generation(&mut self) -> Result<(), Self::Error> {
let id = self.get_package_id()?;
self.send_message(id, &[0])
}
fn request_sensor_data_oneshot(&mut self) -> Result<(), Self::Error> {
let id = self.get_package_id()?;
self.send_message(id, &[2])
}
}
#[derive(Debug, Copy, Clone, Default, Serialize, Deserialize)]
pub struct MGMData {
axis_1: f64,
axis_2: f64,
axis_3: f64,
}
impl MGMData {
pub fn from_floats(axis_1: f64,
axis_2: f64,
axis_3: f64,
) -> MGMData {
MGMData{axis_1, axis_2, axis_3}
}
pub fn new() -> MGMData { MGMData{axis_1: 0.0, axis_2: 0.0, axis_3: 0.0} }
pub fn update(&mut self,
axis_1: f64,
axis_2: f64,
axis_3: f64
) {
self.axis_1 = axis_1;
self.axis_2 = axis_2;
self.axis_3 = axis_3;
}
pub fn to_array(&self) -> [f64; 3] {
[self.axis_1, self.axis_2, self.axis_3]
}
pub fn to_tuple(&self) -> (f64, f64, f64) {
(self.axis_1, self.axis_2, self.axis_3)
}
}
pub struct MGMHandler {
power_switcher: PowerSwitcher,
device_id: DeviceId,
switch_id: SwitchId,
device_state: DeviceState,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
mode: AocsSensorMode,
mode_rx: Receiver<AocsSensorMode>,
action_rx: Receiver<RequestWithToken>,
mgm_data: Arc<Mutex<MGMData>>,
sensor_data_enabled: bool,
}
impl AocsSensorHandler for MGMHandler {
type Error = ();
fn get_package_id(&mut self) -> Result<PackageId, Self::Error> {
return match self.device_id {
DeviceId::MGM1 => Ok(PackageId::AOCSDataRequestMGM1),
DeviceId::MGM2 => Ok(PackageId::AOCSDataRequestMGM2),
DeviceId::MGM3 => Ok(PackageId::AOCSDataRequestMGM3),
DeviceId::MGM4 => Ok(PackageId::AOCSDataRequestMGM4),
_ => Err(()),
};
}
fn send_message(&mut self, id: PackageId, buf: &[u8]) -> Result<(), Self::Error> {
self.can_tx.send(PackageModel::new(id, buf).unwrap()).unwrap();
return Ok(());
}
}
impl MGMHandler {
pub fn new(
power_switcher: PowerSwitcher,
device_id: DeviceId,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
mode_rx: Receiver<AocsSensorMode>,
action_rx: Receiver<RequestWithToken>,
) -> MGMHandler {
let switch_id = device_id as u16;
MGMHandler {
power_switcher,
device_id,
switch_id,
device_state: DeviceState::Off,
can_tx,
can_rx,
mode: AocsSensorMode::Idle,
mode_rx,
action_rx,
mgm_data: Arc::new(Mutex::new(MGMData::new())),
sensor_data_enabled: false,
}
}
pub fn get_data_ref(&mut self) -> Arc<Mutex<MGMData>> {
self.mgm_data.clone()
}
pub fn periodic_op(&mut self) {
self.update_mode();
self.handle_requests();
self.read_sensor_data();
}
pub fn update_mode(&mut self) {
if self.device_state == DeviceState::On {
if let Ok(mode) = self.mode_rx.try_recv() {
self.mode = mode;
}
}
}
pub fn handle_requests(&mut self) {
if self.device_state == DeviceState::On {
if let Ok(request) = self.action_rx.try_recv() {
match request.0 {
Request::HkRequest(hk_req) => {
self.handle_hk_request(hk_req);
}
Request::ActionRequest(_action_request) => {
//self.handle_action_request(action_request);
}
}
}
}
}
pub fn handle_hk_request(&mut self, hk_req: HkRequest) {
match hk_req {
HkRequest::OneShot(_) => {
self.can_tx.send(PackageModel::new(PackageId::AOCSDataRequestMGM1, &[2]).unwrap()).unwrap();
}
HkRequest::Enable(_) => {
if !self.sensor_data_enabled {
self.sensor_data_enabled = true;
self.can_tx.send(PackageModel::new(PackageId::AOCSDataRequestMGM1, &[1]).unwrap()).unwrap();
}
}
HkRequest::Disable(_) => {
if self.sensor_data_enabled {
self.sensor_data_enabled = false;
self.can_tx.send(PackageModel::new(PackageId::AOCSDataRequestMGM1, &[0]).unwrap()).unwrap();
}
}
HkRequest::ModifyCollectionInterval(_, _) => {}
}
}
pub fn read_sensor_data(&mut self) {
if let Ok(package) = self.can_rx.try_recv() {
let float_data = self.decode_sensor_data(package.data());
if let Ok(mut mgm_data) = self.mgm_data.lock() {
match package.package_id() {
PackageId::AOCSDataMGM1 => { mgm_data.axis_1 = float_data }
PackageId::AOCSDataMGM2 => { mgm_data.axis_2 = float_data }
PackageId::AOCSDataMGM3 => { mgm_data.axis_3 = float_data }
_ => {}
}
}
}
}
pub fn decode_sensor_data(&mut self, buf: &[u8]) -> f64 {
LittleEndian::read_f64(&buf)
}
//pub fn handle_action_request(&mut self, action_request: ActionRequest) {}
}


@@ -56,7 +56,10 @@ pub enum PackageId {
AOCSDataSunSensor4 = 68,
AOCSDataSunSensor5 = 69,
AOCSDataSunSensor6 = 70,
AOCSDataStarTracker = 71,
AOCSDataStarTracker1 = 71,
AOCSDataStarTracker2 = 72,
AOCSDataStarTracker3 = 73,
AOCSDataStarTracker4 = 74,
CameraImageRequest = 101,
CameraImageRequestConfirmation = 102,
CameraImageExecutionStart = 103,
@@ -222,6 +225,9 @@ pub fn load_package_ids() -> HashMap<PackageId, SenderReceiverThread> {
SenderReceiverThread::new(DeviceId::SunSensor5, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::SunSensor6, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::StarTracker, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::StarTracker, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::StarTracker, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::StarTracker, DeviceId::OBC, ThreadId::AOCSThread),
SenderReceiverThread::new(DeviceId::OBC, DeviceId::Camera, ThreadId::PLDThread),
SenderReceiverThread::new(DeviceId::Camera, DeviceId::OBC, ThreadId::PLDThread),
SenderReceiverThread::new(DeviceId::Camera, DeviceId::OBC, ThreadId::PLDThread),


@@ -1,4 +1,4 @@
use crate::aocs_handler::{MGMData};
use crate::aocs::{CSSData, MGMData};
use crate::requests::Request;
use crate::requests::RequestWithToken;
use crate::tmtc::TmStore;
@@ -18,6 +18,7 @@ use serde::{Deserialize, Serialize};
use std::ops::{Deref};
use std::sync::mpsc::{Receiver, Sender};
use std::sync::{Arc, Mutex};
use satrs_core::hk::HkRequest;
pub type CollectionIntervalFactor = u32;
@@ -27,27 +28,19 @@ pub enum AocsHkIds {
TestMgmSet = 2,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum HkRequest {
OneShot(AddressableId),
Enable(AddressableId),
Disable(AddressableId),
ModifyCollectionInterval(AddressableId, CollectionIntervalFactor),
}
#[derive(Serialize, Deserialize)]
pub struct AocsSensorData {
pub struct AOCSSensorData {
mgm_data: MGMData, // Voltage for 3 axis
css_data: [f64; 18], // Voltage for 18 sun sensors
css_data: CSSData, // Voltage for 18 sun sensors
str_data: [f64; 4], // Quaternion for position of satellite
}
impl AocsSensorData {
pub fn new() -> AocsSensorData {
impl AOCSSensorData {
pub fn new() -> AOCSSensorData {
let mgm_data = MGMData::default();
let css_data = [0.0; 18];
let css_data = CSSData::default();
let str_data = [0.0; 4];
AocsSensorData {
AOCSSensorData {
mgm_data,
css_data,
str_data,
@@ -59,8 +52,9 @@ impl AocsSensorData {
self.mgm_data = *data;
}
pub fn write_css_data(&mut self, css_data: [f64; 18]) {
self.css_data = css_data;
pub fn update_css_data(&mut self, css_data: &Arc<Mutex<CSSData>>) {
let data = css_data.lock().unwrap();
self.css_data = *data;
}
pub fn write_str_data(&mut self, str_data: [f64; 4]) {
@@ -71,7 +65,7 @@ impl AocsSensorData {
self.mgm_data
}
pub fn read_css_data(&mut self) -> [f64; 18] {
pub fn read_css_data(&mut self) -> CSSData {
self.css_data
}
@@ -80,38 +74,42 @@ impl AocsSensorData {
}
}
pub struct AocsHousekeeper {
sensor_data_pool: Arc<Mutex<AocsSensorData>>,
action_rx: Receiver<RequestWithToken>,
pub struct AOCSHousekeeper {
sensor_data_pool: Arc<Mutex<AOCSSensorData>>,
request_rx: Receiver<RequestWithToken>,
seq_count_provider: SeqCountProviderSyncClonable,
aocs_tm_store: TmStore,
aocs_tm_funnel_tx: Sender<StoreAddr>,
verif_reporter: VerificationReporterWithSender<StdVerifSenderError>,
periodic_hk_enabled: bool,
periodic_hk_id: Option<AddressableId>,
}
impl AocsHousekeeper {
impl AOCSHousekeeper {
pub fn new(
sensor_data_pool: Arc<Mutex<AocsSensorData>>,
action_rx: Receiver<RequestWithToken>,
sensor_data_pool: Arc<Mutex<AOCSSensorData>>,
request_rx: Receiver<RequestWithToken>,
seq_count_provider: SeqCountProviderSyncClonable,
aocs_tm_store: TmStore,
aocs_tm_funnel_tx: Sender<StoreAddr>,
verif_reporter: VerificationReporterWithSender<StdVerifSenderError>,
) -> AocsHousekeeper {
AocsHousekeeper {
) -> AOCSHousekeeper {
AOCSHousekeeper {
sensor_data_pool,
action_rx,
request_rx,
seq_count_provider,
aocs_tm_store,
aocs_tm_funnel_tx,
verif_reporter,
periodic_hk_enabled: false,
periodic_hk_id: None,
}
}
pub fn handle_hk_request(&mut self) {
let mut time_stamp_buf: [u8; 7] = [0; 7];
if let Ok(request_with_token) = self.action_rx.try_recv() {
if let Ok(request_with_token) = self.request_rx.try_recv() {
if let Request::HkRequest(hk_req) = request_with_token.0 {
let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap();
cds_stamp.write_to_bytes(&mut time_stamp_buf).unwrap();
@@ -148,18 +146,31 @@ impl AocsHousekeeper {
}
}
pub fn periodic_hk(&mut self) {
if self.periodic_hk_enabled {
let json_string = self.aocs_data_to_str();
if let Some(id) = self.periodic_hk_id {
self.send_hk_packet(id, &json_string);
}
}
}
pub fn one_shot_hk(&mut self, id: AddressableId) -> Result<(), ()> {
let json_string = self.aocs_data_to_str();
self.send_hk_packet(id, &json_string);
Ok(())
}
pub fn enable_hk(&mut self, _id: AddressableId) -> Result<(), ()> {
Ok(())
pub fn enable_hk(&mut self, id: AddressableId) -> Result<(), ()> {
self.periodic_hk_enabled = true;
self.periodic_hk_id = Some(id);
return Ok(())
}
pub fn disable_hk(&mut self, _id: AddressableId) -> Result<(), ()> {
Ok(())
self.periodic_hk_enabled = false;
self.periodic_hk_id = None;
return Ok(())
}
pub fn aocs_data_to_str(&mut self) -> String {
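aocs_data_to_str() converts the shared AOCSSensorData (which derives Serialize above) into a string for the HK packet. A simplified sketch of that serialization, with the caveat that serde_json as the backing serializer and the flattened field types are assumptions here; the manifest only shows serde itself:

```rust
use serde::Serialize;

// Simplified stand-in for AOCSSensorData; the real struct nests
// MGMData and CSSData.
#[derive(Serialize, Default)]
struct SensorDataSnapshot {
    mgm_data: [f64; 3],
    css_data: [f64; 6],
    str_data: [f64; 4],
}

fn main() {
    let snapshot = SensorDataSnapshot::default();
    // serde_json as the serializer is an assumption, not confirmed by the diff.
    let json_string = serde_json::to_string(&snapshot).expect("serialization failed");
    // The resulting string would be written into the HK TM packet.
    println!("{json_string}");
}
```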


@@ -1,8 +1,20 @@
use std::net::Ipv4Addr;
use num_enum::{IntoPrimitive, TryFromPrimitive};
use satrs_core::events::{EventU32TypedSev, SeverityInfo};
use satrs_mib::res_code::{ResultU16, ResultU16Info};
use satrs_mib::resultcode;
#[derive(Copy, Clone, PartialEq, Eq, Debug, TryFromPrimitive, IntoPrimitive)]
#[repr(u8)]
pub enum CustomPusServiceId {
Mode = 200,
Health = 201,
}
pub const TEST_EVENT: EventU32TypedSev<SeverityInfo> =
EventU32TypedSev::<SeverityInfo>::const_new(0, 0);
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum RequestTargetId {
AcsSubsystem = 1,
@@ -25,8 +37,14 @@ pub mod tmtc_err {
pub const INVALID_PUS_SERVICE: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 0);
#[resultcode]
pub const INVALID_PUS_SUBSERVICE: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 1);
#[resultcode]
pub const PUS_SERVICE_NOT_IMPLEMENTED: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 2);
#[resultcode(info = "Not enough data inside the TC application data field")]
#[resultcode(
info = "Not enough data inside the TC application data field. Optionally includes: \
8 bytes of failure data containing 2 failure parameters, \
P1 (u32 big endian): Expected data length, P2: Found data length"
)]
pub const NOT_ENOUGH_APP_DATA: ResultU16 = ResultU16::const_new(GroupId::Tmtc as u8, 3); // code 2 is taken by PUS_SERVICE_NOT_IMPLEMENTED above
pub const TMTC_RESULTS: &[ResultU16Info] = &[
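The extended NOT_ENOUGH_APP_DATA description above defines an optional 8-byte failure data layout with two u32 parameters (expected and found length). A small sketch of packing that field; the helper name is hypothetical:

```rust
// Hypothetical helper: packs the two failure parameters described by
// NOT_ENOUGH_APP_DATA (P1 = expected length, P2 = found length, both
// u32 big endian) into the 8-byte failure data field.
fn pack_length_mismatch(expected: u32, found: u32) -> [u8; 8] {
    let mut failure_data = [0u8; 8];
    failure_data[0..4].copy_from_slice(&expected.to_be_bytes());
    failure_data[4..8].copy_from_slice(&found.to_be_bytes());
    failure_data
}

fn main() {
    let failure_data = pack_length_mismatch(8, 3);
    assert_eq!(u32::from_be_bytes(failure_data[0..4].try_into().unwrap()), 8);
    assert_eq!(u32::from_be_bytes(failure_data[4..8].try_into().unwrap()), 3);
}
```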


@@ -4,7 +4,6 @@
mod action;
mod aocs;
mod aocs_handler;
mod cam;
#[cfg(feature = "can")]
mod can;
@@ -17,6 +16,7 @@ mod power_handler;
mod pus;
mod requests;
mod tmtc;
mod messages;
use crate::requests::{RequestWithToken};
use crate::tmtc::{
@@ -50,10 +50,10 @@ use std::sync::mpsc::channel;
use std::sync::{mpsc, Arc, Mutex, RwLock};
use std::thread;
//use libc::time64_t;
use crate::aocs_handler::{MGMData, MGMHandler};
use crate::aocs::{MGMData, MGMHandler};
#[cfg(feature = "can")]
use crate::can::CanTxHandler;
use crate::hk::{AocsHousekeeper, AocsSensorData};
use crate::hk::{AOCSHousekeeper, AOCSSensorData};
use crate::pld_handler::{core_pld_task};
use crate::power_handler::{core_power_task, PowerSwitcher};
@@ -232,7 +232,7 @@ fn main() {
let power_switcher = PowerSwitcher::new(pcdu_tx, clonable_device_state_map.clone());
info!("Starting power task");
info!("Starting Power Handling task");
let builder2 = thread::Builder::new().name("PowerThread".into());
let jh2 = builder2.spawn(move || {
core_power_task(
@@ -288,8 +288,7 @@ fn main() {
let mgm_shared_data: Arc<Mutex<MGMData>> = Arc::default();
let aocs_sensor_data = Arc::new(Mutex::new(AocsSensorData::new()));
let (aocs_mode_tx, aocs_mode_rx) = channel();
let aocs_sensor_data = Arc::new(Mutex::new(AOCSSensorData::new()));
let (mgm_action_tx, mgm_action_rx) = channel();
let power_switcher_aocs = power_switcher.clone();
@@ -297,10 +296,10 @@ fn main() {
info!("Starting AOCS task");
let builder5 = thread::Builder::new().name("AOCSThread".into());
let jh5 = builder5.spawn(move || {
let mut mgm_handler = MGMHandler::new(power_switcher_aocs.clone(), DeviceId::MGM1, can_tx_sender.clone(), aocs_can_rx, aocs_mode_rx, mgm_action_rx);
let mut mgm_handler = MGMHandler::new(power_switcher_aocs.clone(), DeviceId::MGM1, can_tx_sender.clone(), aocs_can_rx, mgm_action_rx);
let aocs_sensor_data = Arc::new(Mutex::new(AocsSensorData::new()));
let mut aocs_housekeeper = AocsHousekeeper::new(
let aocs_sensor_data = Arc::new(Mutex::new(AOCSSensorData::new()));
let mut aocs_housekeeper = AOCSHousekeeper::new(
aocs_sensor_data.clone(),
aocs_thread_rx,
aocs_seq_count_provider,

src/messages.rs (new file, 15 lines)

@@ -0,0 +1,15 @@
use std::sync::mpsc::Sender;
use crate::requests::RequestWithToken;
pub struct InternalMessage {
response_sender: Option<Sender<ResponseDataTypes>>,
message_data: MessageDataTypes,
}
pub enum MessageDataTypes {
RequestWithToken(RequestWithToken),
}
pub enum ResponseDataTypes {
}
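The new InternalMessage type pairs a payload with an optional reply channel. The committed file carries no impl yet, so the field visibility and the simplified variants in this round-trip sketch are assumptions about how it is presumably meant to be used:

```rust
use std::sync::mpsc::{channel, Sender};

// Simplified stand-ins for the types in src/messages.rs; the public
// fields and the Ping/Pong variants are assumptions for this sketch.
pub enum MessageDataTypes { Ping }
pub enum ResponseDataTypes { Pong }

pub struct InternalMessage {
    pub response_sender: Option<Sender<ResponseDataTypes>>,
    pub message_data: MessageDataTypes,
}

fn main() {
    let (msg_tx, msg_rx) = channel::<InternalMessage>();
    let (reply_tx, reply_rx) = channel::<ResponseDataTypes>();
    // Sender side: attach a reply channel to the message.
    msg_tx
        .send(InternalMessage {
            response_sender: Some(reply_tx),
            message_data: MessageDataTypes::Ping,
        })
        .unwrap();
    // Receiver side: handle the payload and answer if a reply was requested.
    let msg = msg_rx.recv().unwrap();
    if let MessageDataTypes::Ping = msg.message_data {
        if let Some(reply) = msg.response_sender {
            reply.send(ResponseDataTypes::Pong).unwrap();
        }
    }
    assert!(matches!(reply_rx.recv(), Ok(ResponseDataTypes::Pong)));
}
```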


@@ -1,69 +1,105 @@
use crate::hk::{CollectionIntervalFactor, HkRequest};
use crate::requests::{Request, RequestWithToken};
use crate::tmtc::{PusTcSource, TmStore};
use eurosim_obsw::{hk_err, tmtc_err};
use log::{info, warn};
use satrs_core::events::EventU32;
use satrs_core::hk::{CollectionIntervalFactor, HkRequest};
use satrs_core::mode::{ModeAndSubmode, ModeCommand, ModeRequest};
use satrs_core::params::Params;
use satrs_core::pool::StoreAddr;
use satrs_core::pus::event::Subservice;
use satrs_core::pus::event_man::{EventRequest, EventRequestWithToken};
use satrs_core::pus::hk;
use satrs_core::pus::mode;
use satrs_core::pus::mode::Subservice;
use satrs_core::pus::scheduling::PusScheduler;
use satrs_core::pus::verification::{
FailParams, StdVerifReporterWithSender, TcStateAccepted, VerificationToken,
pus_11_generic_tc_check, FailParams, StdVerifReporterWithSender, TcStateAccepted,
VerificationToken,
};
use satrs_core::pus::{event, GenericTcCheckError};
use satrs_core::res_code::ResultU16;
use satrs_core::spacepackets::ecss::{scheduling, PusServiceId};
use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper;
use satrs_core::tmtc::{AddressableId, PusServiceProvider};
use satrs_core::tmtc::{AddressableId, PusServiceProvider, TargetId};
use satrs_core::{
spacepackets::ecss::PusPacket, spacepackets::tc::PusTc, spacepackets::time::cds::TimeProvider,
spacepackets::time::TimeWriter, spacepackets::SpHeader,
};
use eurosim_obsw::{hk_err, tmtc_err, CustomPusServiceId, TEST_EVENT};
use std::cell::RefCell;
use crate::action;
use crate::action::ActionRequest;
use eurosim_obsw::RequestTargetId::{AcsSubsystem, PldSubsystem};
use satrs_core::pus::scheduling::PusScheduler;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::rc::Rc;
use std::sync::mpsc::Sender;
use log::{debug};
use eurosim_obsw::RequestTargetId::{AcsSubsystem, PldSubsystem};
use crate::action;
use crate::action::ActionRequest;
pub struct PusReceiver {
pub tm_helper: PusTmWithCdsShortHelper,
pub tm_args: PusTmArgs,
pub tc_args: PusTcArgs,
stamp_helper: TimeStampHelper,
}
pub struct PusTmArgs {
/// All telemetry is sent with this sender handle.
pub tm_tx: Sender<StoreAddr>,
/// All TM to be sent is stored here
pub tm_store: TmStore,
/// All verification reporting is done with this reporter.
pub verif_reporter: StdVerifReporterWithSender,
#[allow(dead_code)]
tc_source: PusTcSource,
event_request_tx: Sender<EventRequestWithToken>,
request_map: HashMap<u32, Sender<RequestWithToken>>,
}
impl PusTmArgs {
fn vr(&mut self) -> &mut StdVerifReporterWithSender {
&mut self.verif_reporter
}
}
pub struct PusTcArgs {
pub event_request_tx: Sender<EventRequestWithToken>,
/// Request routing helper. Maps targeted requests to their recipient.
pub request_map: HashMap<TargetId, Sender<RequestWithToken>>,
/// Required for scheduling of telecommands.
pub tc_source: PusTcSource,
pub event_sender: Sender<(EventU32, Option<Params>)>,
pub scheduler: Rc<RefCell<PusScheduler>>,
}
struct TimeStampHelper {
stamper: TimeProvider,
time_stamp: [u8; 7],
scheduler: Rc<RefCell<PusScheduler>>,
}
impl TimeStampHelper {
pub fn new() -> Self {
Self {
stamper: TimeProvider::new_with_u16_days(0, 0),
time_stamp: [0; 7],
}
}
pub fn stamp(&self) -> &[u8] {
&self.time_stamp
}
pub fn update_from_now(&mut self) {
self.stamper
.update_from_now()
.expect("Updating timestamp failed");
self.stamper
.write_to_bytes(&mut self.time_stamp)
.expect("Writing timestamp failed");
}
}
impl PusReceiver {
pub fn new(
apid: u16,
tm_tx: Sender<StoreAddr>,
tm_store: TmStore,
verif_reporter: StdVerifReporterWithSender,
tc_source: PusTcSource,
event_request_tx: Sender<EventRequestWithToken>,
request_map: HashMap<u32, Sender<RequestWithToken>>,
scheduler: Rc<RefCell<PusScheduler>>,
) -> Self {
pub fn new(apid: u16, tm_arguments: PusTmArgs, tc_arguments: PusTcArgs) -> Self {
Self {
tm_helper: PusTmWithCdsShortHelper::new(apid),
tm_tx,
tm_store,
verif_reporter,
tc_source,
event_request_tx,
request_map,
stamper: TimeProvider::new_with_u16_days(0, 0),
time_stamp: [0; 7],
scheduler,
tm_args: tm_arguments,
tc_args: tc_arguments,
stamp_helper: TimeStampHelper::new(),
}
}
}
@@ -77,59 +113,106 @@ impl PusServiceProvider for PusReceiver {
_header: &SpHeader,
pus_tc: &PusTc,
) -> Result<(), Self::Error> {
let init_token = self.verif_reporter.add_tc(pus_tc);
self.update_time_stamp();
let init_token = self.tm_args.verif_reporter.add_tc(pus_tc);
self.stamp_helper.update_from_now();
let accepted_token = self
.verif_reporter
.acceptance_success(init_token, Some(&self.time_stamp))
.tm_args
.vr()
.acceptance_success(init_token, Some(self.stamp_helper.stamp()))
.expect("Acceptance success failure");
if service == 17 {
self.handle_test_service(pus_tc, accepted_token);
} else if service == 5 {
self.handle_event_request(pus_tc, accepted_token);
} else if service == 3 {
self.handle_hk_request(pus_tc, accepted_token);
} else if service == 8 {
self.handle_function_request(pus_tc, accepted_token);
} else if service == 11 {
self.handle_scheduled_tc(pus_tc, accepted_token);
} else {
self.update_time_stamp();
self.verif_reporter
let service = PusServiceId::try_from(service);
match service {
Ok(standard_service) => match standard_service {
PusServiceId::Test => self.handle_test_service(pus_tc, accepted_token),
PusServiceId::Housekeeping => self.handle_hk_request(pus_tc, accepted_token),
PusServiceId::Event => self.handle_event_request(pus_tc, accepted_token),
PusServiceId::Scheduling => self.handle_scheduled_tc(pus_tc, accepted_token),
PusServiceId::Action => self.handle_action_request(pus_tc, accepted_token),
_ => self
.tm_args
.verif_reporter
.start_failure(
accepted_token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::INVALID_PUS_SERVICE, None),
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::PUS_SERVICE_NOT_IMPLEMENTED,
Some(&[standard_service as u8]),
),
)
.expect("Start failure verification failed"),
},
Err(e) => {
if let Ok(custom_service) = CustomPusServiceId::try_from(e.number) {
match custom_service {
CustomPusServiceId::Mode => {
self.handle_mode_service(pus_tc, accepted_token)
}
CustomPusServiceId::Health => {}
}
} else {
self.tm_args
.verif_reporter
.start_failure(
accepted_token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::INVALID_PUS_SUBSERVICE,
Some(&[e.number]),
),
)
.expect("Start failure verification failed")
}
}
}
Ok(())
}
}
impl PusReceiver {
fn handle_test_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
if PusPacket::subservice(pus_tc) == 1 {
debug!("Received PUS ping command TC[17,1]");
debug!("Sending ping reply PUS TM[17,2]");
let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(17, 2, None);
let addr = self.tm_store.add_pus_tm(&ping_reply);
match PusPacket::subservice(pus_tc) {
1 => {
info!("Received PUS ping command TC[17,1]");
info!("Sending ping reply PUS TM[17,2]");
let start_token = self
.tm_args
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
self.tm_tx
let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(17, 2, None);
let addr = self.tm_args.tm_store.add_pus_tm(&ping_reply);
self.tm_args
.tm_tx
.send(addr)
.expect("Sending TM to TM funnel failed");
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
self.tm_args
.verif_reporter
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("Error sending completion success");
} else {
self.update_time_stamp();
self.verif_reporter
}
128 => {
info!("Generating test event");
self.tc_args
.event_sender
.send((TEST_EVENT.into(), None))
.expect("Sending test event failed");
let start_token = self
.tm_args
.verif_reporter
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
self.tm_args
.verif_reporter
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("Error sending completion success");
}
_ => {
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(
Some(&self.time_stamp),
Some(self.stamp_helper.stamp()),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
),
@@ -137,23 +220,19 @@ impl PusReceiver {
.expect("Sending start failure TM failed");
}
}
fn update_time_stamp(&mut self) {
self.stamper
.update_from_now()
.expect("Updating timestamp failed");
self.stamper
.write_to_bytes(&mut self.time_stamp)
.expect("Writing timestamp failed");
}
fn handle_hk_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
if pus_tc.user_data().is_none() {
self.update_time_stamp();
self.verif_reporter
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
),
)
.expect("Sending start failure TM failed");
return;
@@ -165,28 +244,43 @@ impl PusReceiver {
} else {
&hk_err::UNIQUE_ID_MISSING
};
self.update_time_stamp();
self.verif_reporter
.start_failure(token, FailParams::new(Some(&self.time_stamp), err, None))
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(Some(self.stamp_helper.stamp()), err, None),
)
.expect("Sending start failure TM failed");
return;
}
let addressable_id = AddressableId::from_raw_be(user_data).unwrap();
if !self.request_map.contains_key(&addressable_id.target_id) {
self.update_time_stamp();
self.verif_reporter
if !self
.tc_args
.request_map
.contains_key(&addressable_id.target_id)
{
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &hk_err::UNKNOWN_TARGET_ID, None),
FailParams::new(
Some(self.stamp_helper.stamp()),
&hk_err::UNKNOWN_TARGET_ID,
None,
),
)
.expect("Sending start failure TM failed");
return;
}
let send_request = |request: HkRequest| {
let sender = self.request_map.get(&addressable_id.target_id).unwrap();
let sender = self
.tc_args
.request_map
.get(&addressable_id.target_id)
.unwrap();
sender
.send(RequestWithToken(Request::HkRequest(request), token))
.unwrap_or_else(|_| panic!("Sending HK request {:?} failed", request));
.unwrap_or_else(|_| panic!("Sending HK request {request:?} failed"));
};
if PusPacket::subservice(pus_tc) == hk::Subservice::TcEnableHkGeneration as u8 {
send_request(HkRequest::Enable(addressable_id));
@@ -198,12 +292,12 @@ impl PusReceiver {
== hk::Subservice::TcModifyHkCollectionInterval as u8
{
if user_data.len() < 12 {
self.update_time_stamp();
self.verif_reporter
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(
Some(&self.time_stamp),
Some(self.stamp_helper.stamp()),
&hk_err::COLLECTION_INTERVAL_MISSING,
None,
),
@@ -217,29 +311,26 @@ impl PusReceiver {
));
}
}
fn handle_event_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
let send_start_failure = |verif_reporter: &mut StdVerifReporterWithSender,
timestamp: &[u8; 7],
let send_start_failure = |vr: &mut StdVerifReporterWithSender,
timestamp: &[u8],
failure_code: &ResultU16,
failure_data: Option<&[u8]>| {
verif_reporter
.start_failure(
vr.start_failure(
token,
FailParams::new(Some(timestamp), failure_code, failure_data),
)
.expect("Sending start failure TM failed");
};
let send_start_acceptance = |verif_reporter: &mut StdVerifReporterWithSender,
timestamp: &[u8; 7]| {
verif_reporter
.start_success(token, Some(timestamp))
let send_start_acceptance = |vr: &mut StdVerifReporterWithSender, timestamp: &[u8]| {
vr.start_success(token, Some(timestamp))
.expect("Sending start success TM failed")
};
if pus_tc.user_data().is_none() {
self.update_time_stamp();
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
&mut self.tm_args.verif_reporter,
self.stamp_helper.stamp(),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
);
@@ -247,10 +338,9 @@ impl PusReceiver {
}
let app_data = pus_tc.user_data().unwrap();
if app_data.len() < 4 {
self.update_time_stamp();
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
&mut self.tm_args.verif_reporter,
self.stamp_helper.stamp(),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
);
@@ -258,20 +348,26 @@ impl PusReceiver {
}
let event_id = EventU32::from(u32::from_be_bytes(app_data.try_into().unwrap()));
match PusPacket::subservice(pus_tc).try_into() {
Ok(Subservice::TcEnableEventGeneration) => {
self.update_time_stamp();
let start_token = send_start_acceptance(&mut self.verif_reporter, &self.time_stamp);
self.event_request_tx
Ok(event::Subservice::TcEnableEventGeneration) => {
let start_token = send_start_acceptance(
&mut self.tm_args.verif_reporter,
self.stamp_helper.stamp(),
);
self.tc_args
.event_request_tx
.send(EventRequestWithToken {
request: EventRequest::Enable(event_id),
token: start_token,
})
.expect("Sending event request failed");
}
Ok(Subservice::TcDisableEventGeneration) => {
self.update_time_stamp();
let start_token = send_start_acceptance(&mut self.verif_reporter, &self.time_stamp);
self.event_request_tx
Ok(event::Subservice::TcDisableEventGeneration) => {
let start_token = send_start_acceptance(
&mut self.tm_args.verif_reporter,
self.stamp_helper.stamp(),
);
self.tc_args
.event_request_tx
.send(EventRequestWithToken {
request: EventRequest::Disable(event_id),
token: start_token,
@@ -279,10 +375,9 @@ impl PusReceiver {
.expect("Sending event request failed");
}
_ => {
self.update_time_stamp();
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
&mut self.tm_args.verif_reporter,
self.stamp_helper.stamp(),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
);
@@ -290,17 +385,252 @@ impl PusReceiver {
}
}
fn handle_function_request(
fn handle_scheduled_tc(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
let subservice = match pus_11_generic_tc_check(pus_tc) {
Ok(subservice) => subservice,
Err(e) => match e {
GenericTcCheckError::NotEnoughAppData => {
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
),
)
.expect("could not sent verification error");
return;
}
GenericTcCheckError::InvalidSubservice => {
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
),
)
.expect("could not sent verification error");
return;
}
},
};
match subservice {
scheduling::Subservice::TcEnableScheduling => {
let start_token = self
.tm_args
.verif_reporter
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
let mut scheduler = self.tc_args.scheduler.borrow_mut();
scheduler.enable();
if scheduler.is_enabled() {
self.tm_args
.verif_reporter
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("Error sending completion success");
} else {
panic!("Failed to enable scheduler");
}
}
scheduling::Subservice::TcDisableScheduling => {
let start_token = self
.tm_args
.verif_reporter
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
let mut scheduler = self.tc_args.scheduler.borrow_mut();
scheduler.disable();
if !scheduler.is_enabled() {
self.tm_args
.verif_reporter
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("Error sending completion success");
} else {
panic!("Failed to disable scheduler");
}
}
scheduling::Subservice::TcResetScheduling => {
let start_token = self
.tm_args
.verif_reporter
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
let mut pool = self
.tc_args
.tc_source
.tc_store
.pool
.write()
.expect("Locking pool failed");
let mut scheduler = self.tc_args.scheduler.borrow_mut();
scheduler
.reset(pool.as_mut())
.expect("Error resetting TC Pool");
drop(scheduler);
self.tm_args
.verif_reporter
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("Error sending completion success");
}
scheduling::Subservice::TcInsertActivity => {
let start_token = self
.tm_args
.verif_reporter
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("error sending start success");
let mut pool = self
.tc_args
.tc_source
.tc_store
.pool
.write()
.expect("locking pool failed");
let mut scheduler = self.tc_args.scheduler.borrow_mut();
scheduler
.insert_wrapped_tc::<TimeProvider>(pus_tc, pool.as_mut())
.expect("insertion of activity into pool failed");
drop(scheduler);
self.tm_args
.verif_reporter
.completion_success(start_token, Some(self.stamp_helper.stamp()))
.expect("sending completion success failed");
}
_ => {}
}
}
fn handle_mode_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
let mut app_data_len = 0;
let app_data = pus_tc.user_data();
if app_data.is_some() {
app_data_len = pus_tc.user_data().unwrap().len();
}
if app_data_len < 4 {
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::NOT_ENOUGH_APP_DATA,
Some(format!("expected {} bytes, found {}", 4, app_data_len).as_bytes()),
),
)
.expect("Sending start failure TM failed");
// Without this return, the unwrap() on app_data below would panic
// whenever the length check fails.
return;
}
let app_data = app_data.unwrap();
let mut invalid_subservice_handler = || {
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::INVALID_PUS_SUBSERVICE,
Some(&[PusPacket::subservice(pus_tc)]),
),
)
.expect("Sending start failure TM failed");
};
let subservice = mode::Subservice::try_from(PusPacket::subservice(pus_tc));
if let Ok(subservice) = subservice {
let forward_mode_request = |target_id, mode_request: ModeRequest| match self
.tc_args
.request_map
.get(&target_id)
{
None => warn!("not mode request recipient for target ID {target_id} found"),
Some(sender_to_recipient) => {
sender_to_recipient
.send(RequestWithToken(Request::ModeRequest(mode_request), token))
.expect("sending mode request failed");
}
};
let mut valid_subservice = true;
match subservice {
Subservice::TcSetMode => {
let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
let min_len = ModeAndSubmode::raw_len() + 4;
if app_data_len < min_len {
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::NOT_ENOUGH_APP_DATA,
Some(
format!("expected {min_len} bytes, found {app_data_len}")
.as_bytes(),
),
),
)
.expect("Sending start failure TM failed");
// Bail out here so the size-checked slice below cannot panic.
return;
}
// Should never fail after size check
let mode_submode = ModeAndSubmode::from_be_bytes(
app_data[4..4 + ModeAndSubmode::raw_len()]
.try_into()
.unwrap(),
)
.unwrap();
forward_mode_request(
target_id,
ModeRequest::SetMode(ModeCommand::new(target_id, mode_submode)),
);
}
Subservice::TcReadMode => {
let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
forward_mode_request(target_id, ModeRequest::ReadMode(target_id));
}
Subservice::TcAnnounceMode => {
let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
forward_mode_request(target_id, ModeRequest::AnnounceMode(target_id));
}
Subservice::TcAnnounceModeRecursive => {
let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
forward_mode_request(target_id, ModeRequest::AnnounceModeRecursive(target_id));
}
_ => {
warn!("Can not process mode request with subservice {subservice:?}");
invalid_subservice_handler();
valid_subservice = false;
}
}
if valid_subservice {
self.tm_args
.verif_reporter
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("sending start success TM failed");
}
} else {
invalid_subservice_handler();
}
}
fn handle_action_request(
&mut self,
pus_tc: &PusTc,
token: VerificationToken<TcStateAccepted>,
) {
if pus_tc.user_data().is_none() {
self.update_time_stamp();
self.verif_reporter
self.stamp_helper.update_from_now(); // also rewrites the raw stamp buffer returned by stamp()
self.tm_args.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
FailParams::new(Some(&self.stamp_helper.stamp()), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
)
.expect("Sending start failure TM failed");
return;
@@ -309,14 +639,14 @@ impl PusReceiver {
let send_request = |request: ActionRequest| match request {
ActionRequest::ImageRequest(target_id) => {
let id = target_id as u32;
let sender = self.request_map.get(&id).unwrap();
let sender = self.tc_args.request_map.get(&id).unwrap();
sender
.send(RequestWithToken(Request::ActionRequest(request), token))
.unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request));
}
ActionRequest::OrientationRequest(target_id) => {
let id = target_id as u32;
let sender = self.request_map.get(&id).unwrap();
let sender = self.tc_args.request_map.get(&id).unwrap();
sender
.send(RequestWithToken(Request::ActionRequest(request), token))
.unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request));
@@ -330,117 +660,4 @@ impl PusReceiver {
send_request(ActionRequest::OrientationRequest(AcsSubsystem));
}
}
fn handle_scheduled_tc(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
if pus_tc.user_data().is_none() {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
)
.expect("Sending start failure TM failed");
return;
}
self.update_time_stamp();
match pus_tc.subservice() {
1 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut scheduler = self.scheduler.borrow_mut();
scheduler.enable();
if scheduler.is_enabled() {
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
} else {
panic!("Failed to enable scheduler");
}
drop(scheduler);
}
2 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut scheduler = self.scheduler.borrow_mut();
scheduler.disable();
if !scheduler.is_enabled() {
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
} else {
panic!("Failed to disable scheduler");
}
drop(scheduler);
}
3 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut pool = self
.tc_source
.tc_store
.pool
.write()
.expect("Locking pool failed");
let mut scheduler = self.scheduler.borrow_mut();
scheduler
.reset(pool.as_mut())
.expect("Error resetting TC Pool");
drop(scheduler);
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
}
4 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut pool = self
.tc_source
.tc_store
.pool
.write()
.expect("Locking pool failed");
let mut scheduler = self.scheduler.borrow_mut();
scheduler
.insert_wrapped_tc::<TimeProvider>(pus_tc, pool.as_mut())
.expect("TODO: panic message");
drop(scheduler);
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
//let addr = self.tc_source.tc_store.add_pus_tc().unwrap();
//let unix_time = UnixTimestamp::new_only_seconds(self.stamper.unix_seconds());
//let worked = self.scheduler.insert_tc(unix_time, );
}
_ => {
self.verif_reporter
.start_failure(
token,
FailParams::new(
Some(&self.time_stamp),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
),
)
.expect("Sending start failure TM failed");
return;
}
}
}
}
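
A minimal sketch (not part of this commit) of the PusScheduler calls the four
scheduler subservices above boil down to, using only the satrs-core API visible
in this file; the shared TC pool required by subservices 3 and 4 is elided:

use std::cell::RefCell;
use std::rc::Rc;
use std::time::Duration;
use satrs_core::pus::scheduling::PusScheduler;

fn main() {
    // Created the same way as in core_tmtc_task.
    let scheduler = Rc::new(RefCell::new(
        PusScheduler::new_with_current_init_time(Duration::from_secs(5)).unwrap(),
    ));
    let mut sched = scheduler.borrow_mut();
    // TC[11,1]: enable release of scheduled TCs.
    sched.enable();
    assert!(sched.is_enabled());
    // TC[11,2]: disable release again.
    sched.disable();
    assert!(!sched.is_enabled());
    // TC[11,3] maps to sched.reset(pool) and TC[11,4] to
    // sched.insert_wrapped_tc::<TimeProvider>(pus_tc, pool); both additionally
    // need write access to the shared store pool and are omitted here.
}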


@@ -1,12 +1,19 @@
-use crate::action::ActionRequest;
-use crate::hk::HkRequest;
+use satrs_core::hk::HkRequest;
+use satrs_core::mode::ModeRequest;
use satrs_core::pus::verification::{TcStateAccepted, VerificationToken};
+use crate::action::ActionRequest;
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
+#[non_exhaustive]
pub enum Request {
    HkRequest(HkRequest),
-    ActionRequest(ActionRequest),
+    ModeRequest(ModeRequest),
+    ActionRequest(ActionRequest),
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct RequestWithToken(pub Request, pub VerificationToken<TcStateAccepted>);
+#[derive(Copy, Clone, Eq, PartialEq, Debug)]
+pub struct RequestWithOptionalToken(pub Request, pub Option<VerificationToken<TcStateAccepted>>);
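
For illustration, a hypothetical consumer (not part of this commit) draining a
request channel could dispatch on the widened enum like so, with the types as
defined above:

use std::sync::mpsc::Receiver;

fn request_loop(rx: Receiver<RequestWithToken>) {
    while let Ok(RequestWithToken(request, _token)) = rx.recv() {
        match request {
            Request::HkRequest(_hk) => { /* forward to the HK module */ }
            Request::ModeRequest(_mode) => { /* apply the commanded mode */ }
            Request::ActionRequest(_action) => { /* execute the action */ }
        }
    }
}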

src/temp_pus.rs (new file, 446 lines)

@@ -0,0 +1,446 @@
use crate::hk::{CollectionIntervalFactor, HkRequest};
use crate::requests::{Request, RequestWithToken};
use crate::tmtc::{PusTcSource, TmStore};
use eurosim_obsw::{hk_err, tmtc_err};
use satrs_core::events::EventU32;
use satrs_core::pool::StoreAddr;
use satrs_core::pus::event::Subservice;
use satrs_core::pus::event_man::{EventRequest, EventRequestWithToken};
use satrs_core::pus::hk;
use satrs_core::pus::verification::{
FailParams, StdVerifReporterWithSender, TcStateAccepted, VerificationToken,
};
use satrs_core::res_code::ResultU16;
use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper;
use satrs_core::tmtc::{AddressableId, PusServiceProvider};
use satrs_core::{
spacepackets::ecss::PusPacket, spacepackets::tc::PusTc, spacepackets::time::cds::TimeProvider,
spacepackets::time::TimeWriter, spacepackets::SpHeader,
};
use std::cell::RefCell;
use crate::action;
use crate::action::ActionRequest;
use eurosim_obsw::RequestTargetId::{AcsSubsystem, PldSubsystem};
use satrs_core::pus::scheduling::PusScheduler;
use std::collections::HashMap;
use std::rc::Rc;
use std::sync::mpsc::Sender;
use log::debug;
pub struct PusReceiver {
pub tm_helper: PusTmWithCdsShortHelper,
pub tm_tx: Sender<StoreAddr>,
pub tm_store: TmStore,
pub verif_reporter: StdVerifReporterWithSender,
#[allow(dead_code)]
tc_source: PusTcSource,
event_request_tx: Sender<EventRequestWithToken>,
request_map: HashMap<u32, Sender<RequestWithToken>>,
stamper: TimeProvider,
time_stamp: [u8; 7],
scheduler: Rc<RefCell<PusScheduler>>,
}
impl PusReceiver {
pub fn new(
apid: u16,
tm_tx: Sender<StoreAddr>,
tm_store: TmStore,
verif_reporter: StdVerifReporterWithSender,
tc_source: PusTcSource,
event_request_tx: Sender<EventRequestWithToken>,
request_map: HashMap<u32, Sender<RequestWithToken>>,
scheduler: Rc<RefCell<PusScheduler>>,
) -> Self {
Self {
tm_helper: PusTmWithCdsShortHelper::new(apid),
tm_tx,
tm_store,
verif_reporter,
tc_source,
event_request_tx,
request_map,
stamper: TimeProvider::new_with_u16_days(0, 0),
time_stamp: [0; 7],
scheduler,
}
}
}
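// Note: `stamper` starts out as a zeroed CDS time provider; update_time_stamp()
// below refreshes it from the system clock before reports are generated.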
impl PusServiceProvider for PusReceiver {
type Error = ();
fn handle_pus_tc_packet(
&mut self,
service: u8,
_header: &SpHeader,
pus_tc: &PusTc,
) -> Result<(), Self::Error> {
let init_token = self.verif_reporter.add_tc(pus_tc);
self.update_time_stamp();
let accepted_token = self
.verif_reporter
.acceptance_success(init_token, Some(&self.time_stamp))
.expect("Acceptance success failure");
if service == 17 {
self.handle_test_service(pus_tc, accepted_token);
} else if service == 5 {
self.handle_event_request(pus_tc, accepted_token);
} else if service == 3 {
self.handle_hk_request(pus_tc, accepted_token);
} else if service == 8 {
self.handle_function_request(pus_tc, accepted_token);
} else if service == 11 {
self.handle_scheduled_tc(pus_tc, accepted_token);
} else {
self.update_time_stamp();
self.verif_reporter
.start_failure(
accepted_token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::INVALID_PUS_SERVICE, None),
)
.expect("Start failure verification failed")
}
Ok(())
}
}
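// Service numbers above follow the ECSS PUS standard: 3 = housekeeping,
// 5 = event reporting, 8 = function management, 11 = time-based scheduling,
// 17 = test ("ping").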
impl PusReceiver {
fn handle_test_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
if PusPacket::subservice(pus_tc) == 1 {
debug!("Received PUS ping command TC[17,1]");
debug!("Sending ping reply PUS TM[17,2]");
let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(17, 2, None);
let addr = self.tm_store.add_pus_tm(&ping_reply);
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
self.tm_tx
.send(addr)
.expect("Sending TM to TM funnel failed");
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
} else {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(
Some(&self.time_stamp),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
),
)
.expect("Sending start failure TM failed");
}
}
fn update_time_stamp(&mut self) {
self.stamper
.update_from_now()
.expect("Updating timestamp failed");
self.stamper
.write_to_bytes(&mut self.time_stamp)
.expect("Writing timestamp failed");
}
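// The 7-byte buffer matches the CDS short time format written here: one P-field
// byte, a 16-bit day counter and a 32-bit milliseconds-of-day field.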
fn handle_hk_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
if pus_tc.user_data().is_none() {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
)
.expect("Sending start failure TM failed");
return;
}
let user_data = pus_tc.user_data().unwrap();
if user_data.len() < 8 {
let err = if user_data.len() < 4 {
&hk_err::TARGET_ID_MISSING
} else {
&hk_err::UNIQUE_ID_MISSING
};
self.update_time_stamp();
self.verif_reporter
.start_failure(token, FailParams::new(Some(&self.time_stamp), err, None))
.expect("Sending start failure TM failed");
return;
}
let addressable_id = AddressableId::from_raw_be(user_data).unwrap();
if !self.request_map.contains_key(&addressable_id.target_id) {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &hk_err::UNKNOWN_TARGET_ID, None),
)
.expect("Sending start failure TM failed");
return;
}
let send_request = |request: HkRequest| {
let sender = self.request_map.get(&addressable_id.target_id).unwrap();
sender
.send(RequestWithToken(Request::HkRequest(request), token))
.unwrap_or_else(|_| panic!("Sending HK request {:?} failed", request));
};
if PusPacket::subservice(pus_tc) == hk::Subservice::TcEnableHkGeneration as u8 {
send_request(HkRequest::Enable(addressable_id));
} else if PusPacket::subservice(pus_tc) == hk::Subservice::TcDisableHkGeneration as u8 {
send_request(HkRequest::Disable(addressable_id));
} else if PusPacket::subservice(pus_tc) == hk::Subservice::TcGenerateOneShotHk as u8 {
send_request(HkRequest::OneShot(addressable_id));
} else if PusPacket::subservice(pus_tc)
== hk::Subservice::TcModifyHkCollectionInterval as u8
{
if user_data.len() < 12 {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(
Some(&self.time_stamp),
&hk_err::COLLECTION_INTERVAL_MISSING,
None,
),
)
.expect("Sending start failure TM failed");
return;
}
send_request(HkRequest::ModifyCollectionInterval(
addressable_id,
CollectionIntervalFactor::from_be_bytes(user_data[8..12].try_into().unwrap()),
));
}
}
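
The length checks above imply a fixed HK request layout; a small sketch (helper
name hypothetical, not part of this commit) that builds matching app data:

fn build_hk_app_data(target_id: u32, unique_id: u32, interval_factor: Option<u32>) -> Vec<u8> {
    // Bytes 0..4: target ID, bytes 4..8: unique ID (together the AddressableId),
    // bytes 8..12: big-endian collection interval factor, required only for the
    // modify-collection-interval subservice.
    let mut app_data = Vec::with_capacity(12);
    app_data.extend_from_slice(&target_id.to_be_bytes());
    app_data.extend_from_slice(&unique_id.to_be_bytes());
    if let Some(factor) = interval_factor {
        app_data.extend_from_slice(&factor.to_be_bytes());
    }
    app_data
}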
fn handle_event_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
let send_start_failure = |verif_reporter: &mut StdVerifReporterWithSender,
timestamp: &[u8; 7],
failure_code: &ResultU16,
failure_data: Option<&[u8]>| {
verif_reporter
.start_failure(
token,
FailParams::new(Some(timestamp), failure_code, failure_data),
)
.expect("Sending start failure TM failed");
};
let send_start_acceptance = |verif_reporter: &mut StdVerifReporterWithSender,
timestamp: &[u8; 7]| {
verif_reporter
.start_success(token, Some(timestamp))
.expect("Sending start success TM failed")
};
if pus_tc.user_data().is_none() {
self.update_time_stamp();
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
);
return;
}
let app_data = pus_tc.user_data().unwrap();
if app_data.len() < 4 {
self.update_time_stamp();
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
);
return;
}
        let event_id = EventU32::from(u32::from_be_bytes(app_data[0..4].try_into().unwrap()));
match PusPacket::subservice(pus_tc).try_into() {
Ok(Subservice::TcEnableEventGeneration) => {
self.update_time_stamp();
let start_token = send_start_acceptance(&mut self.verif_reporter, &self.time_stamp);
self.event_request_tx
.send(EventRequestWithToken {
request: EventRequest::Enable(event_id),
token: start_token,
})
.expect("Sending event request failed");
}
Ok(Subservice::TcDisableEventGeneration) => {
self.update_time_stamp();
let start_token = send_start_acceptance(&mut self.verif_reporter, &self.time_stamp);
self.event_request_tx
.send(EventRequestWithToken {
request: EventRequest::Disable(event_id),
token: start_token,
})
.expect("Sending event request failed");
}
_ => {
self.update_time_stamp();
send_start_failure(
&mut self.verif_reporter,
&self.time_stamp,
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
);
}
}
}
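
The event requests parsed above carry just the raw event ID; a matching builder
(hypothetical helper, not part of this commit) would be:

fn build_event_app_data(raw_event_id: u32) -> [u8; 4] {
    // The handler turns these four big-endian bytes back into an EventU32.
    raw_event_id.to_be_bytes()
}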
fn handle_function_request(
&mut self,
pus_tc: &PusTc,
token: VerificationToken<TcStateAccepted>,
) {
if pus_tc.user_data().is_none() {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
)
.expect("Sending start failure TM failed");
return;
}
let send_request = |request: ActionRequest| match request {
ActionRequest::ImageRequest(target_id) => {
let id = target_id as u32;
let sender = self.request_map.get(&id).unwrap();
sender
.send(RequestWithToken(Request::ActionRequest(request), token))
.unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request));
}
ActionRequest::OrientationRequest(target_id) => {
let id = target_id as u32;
let sender = self.request_map.get(&id).unwrap();
sender
.send(RequestWithToken(Request::ActionRequest(request), token))
.unwrap_or_else(|_| panic!("Sending Action request {:?} failed", request));
}
_ => {}
};
if PusPacket::subservice(pus_tc) == action::Subservice::ImageRequest as u8 {
send_request(ActionRequest::ImageRequest(PldSubsystem));
} else if PusPacket::subservice(pus_tc) == action::Subservice::OrientationRequest as u8 {
send_request(ActionRequest::OrientationRequest(AcsSubsystem));
}
}
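// Note: service 8 routing is a fixed mapping (ImageRequest subservice ->
// PldSubsystem, OrientationRequest subservice -> AcsSubsystem); any other
// subservice currently falls through without a verification failure report.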
fn handle_scheduled_tc(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
if pus_tc.user_data().is_none() {
self.update_time_stamp();
self.verif_reporter
.start_failure(
token,
FailParams::new(Some(&self.time_stamp), &tmtc_err::NOT_ENOUGH_APP_DATA, None),
)
.expect("Sending start failure TM failed");
return;
}
self.update_time_stamp();
match pus_tc.subservice() {
1 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut scheduler = self.scheduler.borrow_mut();
scheduler.enable();
if scheduler.is_enabled() {
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
} else {
panic!("Failed to enable scheduler");
}
drop(scheduler);
}
2 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut scheduler = self.scheduler.borrow_mut();
scheduler.disable();
if !scheduler.is_enabled() {
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
} else {
panic!("Failed to disable scheduler");
}
drop(scheduler);
}
3 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut pool = self
.tc_source
.tc_store
.pool
.write()
.expect("Locking pool failed");
let mut scheduler = self.scheduler.borrow_mut();
scheduler
.reset(pool.as_mut())
.expect("Error resetting TC Pool");
drop(scheduler);
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
}
4 => {
let start_token = self
.verif_reporter
.start_success(token, Some(&self.time_stamp))
.expect("Error sending start success");
let mut pool = self
.tc_source
.tc_store
.pool
.write()
.expect("Locking pool failed");
let mut scheduler = self.scheduler.borrow_mut();
scheduler
.insert_wrapped_tc::<TimeProvider>(pus_tc, pool.as_mut())
.expect("TODO: panic message");
drop(scheduler);
self.verif_reporter
.completion_success(start_token, Some(&self.time_stamp))
.expect("Error sending completion success");
//let addr = self.tc_source.tc_store.add_pus_tc().unwrap();
//let unix_time = UnixTimestamp::new_only_seconds(self.stamper.unix_seconds());
//let worked = self.scheduler.insert_tc(unix_time, );
}
_ => {
self.verif_reporter
.start_failure(
token,
FailParams::new(
Some(&self.time_stamp),
&tmtc_err::NOT_ENOUGH_APP_DATA,
None,
),
)
.expect("Sending start failure TM failed");
return;
}
}
}
}


@@ -1,3 +1,4 @@
use log::info;
use satrs_core::events::EventU32;
use satrs_core::hal::host::udp_server::{ReceiveResult, UdpTcServer};
use satrs_core::params::Params;
@@ -12,7 +13,7 @@ use std::thread;
use std::time::Duration;
use crate::ccsds::CcsdsReceiver;
-use crate::pus::PusReceiver;
+use crate::pus::{PusReceiver, PusTcArgs, PusTmArgs};
use crate::requests::RequestWithToken;
use satrs_core::pool::{SharedPool, StoreAddr, StoreError};
use satrs_core::pus::event_man::EventRequestWithToken;
@@ -45,6 +46,7 @@ pub struct TcArgs {
}
impl TcArgs {
#[allow(dead_code)]
fn split(self) -> (PusTcSource, Receiver<StoreAddr>) {
(self.tc_source, self.tc_receiver)
}
@@ -160,22 +162,26 @@ impl ReceivesCcsdsTc for PusTcSource {
Ok(())
}
}
pub fn core_tmtc_task(args: OtherArgs, mut tc_args: TcArgs, tm_args: TmArgs) {
let scheduler = Rc::new(RefCell::new(
PusScheduler::new_with_current_init_time(Duration::from_secs(5)).unwrap(),
));
let sched_clone = scheduler.clone();
-    let mut pus_receiver = PusReceiver::new(
-        PUS_APID,
-        tm_args.tm_sink_sender,
-        tm_args.tm_store.clone(),
-        args.verif_reporter,
-        tc_args.tc_source.clone(),
-        args.event_request_tx,
-        args.request_map,
-        sched_clone,
-    );
+    let pus_tm_args = PusTmArgs {
+        tm_tx: tm_args.tm_sink_sender,
+        tm_store: tm_args.tm_store.clone(),
+        verif_reporter: args.verif_reporter,
+    };
+    let pus_tc_args = PusTcArgs {
+        event_request_tx: args.event_request_tx,
+        request_map: args.request_map,
+        tc_source: tc_args.tc_source.clone(),
+        event_sender: args.event_sender,
+        scheduler: sched_clone,
+    };
+    let mut pus_receiver = PusReceiver::new(PUS_APID, pus_tm_args, pus_tc_args);
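// Grouping the former constructor parameters into PusTmArgs (TM sink, store,
// verification reporting) and PusTcArgs (event/request routing, TC source,
// scheduler) keeps PusReceiver::new at three arguments as dependencies grow.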
let ccsds_receiver = CcsdsReceiver {
tc_source: tc_args.tc_source.clone(),
@@ -192,8 +198,6 @@ pub fn core_tmtc_task(args: OtherArgs, mut tc_args: TcArgs, tm_args: TmArgs) {
tm_store: tm_args.tm_store.pool.clone(),
};
//let (mut tc_source, mut tc_receiver) = tc_args.split();
let mut tc_buf: [u8; 4096] = [0; 4096];
loop {
let tmtc_sched = scheduler.clone();
@@ -201,8 +205,6 @@
&mut udp_tmtc_server,
&mut tc_args,
&mut tc_buf,
//&mut tc_source,
//&mut tc_receiver,
&mut pus_receiver,
tmtc_sched,
);
@@ -214,8 +216,6 @@ fn core_tmtc_loop(
udp_tmtc_server: &mut UdpTmtcServer,
tc_args: &mut TcArgs,
tc_buf: &mut [u8],
//tc_source: &mut PusTcSource,
//tc_receiver: &mut Receiver<StoreAddr>,
pus_receiver: &mut PusReceiver,
scheduler: Rc<RefCell<PusScheduler>>,
) {
@@ -241,7 +241,7 @@ fn core_tmtc_loop(
scheduler.update_time_from_now().unwrap();
if let Ok(released_tcs) = scheduler.release_telecommands(releaser, pool.as_mut()) {
if released_tcs > 0 {
println!("{released_tcs} TC(s) released from scheduler");
info!("{released_tcs} TC(s) released from scheduler");
}
}
drop(pool);
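// `releaser` is a closure defined earlier in core_tmtc_loop (outside this hunk);
// dropping the pool guard immediately after the release pass keeps the write
// lock on the shared TC pool as short as possible.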
@@ -316,9 +316,9 @@ fn core_tm_handling(udp_tmtc_server: &mut UdpTmtcServer, recv_addr: &SocketAddr)
if buf.len() > 9 {
let service = buf[7];
let subservice = buf[8];
println!("Sending PUS TM[{service},{subservice}]")
info!("Sending PUS TM[{service},{subservice}]")
} else {
println!("Sending PUS TM");
info!("Sending PUS TM");
}
udp_tmtc_server
.udp_tc_server