finished seq count, aocs handlers, housekeeper, maybe other stuff

lkoester 2023-03-17 11:09:17 +01:00
parent 8ceb2c7a79
commit 51471c0a2d
8 changed files with 864894 additions and 336 deletions

output.log (863,835 changed lines) — file diff suppressed because it is too large.


@@ -9,6 +9,7 @@ from typing import Optional
import datetime
import json
import pprint
import struct
import tmtccmd
from spacepackets.ecss import PusTelemetry, PusTelecommand, PusVerificator
@@ -17,7 +18,7 @@ from spacepackets.ecss.pus_1_verification import UnpackParams, Service1Tm
from spacepackets.ccsds.time import CdsShortTimestamp
from tmtccmd import CcsdsTmtcBackend, TcHandlerBase, ProcedureParamsWrapper
- from tmtccmd.tc.pus_3_fsfw_hk import generate_one_hk_command, make_sid
from tmtccmd.tc.pus_3_fsfw_hk import generate_one_hk_command, make_sid, create_enable_periodic_hk_command, create_disable_periodic_hk_command
from tmtccmd.tc.pus_11_tc_sched import create_time_tagged_cmd
from tmtccmd.core.base import BackendRequest
from tmtccmd.pus import VerificationWrapper
@@ -56,7 +57,7 @@ from tmtccmd.util.tmtc_printer import FsfwTmTcPrinter
_LOGGER = logging.getLogger(__name__)
- EXAMPLE_PUS_APID = 0x02
EXAMPLE_PUS_APID = 0x50
class SatRsConfigHook(HookBase):
@@ -96,6 +97,8 @@ class SatRsConfigHook(HookBase):
)
srv_3 = OpCodeEntry()
srv_3.add(HkOpCodes.GENERATE_ONE_SHOT, "Generate AOCS one shot HK")
srv_3.add(HkOpCodes.ENABLE_PERIODIC, "Enable Periodic AOCS HK")
srv_3.add(HkOpCodes.DISABLE_PERIODIC, "Disable Periodic AOCS HK")
defs.add_service(
name=CoreServiceList.SERVICE_3,
info="PUS Service 3 Housekeeping",
@@ -118,6 +121,15 @@ class SatRsConfigHook(HookBase):
info="PUS Service 11 TC Scheduling",
op_code_entry=srv_11,
)
srv_200 = OpCodeEntry()
srv_200.add("0", "AOCS Mode Idle")
srv_200.add("1", "AOCS Mode On")
defs.add_service(
name = CoreServiceList.SERVICE_200,
info="PUS Custom Service 200 Mode",
op_code_entry=srv_200,
)
return defs
def perform_mode_operation(self, tmtc_backend: CcsdsTmtcBackend, mode: int):
@@ -178,9 +190,17 @@ class PusHandler(SpecificApidHandlerBase):
if pus_tm.subservice == 25:
if len(pus_tm.source_data) < 8:
raise ValueError("No addressable ID in HK packet")
- json_str = str(pus_tm.source_data[8:].decode('utf8'))
- json_object = json.loads(json_str)
- pprint.pprint(json_object)
#json_str = str(pus_tm.source_data[8:].decode('utf8'))
#json_object = json.loads(json_str)
#pprint.pprint(json_object)
print(pus_tm.tm_data.hex())
i = 8
print(struct.unpack('d', pus_tm.tm_data[8:16]))
while i < len(pus_tm.tm_data):
print(pus_tm.tm_data[i:i+8].hex())
print(struct.unpack('d', pus_tm.tm_data[i:i+8]))
i += 8
dedicated_handler = True
if service == 5:
tm_packet = Service5Tm.unpack(packet, time_reader=CdsShortTimestamp.empty())
@@ -242,12 +262,19 @@ class AcsHkIds(enum.IntEnum):
class HkOpCodes:
GENERATE_ONE_SHOT = ["0", "oneshot"]
ENABLE_PERIODIC = ["1", "enable periodic"]
DISABLE_PERIODIC = ["2", "disable periodic"]
def make_target_id(target_id: int) -> bytes:
byte_string = bytearray(struct.pack("!I", target_id))
return byte_string
def make_mode_submode(mode: int, submode: int) -> bytes:
byte_string = bytearray(struct.pack("!I", mode))
byte_string.extend(struct.pack("!I", submode))
return byte_string
class TcHandler(TcHandlerBase):
def __init__(
@@ -294,6 +321,15 @@ class TcHandler(TcHandlerBase):
def_proc = helper.to_def_procedure()
service = def_proc.service
op_code = def_proc.op_code
if service == CoreServiceList.SERVICE_200:
q.add_log_cmd("Sending PUS Mode Request telecommand")
if op_code == "0":
tc = PusTelecommand(service=200, subservice=1, app_data=make_mode_submode(0,0), apid=0x50)
return q.add_pus_tc(tc)
if op_code == "1":
tc = PusTelecommand(service=200, subservice=1, app_data=make_mode_submode(1,0), apid=0x50)
return q.add_pus_tc(tc)
if (
service == CoreServiceList.SERVICE_17
or service == CoreServiceList.SERVICE_17_ALT
@@ -346,6 +382,16 @@
make_addressable_id(RequestTargetId.ACS, AcsHkIds.MGM_SET)
)
)
if op_code in HkOpCodes.ENABLE_PERIODIC:
q.add_log_cmd("Sending periodic HK request")
tc = create_enable_periodic_hk_command(False, make_addressable_id(RequestTargetId.ACS, AcsHkIds.MGM_SET))
q.add_log_cmd(tc)
q.add_pus_tc(tc)
if op_code in HkOpCodes.DISABLE_PERIODIC:
q.add_log_cmd("Sending periodic HK request")
tc = create_disable_periodic_hk_command(False, make_addressable_id(RequestTargetId.ACS, AcsHkIds.MGM_SET))
q.add_log_cmd(tc)
q.add_pus_tc(tc)
pass

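For reference, a minimal ground-side sketch of how the AOCS HK payload handled above could be decoded once the raw prints are replaced. It assumes the layout that this commit's AocsHousekeeper::send_hk_packet and AocsDataMap::all_values_as_bytes appear to produce (4-byte set ID at offset 0, value block starting at offset 8, doubles written big-endian); the '>d' format and the helper name are assumptions, not part of the committed client.

import struct

# Hypothetical helper, not part of this commit: decode an AOCS HK set assuming
# a 4-byte set ID at offset 0 and big-endian doubles starting at offset 8.
def decode_aocs_hk(source_data: bytes):
    if len(source_data) < 8:
        raise ValueError("No addressable ID in HK packet")
    set_id = struct.unpack("!I", source_data[0:4])[0]
    values = [
        struct.unpack(">d", source_data[offset:offset + 8])[0]
        for offset in range(8, len(source_data) - 7, 8)
    ]
    return set_id, values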
src/aocs.rs

@@ -1,32 +1,36 @@
use crate::action::ActionRequest;
use crate::aocs::AOCSSensorMode::{Idle, SendingData};
use crate::can_ids::PackageId::AOCSDataStarTracker1;
use crate::can_ids::{DeviceId, PackageId, PackageModel};
- use crate::hk::{AOCSSensorData, AocsHousekeeper};
use crate::helpers::{ModeHelper, VerifHelper};
use crate::hk::{AocsDataMap, AocsDataType, AocsHousekeeper, CSS_VOLTAGE_1, CSS_VOLTAGE_2, CSS_VOLTAGE_3, CSS_VOLTAGE_4, CSS_VOLTAGE_5, CSS_VOLTAGE_6, MGM_VOLTAGE_1, MGM_VOLTAGE_2, MGM_VOLTAGE_3, STR_QUATERNION_1, STR_QUATERNION_2, STR_QUATERNION_3, STR_QUATERNION_4};
use crate::power_handler::{DeviceState, PowerSwitcher};
use crate::requests::{Request, RequestWithToken};
use crate::tmtc::TmStore;
use byteorder::{ByteOrder, LittleEndian};
use num_derive::ToPrimitive;
use satrs_core::hk::HkRequest;
- use satrs_core::mode::ModeRequest;
use satrs_core::mode::{ModeAndSubmode, ModeRequest};
use satrs_core::pool::StoreAddr;
- use satrs_core::power::SwitchId;
use satrs_core::power::{PowerSwitcherCommandSender, SwitchId};
use satrs_core::pus::verification::{
TcStateAccepted, TcStateNone, VerificationReporterWithSender, VerificationToken,
};
use satrs_core::pus::MpscPusInStoreSendError;
use satrs_core::seq_count::SeqCountProviderSyncClonable;
use satrs_core::spacepackets::time::cds::TimeProvider;
use serde::{Deserialize, Serialize};
- use std::sync::mpsc::{channel, Receiver, Sender};
use std::sync::mpsc::{channel, Receiver, Sender, TryRecvError};
use std::sync::{Arc, Mutex};
use std::u32;
- #[derive(ToPrimitive)]
#[derive(ToPrimitive, PartialEq, Copy, Clone)]
pub enum AOCSSensorMode {
Idle = 0,
SendingData = 1,
}
- pub trait AOCSSensorHandler {
pub trait AocsSensorHandler {
type Error;
fn get_package_id(&mut self) -> Result<PackageId, Self::Error>;
@@ -48,138 +52,21 @@ pub trait AOCSSensorHandler {
}
}
#[derive(Debug, Copy, Clone, Default, Serialize, Deserialize)]
pub struct MGMData {
axis_1: f64,
axis_2: f64,
axis_3: f64,
}
impl MGMData {
pub fn from_floats(axis_1: f64, axis_2: f64, axis_3: f64) -> MGMData {
MGMData {
axis_1,
axis_2,
axis_3,
}
}
pub fn new() -> MGMData {
MGMData {
axis_1: 0.0,
axis_2: 0.0,
axis_3: 0.0,
}
}
pub fn update(&mut self, axis_1: f64, axis_2: f64, axis_3: f64) {
self.axis_1 = axis_1;
self.axis_2 = axis_2;
self.axis_3 = axis_3;
}
pub fn to_array(&self) -> [f64; 3] {
[self.axis_1, self.axis_2, self.axis_3]
}
pub fn to_tuple(&self) -> (f64, f64, f64) {
(self.axis_1, self.axis_2, self.axis_3)
}
}
#[derive(Debug, Copy, Clone, Default, Serialize, Deserialize)]
pub struct CSSData {
voltage_1: f64,
voltage_2: f64,
voltage_3: f64,
voltage_4: f64,
voltage_5: f64,
voltage_6: f64,
}
impl CSSData {
pub fn from_floats(
voltage_1: f64,
voltage_2: f64,
voltage_3: f64,
voltage_4: f64,
voltage_5: f64,
voltage_6: f64,
) -> CSSData {
CSSData {
voltage_1,
voltage_2,
voltage_3,
voltage_4,
voltage_5,
voltage_6,
}
}
pub fn new() -> CSSData {
CSSData {
voltage_1: 0.0,
voltage_2: 0.0,
voltage_3: 0.0,
voltage_4: 0.0,
voltage_5: 0.0,
voltage_6: 0.0,
}
}
pub fn update(
&mut self,
voltage_1: f64,
voltage_2: f64,
voltage_3: f64,
voltage_4: f64,
voltage_5: f64,
voltage_6: f64,
) {
self.voltage_1 = voltage_1;
self.voltage_2 = voltage_2;
self.voltage_3 = voltage_3;
self.voltage_4 = voltage_4;
self.voltage_5 = voltage_5;
self.voltage_6 = voltage_6;
}
pub fn to_array(&self) -> [f64; 6] {
[
self.voltage_1,
self.voltage_2,
self.voltage_3,
self.voltage_4,
self.voltage_5,
self.voltage_6,
]
}
pub fn to_tuple(&self) -> (f64, f64, f64, f64, f64, f64) {
(
self.voltage_1,
self.voltage_2,
self.voltage_3,
self.voltage_4,
self.voltage_5,
self.voltage_6,
)
}
}
pub struct CSSHandler {
power_switcher: PowerSwitcher,
device_id: DeviceId,
switch_id: SwitchId,
device_state: DeviceState,
- device_mode: AOCSSensorMode,
mode: AOCSSensorMode,
- css_data: Arc<Mutex<CSSData>>,
aocs_data: Arc<Mutex<AocsDataMap>>,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
request_rx: Receiver<RequestWithToken>,
verif_helper: VerifHelper,
mode_helper: ModeHelper,
}
- impl AOCSSensorHandler for CSSHandler {
impl AocsSensorHandler for CSSHandler {
type Error = ();
fn get_package_id(&mut self) -> Result<PackageId, Self::Error> {
@@ -198,73 +85,164 @@ impl CSSHandler {
pub fn new(
power_switcher: PowerSwitcher,
device_id: DeviceId,
- switch_id: SwitchId,
device_state: DeviceState,
- device_mode: AOCSSensorMode,
aocs_data: Arc<Mutex<AocsDataMap>>,
- css_data: Arc<Mutex<CSSData>>,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
request_rx: Receiver<RequestWithToken>,
- ) -> CSSHandler {
- CSSHandler {
verif_helper: VerifHelper,
mode_helper: ModeHelper,
) -> Self {
let switch_id = device_id as u16;
Self {
power_switcher,
device_id,
switch_id,
device_state,
- device_mode,
mode: Idle,
- css_data: Arc::new(Mutex::new(CSSData::default())),
aocs_data,
can_tx,
can_rx,
request_rx,
verif_helper,
mode_helper,
}
}
pub fn get_data_ref(&mut self) -> Arc<Mutex<CSSData>> { pub fn periodic_op(&mut self) {
self.css_data.clone() self.handle_requests();
self.read_can();
} }
pub fn css_core_task(&mut self) { fn handle_requests(&mut self) {
self.handle_request_messages(); if let Ok(req) = self.request_rx.try_recv() {
} let (req, start_token) = self
.verif_helper
.start_and_unwrap(req)
.expect("error sending start of execution");
match req {
Request::HkRequest(_) => {
if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
}
}
Request::ModeRequest(req) => match req {
ModeRequest::SetMode(mode_submode) => {
if let Some(aocs_mode) = mode_submode_to_aocs_mode(mode_submode) {
self.mode = aocs_mode;
}
fn handle_request_messages(&mut self) { if let Some(token) = start_token {
let request = self.request_rx.try_recv().unwrap(); self.verif_helper
let token = request.1.unwrap(); .completion(token)
match request.0 { .expect("error sending completion success");
Request::HkRequest(_) => {} }
Request::ModeRequest(request) => { }
self.handle_mode_request(request, token); ModeRequest::ReadMode => {
self.mode_helper
.make_and_send_mode_reply(aocs_mode_to_mode_submode(self.mode))
.expect("error sending mode reply");
if let Some(token) = start_token {
self.verif_helper
.completion(token)
.expect("error sending completion success");
}
}
ModeRequest::AnnounceMode => {
if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
}
}
ModeRequest::AnnounceModeRecursive => {
self.mode_helper
.make_and_send_mode_reply(aocs_mode_to_mode_submode(self.mode))
.expect("error sending mode reply");
if let Some(token) = start_token {
self.verif_helper
.completion(token)
.expect("error sending completion failure");
}
}
},
Request::ActionRequest(_) => {
if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
}
}
} }
Request::ActionRequest(request) => self.handle_action_request(request, token),
} }
} }
fn set_mode_reading(&mut self) {
if self.mode == SendingData {
return;
}
fn handle_mode_request( // TODO: currently unrecoverable, should probably be changed
&mut self, self.power_switcher
request: ModeRequest, .send_switch_on_cmd(self.switch_id)
token: VerificationToken<TcStateAccepted>, .expect("error sending switch on cmd");
) { self.enable_sensor_data_generation()
match request { .expect("error enabling sensor data generation");
ModeRequest::SetMode(mode) => match mode.mode() { self.mode = SendingData;
0 => {} }
fn set_mode_idle(&mut self) {
if self.mode == Idle {
return;
}
self.disable_sensor_data_generation()
.expect("error disabling sensor data generation");
self.power_switcher
.send_switch_off_cmd(self.switch_id)
.expect("error sending switch off cmd");
self.mode = Idle;
}
fn read_can(&mut self) {
if let Ok(package) = self.can_rx.try_recv() {
let mut map = self.aocs_data.lock().expect("error locking data map");
let float_data = self.decode_sensor_data(package.data());
match package.package_id() {
PackageId::AOCSDataSunSensor1 => {
map.update_value(CSS_VOLTAGE_1, AocsDataType::float_value(float_data))
.expect("error updating value");
}
PackageId::AOCSDataSunSensor2 => {
map.update_value(CSS_VOLTAGE_2, AocsDataType::float_value(float_data))
.expect("error updating value");
}
PackageId::AOCSDataSunSensor3 => {
map.update_value(CSS_VOLTAGE_3, AocsDataType::float_value(float_data))
.expect("error updating value");
}
PackageId::AOCSDataSunSensor4 => {
map.update_value(CSS_VOLTAGE_4, AocsDataType::float_value(float_data))
.expect("error updating value");
}
PackageId::AOCSDataSunSensor5 => {
map.update_value(CSS_VOLTAGE_5, AocsDataType::float_value(float_data))
.expect("error updating value");
}
PackageId::AOCSDataSunSensor6 => {
map.update_value(CSS_VOLTAGE_6, AocsDataType::float_value(float_data))
.expect("error updating value");
}
_ => {} _ => {}
}, }
ModeRequest::ReadMode => {} drop(map);
ModeRequest::AnnounceMode => {}
ModeRequest::AnnounceModeRecursive => {}
} }
} }
fn handle_action_request( fn decode_sensor_data(&self, buf: &[u8]) -> f64 {
&mut self, LittleEndian::read_f64(&buf)
request: ActionRequest,
token: VerificationToken<TcStateAccepted>,
) {
match request {
ActionRequest::ImageRequest(target_id) => {}
ActionRequest::OrientationRequest(_) => {}
ActionRequest::PointingRequest(_) => {}
}
} }
} }
@@ -273,14 +251,16 @@ pub struct MGMHandler {
device_id: DeviceId,
switch_id: SwitchId,
device_state: DeviceState,
mode: AOCSSensorMode,
aocs_data: Arc<Mutex<AocsDataMap>>,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
- mode: AOCSSensorMode,
request_rx: Receiver<RequestWithToken>,
- mgm_data: Arc<Mutex<MGMData>>,
verif_helper: VerifHelper,
mode_helper: ModeHelper,
}
- impl AOCSSensorHandler for MGMHandler {
impl AocsSensorHandler for MGMHandler {
type Error = ();
fn get_package_id(&mut self) -> Result<PackageId, Self::Error> {
@@ -299,113 +279,478 @@ impl MGMHandler {
pub fn new(
power_switcher: PowerSwitcher,
device_id: DeviceId,
device_state: DeviceState,
aocs_data: Arc<Mutex<AocsDataMap>>,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
- action_rx: Receiver<RequestWithToken>,
request_rx: Receiver<RequestWithToken>,
- ) -> MGMHandler {
verif_helper: VerifHelper,
mode_helper: ModeHelper,
) -> Self {
let switch_id = device_id as u16;
- MGMHandler {
Self {
power_switcher,
device_id,
switch_id,
- device_state: DeviceState::Off,
device_state,
mode: Idle,
aocs_data,
can_tx,
can_rx,
- mode: AOCSSensorMode::Idle,
request_rx,
- request_rx: action_rx,
verif_helper,
- mgm_data: Arc::new(Mutex::new(MGMData::new())),
mode_helper,
}
}
- pub fn get_data_ref(&mut self) -> Arc<Mutex<MGMData>> {
- self.mgm_data.clone()
- }
pub fn periodic_op(&mut self) {
- self.update_mode();
self.handle_requests();
- self.read_sensor_data();
self.read_can();
}
pub fn update_mode(&mut self) {} fn handle_requests(&mut self) {
if let Ok(req) = self.request_rx.try_recv() {
let (req, start_token) = self
.verif_helper
.start_and_unwrap(req)
.expect("error sending start of execution");
match req {
Request::HkRequest(_) => {
if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
}
}
Request::ModeRequest(req) => match req {
ModeRequest::SetMode(mode_submode) => {
if let Some(aocs_mode) = mode_submode_to_aocs_mode(mode_submode) {
self.mode = aocs_mode;
}
pub fn handle_requests(&mut self) { if let Some(token) = start_token {
if self.device_state == DeviceState::On { self.verif_helper
if let Ok(request) = self.request_rx.try_recv() { .completion(token)
match request.0 { .expect("error sending completion success");
Request::HkRequest(hk_req) => { }
//self.handle_hk_request(hk_req);
} }
Request::ActionRequest(_action_request) => { ModeRequest::ReadMode => {
//self.handle_action_request(action_request); self.mode_helper
.make_and_send_mode_reply(aocs_mode_to_mode_submode(self.mode))
.expect("error sending mode reply");
if let Some(token) = start_token {
self.verif_helper
.completion(token)
.expect("error sending completion success");
}
} }
Request::ModeRequest(_mode_request) => { ModeRequest::AnnounceMode => {
//self.handle_mode_request(mode_request); if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
}
}
ModeRequest::AnnounceModeRecursive => {
self.mode_helper
.make_and_send_mode_reply(aocs_mode_to_mode_submode(self.mode))
.expect("error sending mode reply");
if let Some(token) = start_token {
self.verif_helper
.completion(token)
.expect("error sending completion failure");
}
}
},
Request::ActionRequest(_) => {
if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
} }
} }
} }
} }
} }
fn set_mode_reading(&mut self) {
pub fn handle_mode_request(&mut self, mode_request: ModeRequest) { if self.mode == SendingData {
match mode_request { return;
ModeRequest::SetMode(_) => {}
ModeRequest::ReadMode => {}
ModeRequest::AnnounceMode => {}
ModeRequest::AnnounceModeRecursive => {}
} }
// TODO: currently unrecoverable, should probably be changed
self.power_switcher
.send_switch_on_cmd(self.switch_id)
.expect("error sending switch on cmd");
self.enable_sensor_data_generation()
.expect("error enabling sensor data generation");
self.mode = SendingData;
} }
/* fn set_mode_idle(&mut self) {
pub fn handle_hk_request(&mut self, hk_req: HkRequest) { if self.mode == Idle {
match hk_req { return;
HkRequest::OneShot(_) => {
self.can_tx.send(PackageModel::new(PackageId::AOCSDataRequestMGM1, &[2]).unwrap()).unwrap();
}
HkRequest::Enable(_) => {
if !self.sensor_data_enabled {
self.sensor_data_enabled = true;
self.can_tx.send(PackageModel::new(PackageId::AOCSDataRequestMGM1, &[1]).unwrap()).unwrap();
}
}
HkRequest::Disable(_) => {
if self.sensor_data_enabled {
self.sensor_data_enabled = false;
self.can_tx.send(PackageModel::new(PackageId::AOCSDataRequestMGM1, &[0]).unwrap()).unwrap();
}
}
HkRequest::ModifyCollectionInterval(_, _) => {}
} }
self.disable_sensor_data_generation()
.expect("error disabling sensor data generation");
self.power_switcher
.send_switch_off_cmd(self.switch_id)
.expect("error sending switch off cmd");
self.mode = Idle;
} }
*/ fn read_can(&mut self) {
pub fn read_sensor_data(&mut self) {
if let Ok(package) = self.can_rx.try_recv() { if let Ok(package) = self.can_rx.try_recv() {
let mut map = self.aocs_data.lock().expect("error locking data map");
let float_data = self.decode_sensor_data(package.data()); let float_data = self.decode_sensor_data(package.data());
if let Ok(mut mgm_data) = self.mgm_data.lock() { match package.package_id() {
match package.package_id() { PackageId::AOCSDataStarTracker1 => {
PackageId::AOCSDataMGM1 => mgm_data.axis_1 = float_data, map.update_value(STR_QUATERNION_1, AocsDataType::float_value(float_data))
PackageId::AOCSDataMGM2 => mgm_data.axis_2 = float_data, .expect("error updating value");
PackageId::AOCSDataMGM3 => mgm_data.axis_3 = float_data,
_ => {}
} }
PackageId::AOCSDataStarTracker2 => {
map.update_value(STR_QUATERNION_2, AocsDataType::float_value(float_data))
.expect("error updating value");
}
PackageId::AOCSDataStarTracker3 => {
map.update_value(STR_QUATERNION_3, AocsDataType::float_value(float_data))
.expect("error updating value");
}
PackageId::AOCSDataStarTracker4 => {
map.update_value(STR_QUATERNION_4, AocsDataType::float_value(float_data))
.expect("error updating value");
}
_ => {}
} }
drop(map);
} }
} }
pub fn decode_sensor_data(&mut self, buf: &[u8]) -> f64 { fn decode_sensor_data(&self, buf: &[u8]) -> f64 {
LittleEndian::read_f64(&buf) LittleEndian::read_f64(&buf)
} }
//pub fn handle_action_request(&mut self, action_request: ActionRequest) {}
} }
pub struct AOCSController { pub struct STRHandler {
aocs_housekeeper: AocsHousekeeper, power_switcher: PowerSwitcher,
mgm_handler: MGMHandler, device_id: DeviceId,
switch_id: SwitchId,
device_state: DeviceState,
mode: AOCSSensorMode,
aocs_data: Arc<Mutex<AocsDataMap>>,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
request_rx: Receiver<RequestWithToken>,
verif_helper: VerifHelper,
mode_helper: ModeHelper,
}
impl AocsSensorHandler for STRHandler {
type Error = ();
fn get_package_id(&mut self) -> Result<PackageId, Self::Error> {
Ok(PackageId::AOCSDataRequestStarTracker)
}
fn send_message(&mut self, id: PackageId, buf: &[u8]) -> Result<(), Self::Error> {
self.can_tx
.send(PackageModel::new(id, buf).unwrap())
.unwrap();
return Ok(());
}
}
impl STRHandler {
pub fn new(
power_switcher: PowerSwitcher,
device_id: DeviceId,
device_state: DeviceState,
aocs_data: Arc<Mutex<AocsDataMap>>,
can_tx: Sender<PackageModel>,
can_rx: Receiver<PackageModel>,
request_rx: Receiver<RequestWithToken>,
verif_helper: VerifHelper,
mode_helper: ModeHelper,
) -> Self {
let switch_id = device_id as u16;
Self {
power_switcher,
device_id,
switch_id,
device_state,
mode: Idle,
aocs_data,
can_tx,
can_rx,
request_rx,
verif_helper,
mode_helper,
}
}
pub fn periodic_op(&mut self) {
self.handle_requests();
self.read_can();
}
fn handle_requests(&mut self) {
if let Ok(req) = self.request_rx.try_recv() {
let (req, start_token) = self
.verif_helper
.start_and_unwrap(req)
.expect("error sending start of execution");
match req {
Request::HkRequest(_) => {
if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
}
}
Request::ModeRequest(req) => match req {
ModeRequest::SetMode(mode_submode) => {
if let Some(aocs_mode) = mode_submode_to_aocs_mode(mode_submode) {
self.mode = aocs_mode;
}
if let Some(token) = start_token {
self.verif_helper
.completion(token)
.expect("error sending completion success");
}
}
ModeRequest::ReadMode => {
self.mode_helper
.make_and_send_mode_reply(aocs_mode_to_mode_submode(self.mode))
.expect("error sending mode reply");
if let Some(token) = start_token {
self.verif_helper
.completion(token)
.expect("error sending completion success");
}
}
ModeRequest::AnnounceMode => {
if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
}
}
ModeRequest::AnnounceModeRecursive => {
self.mode_helper
.make_and_send_mode_reply(aocs_mode_to_mode_submode(self.mode))
.expect("error sending mode reply");
if let Some(token) = start_token {
self.verif_helper
.completion(token)
.expect("error sending completion failure");
}
}
},
Request::ActionRequest(_) => {
if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
}
}
}
}
}
fn set_mode_reading(&mut self) {
if self.mode == SendingData {
return;
}
// TODO: currently unrecoverable, should probably be changed
self.power_switcher
.send_switch_on_cmd(self.switch_id)
.expect("error sending switch on cmd");
self.enable_sensor_data_generation()
.expect("error enabling sensor data generation");
self.mode = SendingData;
}
fn set_mode_idle(&mut self) {
if self.mode == Idle {
return;
}
self.disable_sensor_data_generation()
.expect("error disabling sensor data generation");
self.power_switcher
.send_switch_off_cmd(self.switch_id)
.expect("error sending switch off cmd");
self.mode = Idle;
}
fn read_can(&mut self) {
if let Ok(package) = self.can_rx.try_recv() {
let mut map = self.aocs_data.lock().expect("error locking data map");
let float_data = self.decode_sensor_data(package.data());
match package.package_id() {
PackageId::AOCSDataMGM1 => {
map.update_value(MGM_VOLTAGE_1, AocsDataType::float_value(float_data))
.expect("error updating value");
}
PackageId::AOCSDataMGM2 => {
map.update_value(MGM_VOLTAGE_2, AocsDataType::float_value(float_data))
.expect("error updating value");
}
PackageId::AOCSDataMGM3 => {
map.update_value(MGM_VOLTAGE_3, AocsDataType::float_value(float_data))
.expect("error updating value");
}
_ => {}
}
drop(map);
}
}
fn decode_sensor_data(&self, buf: &[u8]) -> f64 {
LittleEndian::read_f64(&buf)
}
}
pub struct AocsController {
request_rx: Receiver<RequestWithToken>, request_rx: Receiver<RequestWithToken>,
hk_request_tx: Sender<RequestWithToken>,
mgm_request_tx: Sender<RequestWithToken>, mgm_request_tx: Sender<RequestWithToken>,
css_request_tx: Sender<RequestWithToken>,
str_request_tx: Sender<RequestWithToken>,
seq_count_provider: SeqCountProviderSyncClonable,
verif_helper: VerifHelper,
mode_helper: ModeHelper,
mode: AOCSSensorMode,
}
impl AocsController {
pub fn new(
request_rx: Receiver<RequestWithToken>,
mgm_request_tx: Sender<RequestWithToken>,
css_request_tx: Sender<RequestWithToken>,
str_request_tx: Sender<RequestWithToken>,
seq_count_provider: SeqCountProviderSyncClonable,
verif_helper: VerifHelper,
mode_helper: ModeHelper,
) -> Self {
Self {
request_rx,
mgm_request_tx,
css_request_tx,
str_request_tx,
seq_count_provider,
verif_helper,
mode_helper,
mode: Idle,
}
}
pub fn periodic_op(&mut self) {
self.handle_requests();
}
fn handle_requests(&mut self) {
if let Ok(req) = self.request_rx.try_recv() {
let (req, start_token) = self
.verif_helper
.start_and_unwrap(req)
.expect("error sending start of execution");
match req {
Request::HkRequest(_) => {
if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
}
}
Request::ModeRequest(req) => match req {
ModeRequest::SetMode(mode_submode) => {
if let Some(aocs_mode) = mode_submode_to_aocs_mode(mode_submode) {
self.mode = aocs_mode;
}
self.mgm_request_tx
.send(RequestWithToken(Request::ModeRequest(req), None))
.expect("error sending to mgm");
self.css_request_tx
.send(RequestWithToken(Request::ModeRequest(req), None))
.expect("error sending to css");
self.str_request_tx
.send(RequestWithToken(Request::ModeRequest(req), None))
.expect("error sending to str");
if let Some(token) = start_token {
self.verif_helper
.completion(token)
.expect("error sending completion success");
}
}
ModeRequest::ReadMode => {
self.mode_helper
.make_and_send_mode_reply(aocs_mode_to_mode_submode(self.mode))
.expect("error sending mode reply");
if let Some(token) = start_token {
self.verif_helper
.completion(token)
.expect("error sending completion success");
}
}
ModeRequest::AnnounceMode => {
if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
}
}
ModeRequest::AnnounceModeRecursive => {
self.mode_helper
.make_and_send_mode_reply(aocs_mode_to_mode_submode(self.mode))
.expect("error sending mode reply");
self.mgm_request_tx
.send(RequestWithToken(Request::ModeRequest(req), None))
.expect("error sending mode request to mgm handler");
self.css_request_tx
.send(RequestWithToken(Request::ModeRequest(req), None))
.expect("error sending mode request to css handler");
self.str_request_tx
.send(RequestWithToken(Request::ModeRequest(req), None))
.expect("error sending mode request to str handler");
if let Some(token) = start_token {
self.verif_helper
.completion(token)
.expect("error sending completion failure");
}
}
},
Request::ActionRequest(_) => {
if let Some(token) = start_token {
self.verif_helper
.completion_failure(token)
.expect("error sending completion failure");
}
}
}
}
}
}
fn aocs_mode_to_mode_submode(sensor_mode: AOCSSensorMode) -> ModeAndSubmode {
match sensor_mode {
Idle => ModeAndSubmode::new(0, 0),
SendingData => ModeAndSubmode::new(1, 0),
}
}
fn mode_submode_to_aocs_mode(mode_submode: ModeAndSubmode) -> Option<AOCSSensorMode> {
match mode_submode.mode() {
0 => Some(Idle),
1 => Some(SendingData),
_ => None,
}
}
pub fn core_aocs_loop() {}

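All of the sensor handlers above decode each CAN payload field with LittleEndian::read_f64, i.e. an 8-byte little-endian double. A small illustrative round trip of that encoding for the simulator or test side (not code from this repository):

import struct

def encode_can_float(value: float) -> bytes:
    # matches LittleEndian::read_f64 in decode_sensor_data
    return struct.pack("<d", value)

def decode_can_float(data: bytes) -> float:
    return struct.unpack("<d", data[:8])[0]

assert decode_can_float(encode_can_float(1.25)) == 1.25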
src/helpers.rs (new file, 157 lines)

@ -0,0 +1,157 @@
use crate::requests::{Request, RequestWithToken};
use crate::tmtc::TmStore;
use eurosim_obsw::tmtc_err;
use satrs_core::mode::ModeAndSubmode;
use satrs_core::pool::StoreAddr;
use satrs_core::pus::mode::Subservice::TmModeReply;
use satrs_core::pus::verification::{
FailParams, TcStateAccepted, TcStateStarted, VerificationReporterWithSender, VerificationToken,
};
use satrs_core::pus::MpscPusInStoreSendError;
use satrs_core::seq_count::{SeqCountProviderSyncClonable, SequenceCountProviderCore};
use satrs_core::spacepackets::tc::PusTc;
use satrs_core::spacepackets::time::cds::TimeProvider;
use satrs_core::spacepackets::time::TimeWriter;
use satrs_core::spacepackets::tm::{PusTm, PusTmSecondaryHeader};
use satrs_core::spacepackets::SpHeader;
use std::sync::mpsc::Sender;
#[derive(Clone)]
pub struct VerifHelper {
verif_reporter: VerificationReporterWithSender<MpscPusInStoreSendError>,
time_stamp_buf: [u8; 7],
}
impl VerifHelper {
pub fn new(verif_reporter: VerificationReporterWithSender<MpscPusInStoreSendError>) -> Self {
Self {
verif_reporter,
time_stamp_buf: [0; 7],
}
}
pub fn start_and_unwrap(
&mut self,
request_with_token: RequestWithToken,
) -> Result<(Request, Option<VerificationToken<TcStateStarted>>), ()> {
match request_with_token.1 {
None => Ok((request_with_token.0, None)),
Some(token) => {
self.update_time_stamp();
if let Ok(start_token) = self //implement this for verification
.verif_reporter
.start_success(token, Some(&self.time_stamp_buf))
{
return Ok((request_with_token.0, Some(start_token)));
} else {
return Err(());
}
}
}
}
pub fn start_failure(&mut self, request_with_token: RequestWithToken) -> Result<(), ()> {
if let Some(token) = request_with_token.1 {
self.update_time_stamp();
if let Ok(()) = self.verif_reporter.start_failure(
token,
FailParams::new(
Some(&mut self.time_stamp_buf),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
),
) {
return Ok(());
} else {
return Err(());
}
}
Ok(())
}
pub fn completion(&mut self, start_token: VerificationToken<TcStateStarted>) -> Result<(), ()> {
self.update_time_stamp();
if let Ok(()) = self
.verif_reporter
.completion_success(start_token, Some(&self.time_stamp_buf))
{
return Ok(());
} else {
return Err(());
}
}
pub fn completion_failure(
&mut self,
start_token: VerificationToken<TcStateStarted>,
) -> Result<(), ()> {
self.update_time_stamp();
if let Ok(()) = self.verif_reporter.completion_failure(
start_token,
FailParams::new(
Some(&mut self.time_stamp_buf),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
),
) {
return Ok(());
} else {
return Err(());
}
}
fn update_time_stamp(&mut self) {
let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap();
cds_stamp.write_to_bytes(&mut self.time_stamp_buf).unwrap();
}
}
pub struct ModeHelper {
apid: u16,
seq_count_provider: SeqCountProviderSyncClonable,
tm_store: TmStore,
tm_funnel_tx: Sender<StoreAddr>,
buf: [u8; 6],
time_stamp_buf: [u8; 7],
}
impl ModeHelper {
pub fn new(
apid: u16,
seq_count_provider: SeqCountProviderSyncClonable,
tm_store: TmStore,
tm_funnel_tx: Sender<StoreAddr>,
) -> Self {
Self {
apid,
seq_count_provider,
tm_store,
tm_funnel_tx,
buf: [0; 6],
time_stamp_buf: [0; 7],
}
}
fn update_time_stamp(&mut self) {
let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap();
cds_stamp.write_to_bytes(&mut self.time_stamp_buf).unwrap();
}
pub fn make_and_send_mode_reply(
&mut self,
mode_submode: ModeAndSubmode,
) -> Result<(), std::sync::mpsc::SendError<StoreAddr>> {
self.update_time_stamp();
let mut sp_header =
SpHeader::tm_unseg(self.apid, self.seq_count_provider.get_and_increment(), 0).unwrap();
self.buf[0..4].copy_from_slice(&mode_submode.mode().to_be_bytes());
self.buf[4..6].copy_from_slice(&mode_submode.submode().to_be_bytes());
let mut len = 6;
let data = self.buf[0..len].to_vec();
let tm_sec_header =
PusTmSecondaryHeader::new_simple(200, TmModeReply as u8, &self.time_stamp_buf);
let tm = PusTm::new(&mut sp_header, tm_sec_header, Some(&self.buf[0..len]), true);
let addr = self.tm_store.add_pus_tm(&tm);
self.tm_funnel_tx.send(addr)
}
}

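ModeHelper::make_and_send_mode_reply above packs the reply payload as a big-endian u32 mode in bytes 0..4 followed by a big-endian u16 submode in bytes 4..6 of a service 200 TM. A minimal ground-side decode sketch under that assumption (not part of the committed client):

import struct

def decode_mode_reply(source_data: bytes):
    # 4-byte big-endian mode, 2-byte big-endian submode, as written by
    # ModeHelper::make_and_send_mode_reply
    mode, submode = struct.unpack("!IH", source_data[0:6])
    return mode, submode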
src/hk.rs (185 changed lines)

@@ -1,13 +1,14 @@
use std::collections::HashMap;
use strum::IntoEnumIterator;
- use crate::aocs::{CSSData, MGMData};
use crate::hk::AocsDataType::float_value;
use crate::requests::Request;
use crate::requests::RequestWithToken;
use crate::tmtc::{TmStore, AOCS_HK_APID};
- use byteorder::{ByteOrder, LittleEndian};
use byteorder::{BigEndian, ByteOrder, LittleEndian};
use chrono::Duration;
use eurosim_obsw::hk_err;
use log::debug;
use num_enum::FromPrimitive;
use satrs_core::hk::{HkRequest, UniqueId};
use satrs_core::pool::StoreAddr;
@@ -15,12 +16,13 @@ use satrs_core::pus::hk::Subservice;
use satrs_core::pus::verification::{FailParams, VerificationReporterWithSender};
use satrs_core::pus::MpscPusInStoreSendError;
use satrs_core::seq_count::{SeqCountProviderSyncClonable, SequenceCountProviderCore};
- use satrs_core::spacepackets::time::cds::TimeProvider;
use satrs_core::spacepackets::time::cds::{CdsCommon, TimeProvider};
- use satrs_core::spacepackets::time::TimeWriter;
use satrs_core::spacepackets::time::{CcsdsTimeProvider, TimeWriter};
use satrs_core::spacepackets::tm::{PusTm, PusTmSecondaryHeader};
use satrs_core::spacepackets::SpHeader;
use satrs_core::tmtc::AddressableId;
use serde::{Deserialize, Serialize};
use serde_json::from_slice;
use std::ops::Deref;
use std::sync::mpsc::{Receiver, Sender};
use std::sync::{Arc, Mutex};
@@ -42,6 +44,8 @@ pub const STR_QUATERNION_2: UniqueId = 11;
pub const STR_QUATERNION_3: UniqueId = 12;
pub const STR_QUATERNION_4: UniqueId = 13;
pub const AOCS_HK_NUM_OF_ELEMENTS: usize = 13 * 8;
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum AocsHkIds {
TestAocsSet = 1,
@@ -85,6 +89,24 @@ impl AocsDataMap {
pub fn get_value(&self, id: UniqueId) -> Option<&AocsDataType> {
self.map.get(&id)
}
pub fn all_values_as_bytes(&self) -> [u8; AOCS_HK_NUM_OF_ELEMENTS] {
let mut buf: [u8; AOCS_HK_NUM_OF_ELEMENTS] = [0; AOCS_HK_NUM_OF_ELEMENTS];
let mut i = 0;
let map = self.map.clone();
let mut map_as_vec = map.keys().collect::<Vec<&UniqueId>>();
map_as_vec.sort();
for element in map_as_vec {
match map.get(element).unwrap() {
float_value(val) => {
let val_as_byte = BigEndian::write_f64(&mut (buf[i..i + 8]), *val);
}
}
i += 8;
}
debug!("{:?}", buf);
buf
}
} }
#[derive(Debug, EnumIter, PartialEq, Copy, Clone)] #[derive(Debug, EnumIter, PartialEq, Copy, Clone)]
@ -92,52 +114,6 @@ pub enum AocsDataType {
float_value(f64), float_value(f64),
} }
#[derive(Serialize, Deserialize)]
pub struct AOCSSensorData {
mgm_data: MGMData, // Voltage for 3 axis
css_data: CSSData, // Voltage for 18 sun sensors
str_data: [f64; 4], // Quaternion for position of satellite
}
impl AOCSSensorData {
pub fn new() -> AOCSSensorData {
let mgm_data = MGMData::default();
let css_data = CSSData::default();
let str_data = [0.0; 4];
AOCSSensorData {
mgm_data,
css_data,
str_data,
}
}
pub fn update_mgm_data(&mut self, mgm_data: &Arc<Mutex<MGMData>>) {
let data = mgm_data.lock().unwrap();
self.mgm_data = *data;
}
pub fn update_css_data(&mut self, css_data: &Arc<Mutex<CSSData>>) {
let data = css_data.lock().unwrap();
self.css_data = *data;
}
pub fn write_str_data(&mut self, str_data: [f64; 4]) {
self.str_data = str_data;
}
pub fn get_mgm_data(&mut self) -> MGMData {
self.mgm_data
}
pub fn read_css_data(&mut self) -> CSSData {
self.css_data
}
pub fn read_str_data(&mut self) -> [f64; 4] {
self.str_data
}
}
pub struct AocsHousekeeper { pub struct AocsHousekeeper {
data_map: Arc<Mutex<AocsDataMap>>, data_map: Arc<Mutex<AocsDataMap>>,
id_list: Vec<UniqueId>, id_list: Vec<UniqueId>,
@ -147,6 +123,9 @@ pub struct AocsHousekeeper {
aocs_tm_funnel_tx: Sender<StoreAddr>, aocs_tm_funnel_tx: Sender<StoreAddr>,
verif_reporter: VerificationReporterWithSender<MpscPusInStoreSendError>, verif_reporter: VerificationReporterWithSender<MpscPusInStoreSendError>,
periodic_hk_ids: Option<Vec<UniqueId>>, periodic_hk_ids: Option<Vec<UniqueId>>,
periodic_on: bool,
prev_time_step: TimeProvider,
collection_interval: Duration,
} }
impl AocsHousekeeper { impl AocsHousekeeper {
@ -183,9 +162,61 @@ impl AocsHousekeeper {
aocs_tm_funnel_tx, aocs_tm_funnel_tx,
verif_reporter, verif_reporter,
periodic_hk_ids: None, periodic_hk_ids: None,
periodic_on: false,
prev_time_step: TimeProvider::from_now_with_u16_days().unwrap(),
collection_interval: Duration::seconds(0),
} }
} }
pub fn new_with_collection_interval(
sensor_data_pool: Arc<Mutex<AocsDataMap>>,
request_rx: Receiver<RequestWithToken>,
seq_count_provider: SeqCountProviderSyncClonable,
aocs_tm_store: TmStore,
aocs_tm_funnel_tx: Sender<StoreAddr>,
verif_reporter: VerificationReporterWithSender<MpscPusInStoreSendError>,
collection_interval: Duration,
) -> AocsHousekeeper {
let id_list = vec![
MGM_VOLTAGE_1,
MGM_VOLTAGE_2,
MGM_VOLTAGE_3,
CSS_VOLTAGE_1,
CSS_VOLTAGE_2,
CSS_VOLTAGE_3,
CSS_VOLTAGE_4,
CSS_VOLTAGE_5,
CSS_VOLTAGE_6,
STR_QUATERNION_1,
STR_QUATERNION_2,
STR_QUATERNION_3,
STR_QUATERNION_4,
];
AocsHousekeeper {
data_map: sensor_data_pool,
id_list,
request_rx,
seq_count_provider,
aocs_tm_store,
aocs_tm_funnel_tx,
verif_reporter,
periodic_hk_ids: None,
periodic_on: false,
prev_time_step: TimeProvider::from_now_with_u16_days().unwrap(),
collection_interval,
}
}
pub fn periodic_op(&mut self) {
self.handle_hk_request();
self.periodic_hk();
}
// The housekeeper has methods for sending both individual variables as TM and the entire AOCS data set.
// Sending individual values produces far too much traffic, so it is not used.
// The functions for it are kept here because the logic could easily be adapted to send different sets (currently not implemented);
// for now, the ..._individual functions are rather unnecessary.
pub fn handle_hk_request(&mut self) {
let mut time_stamp_buf: [u8; 7] = [0; 7];
@@ -198,9 +229,9 @@ impl AocsHousekeeper {
.start_success(request_with_token.1.unwrap(), Some(&time_stamp_buf))
.expect("Error sending start success");
if let Ok(()) = match hk_req {
- HkRequest::OneShot(id) => self.one_shot_hk(id),
HkRequest::OneShot(id) => self.one_shot(),
- HkRequest::Enable(id) => self.enable_hk(id),
HkRequest::Enable(id) => self.enable_periodic(),
- HkRequest::Disable(id) => self.disable_hk(id),
HkRequest::Disable(id) => self.disable_periodic(),
HkRequest::ModifyCollectionInterval(_id, _collection_interval) => Ok(()),
} {
let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap();
@@ -226,7 +257,24 @@
}
}
pub fn check_period(&mut self) -> bool {
let current_time = TimeProvider::from_now_with_u16_days().unwrap();
let prev_time = self.prev_time_step.clone();
let delta = current_time.date_time().unwrap() - prev_time.date_time().unwrap();
delta > self.collection_interval
}
pub fn periodic_hk(&mut self) { pub fn periodic_hk(&mut self) {
if self.periodic_on && self.check_period() {
self.prev_time_step = TimeProvider::from_now_with_u16_days().unwrap();
let map = self.data_map.lock().unwrap();
let mut data_as_bytes = map.all_values_as_bytes();
drop(map);
self.send_hk_packet(1, &mut data_as_bytes);
}
}
pub fn periodic_hk_individual(&mut self) {
//let json_string = self.aocs_data_to_str(); //let json_string = self.aocs_data_to_str();
let data = self.data_map.lock().unwrap(); let data = self.data_map.lock().unwrap();
let data_copy = data.clone(); let data_copy = data.clone();
@ -242,7 +290,16 @@ impl AocsHousekeeper {
}
}
- pub fn one_shot_hk(&mut self, id: UniqueId) -> Result<(), ()> {
pub fn one_shot(&mut self) -> Result<(), ()> {
let map = self.data_map.lock().unwrap();
let data_as_bytes = map.all_values_as_bytes();
drop(map);
// currently gives 1 as id, meaning one is the spid, if multiple spids in this housekeeper, this needs to change!
self.send_hk_packet(1, &data_as_bytes);
Ok(())
}
pub fn one_shot_hk_individual(&mut self, id: UniqueId) -> Result<(), ()> {
let data = self.data_map.lock().unwrap();
let data_copy = data.clone();
drop(data);
@@ -255,7 +312,12 @@
Err(())
}
- pub fn enable_hk(&mut self, id: UniqueId) -> Result<(), ()> {
pub fn enable_periodic(&mut self) -> Result<(), ()> {
self.periodic_on = true;
Ok(())
}
pub fn enable_periodic_individual(&mut self, id: UniqueId) -> Result<(), ()> {
if !self.id_list.contains(&id) {
return Err(());
}
@@ -271,7 +333,12 @@
Ok(())
}
- pub fn disable_hk(&mut self, id: UniqueId) -> Result<(), ()> {
pub fn disable_periodic(&mut self) -> Result<(), ()> {
self.periodic_on = false;
Ok(())
}
pub fn disable_periodic_individual(&mut self, id: UniqueId) -> Result<(), ()> {
if !self.id_list.contains(&id) {
return Err(());
}
@ -333,11 +400,13 @@ impl AocsHousekeeper {
let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap(); let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap();
cds_stamp.write_to_bytes(&mut time_stamp_buf).unwrap(); cds_stamp.write_to_bytes(&mut time_stamp_buf).unwrap();
huge_buf[0..4].copy_from_slice(&id.to_be_bytes()); huge_buf[0..4].copy_from_slice(&id.to_be_bytes());
let mut len = 4; let mut len = 8;
huge_buf[8..data.len() + 8].copy_from_slice(data); huge_buf[len..data.len() + 8].copy_from_slice(data);
len += data.len(); len += data.len();
let data = huge_buf[0..len].to_vec();
let tm_sec_header = let tm_sec_header =
PusTmSecondaryHeader::new_simple(3, Subservice::TmHkPacket as u8, &time_stamp_buf); PusTmSecondaryHeader::new_simple(3, Subservice::TmHkPacket as u8, &time_stamp_buf);
sp_header.data_len = len as u16;
let hk_tm = PusTm::new(&mut sp_header, tm_sec_header, Some(&huge_buf[0..len]), true); let hk_tm = PusTm::new(&mut sp_header, tm_sec_header, Some(&huge_buf[0..len]), true);
let addr = self.aocs_tm_store.add_pus_tm(&hk_tm); let addr = self.aocs_tm_store.add_pus_tm(&hk_tm);
self.aocs_tm_funnel_tx.send(addr).expect("sending failed"); self.aocs_tm_funnel_tx.send(addr).expect("sending failed");

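Rough numbers behind the housekeeper's choice to send the whole data set rather than individual values: the set is 13 values of 8 bytes each (AOCS_HK_NUM_OF_ELEMENTS) plus the 8-byte ID header, while individual values would need one packet per value. The per-packet overhead figure below is an assumption for illustration, not taken from this repository:

NUM_VALUES = 13
VALUE_BYTES = 8
SET_HEADER = 8
PACKET_OVERHEAD = 17  # assumed CCSDS + PUS TM header + CRC budget

full_set = PACKET_OVERHEAD + SET_HEADER + NUM_VALUES * VALUE_BYTES
individual = NUM_VALUES * (PACKET_OVERHEAD + SET_HEADER + VALUE_BYTES)
print(full_set, individual)  # 129 vs 429 bytes per collection cycle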
src/main.rs

@@ -9,6 +9,7 @@ mod cam;
mod can;
mod can_ids;
mod ccsds;
mod helpers;
mod hk;
mod logger;
mod messages;
@@ -43,6 +44,7 @@ use strum::IntoEnumIterator;
use crate::can_ids::{
load_package_id_to_apids, load_package_id_to_threads, DeviceId, PackageModel, ThreadId,
};
use chrono::Duration;
use log::info;
use satrs_core::power::{SwitchId, SwitchState};
use std::collections::HashMap;
@@ -51,10 +53,11 @@ use std::sync::mpsc::channel;
use std::sync::{mpsc, Arc, Mutex, RwLock};
use std::thread;
//use libc::time64_t;
- use crate::aocs::{core_aocs_loop, MGMData, MGMHandler};
use crate::aocs::{core_aocs_loop, AocsController, MGMHandler};
#[cfg(feature = "can")]
use crate::can::CanTxHandler;
- use crate::hk::{AOCSSensorData, AocsDataMap, AocsHousekeeper};
use crate::helpers::{ModeHelper, VerifHelper};
use crate::hk::{AocsDataMap, AocsDataType, AocsHousekeeper};
use crate::pld_handler::core_pld_task;
use crate::power_handler::{core_power_task, PowerSwitcher};
@@ -87,7 +90,7 @@ fn main() {
let seq_count_provider = SeqCountProviderSyncClonable::default();
let msg_count_provider = SeqCountProviderSyncClonable::default();
- let aocs_seq_count_provider = seq_count_provider.clone();
let aocs_seq_count_provider = SeqCountProviderSyncClonable::default();
let verif_seq_count_provider = seq_count_provider.clone();
let tmtc_seq_count_provider = seq_count_provider.clone();
@@ -159,7 +162,7 @@ fn main() {
// Create clones here to allow moving the values
let core_args = OtherArgs {
sock_addr,
- verif_reporter,
verif_reporter: verif_reporter.clone(),
event_sender,
event_request_tx,
request_map,
@@ -293,27 +296,84 @@ fn main() {
let aocs_tm_funnel_tx = tm_funnel_tx.clone();
let aocs_tm_store = tm_store.clone();
- let mgm_shared_data: Arc<Mutex<MGMData>> = Arc::default();
- let aocs_data = Arc::new(Mutex::new(AocsDataMap::new()));
- let (mgm_action_tx, mgm_action_rx) = channel::<RequestWithToken>();
let mut aocs_data_not_threadsafe = AocsDataMap::new();
aocs_data_not_threadsafe
.update_value(1, AocsDataType::float_value(1.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(2, AocsDataType::float_value(2.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(3, AocsDataType::float_value(3.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(4, AocsDataType::float_value(4.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(5, AocsDataType::float_value(5.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(6, AocsDataType::float_value(6.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(7, AocsDataType::float_value(7.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(8, AocsDataType::float_value(8.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(9, AocsDataType::float_value(9.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(10, AocsDataType::float_value(10.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(11, AocsDataType::float_value(11.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(12, AocsDataType::float_value(12.0))
.unwrap();
aocs_data_not_threadsafe
.update_value(13, AocsDataType::float_value(13.0))
.unwrap();
let aocs_data = Arc::new(Mutex::new(aocs_data_not_threadsafe));
let power_switcher_aocs = power_switcher.clone();
info!("Starting AOCS task");
let builder5 = thread::Builder::new().name("AOCSThread".into());
let jh5 = builder5.spawn(move || {
- let mut aocs_housekeeper = AocsHousekeeper::new(
let mut aocs_housekeeper = AocsHousekeeper::new_with_collection_interval(
- aocs_data,
aocs_data.clone(),
aocs_hk_rx,
- aocs_seq_count_provider,
aocs_seq_count_provider.clone(),
- aocs_tm_store,
aocs_tm_store.clone(),
- aocs_tm_funnel_tx,
aocs_tm_funnel_tx.clone(),
- reporter_aocs,
reporter_aocs.clone(),
Duration::seconds(1),
);
let aocs_controller_verif_helper = VerifHelper::new(verif_reporter.clone());
let aocs_controller_mode_helper = ModeHelper::new(
AOCS_APID,
aocs_seq_count_provider.clone(),
aocs_tm_store.clone(),
aocs_tm_funnel_tx.clone(),
);
let mut aocs_controller = AocsController::new(
aocs_thread_rx,
mgm_tx,
css_tx,
str_tx,
aocs_seq_count_provider.clone(),
aocs_controller_verif_helper,
aocs_controller_mode_helper,
);
loop {
- aocs_housekeeper.handle_hk_request();
aocs_housekeeper.periodic_op();
aocs_controller.periodic_op();
}
/*let mut mgm_handler = MGMHandler::new(
power_switcher_aocs.clone(),

src/pld_handler.rs

@@ -257,6 +257,8 @@ pub fn core_pld_task(
debug!("{:?}", camera_handler.get_mode());
while camera_handler.get_mode() != CameraMode::Idle {
camera_handler.periodic_op();
debug!("{:?}", camera_handler.get_mode());
sleep(Duration::from_millis(1000));
}
let cds_stamp = TimeProvider::from_now_with_u16_days().unwrap();

src/pus.rs

@@ -21,6 +21,7 @@ use satrs_core::pus::verification::{
};
use satrs_core::pus::{event, GenericTcCheckError};
use satrs_core::res_code::ResultU16;
use satrs_core::seq_count::{SeqCountProviderSyncClonable, SequenceCountProviderCore};
use satrs_core::spacepackets::ecss::{scheduling, PusServiceId};
use satrs_core::spacepackets::CcsdsPacket;
use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper;
@@ -35,7 +36,6 @@ use std::convert::TryFrom;
use std::hash::Hash;
use std::rc::Rc;
use std::sync::mpsc::Sender;
- use satrs_core::seq_count::{SeqCountProviderSyncClonable, SequenceCountProviderCore};
pub struct PusReceiver {
pub tm_helper: PusTmWithCdsShortHelper,
@@ -178,7 +178,23 @@ impl PusServiceProvider for PusReceiver {
}
} {
if let Some(sender) = self.tc_args.apid_map.clone().get(&apid) {
- sender.send(request).expect("error sending request");
info!("sending request to apid: {}", apid);
sender
.send(request)
.expect(&format!("error sending request to apid {}", apid));
} else {
let token = request.1.unwrap();
self.tm_args
.verif_reporter
.start_failure(
token,
FailParams::new(
Some(self.stamp_helper.stamp()),
&tmtc_err::INVALID_PUS_SUBSERVICE,
None,
),
)
.expect("error sending start failure");
}
}
Ok(())
@@ -200,7 +216,12 @@ impl PusReceiver {
.verif_reporter
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("Error sending start success");
- let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(17, 2, None, self.tm_args.seq_count_provider.get());
let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(
17,
2,
None,
self.tm_args.seq_count_provider.get(),
);
let addr = self.tm_args.tm_store.add_pus_tm(&ping_reply);
self.tm_args
.tm_tx
@@ -306,7 +327,7 @@ impl PusReceiver {
Some(HkRequest::Enable(unique_id))
} else if PusPacket::subservice(pus_tc) == hk::Subservice::TcDisableHkGeneration as u8 {
//send_request(HkRequest::Disable(addressable_id));
- Some(HkRequest::Enable(unique_id))
Some(HkRequest::Disable(unique_id))
} else if PusPacket::subservice(pus_tc) == hk::Subservice::TcGenerateOneShotHk as u8 {
//send_request(HkRequest::OneShot(addressable_id));
Some(HkRequest::OneShot(unique_id))
@@ -560,7 +581,7 @@ impl PusReceiver {
if app_data.is_some() {
app_data_len = pus_tc.user_data().unwrap().len();
}
- if app_data_len < 4 {
/*if app_data_len < 4 {
self.tm_args
.verif_reporter
.start_failure(
@@ -573,6 +594,8 @@
)
.expect("Sending start failure TM failed");
}
*/
let app_data = app_data.unwrap();
let mut invalid_subservice_handler = || {
self.tm_args
@@ -589,6 +612,7 @@
};
let subservice = mode::Subservice::try_from(PusPacket::subservice(pus_tc));
if let Ok(subservice) = subservice {
/*
let forward_mode_request = |target_id, mode_request: ModeRequest| match self
.tc_args
.request_map
@@ -604,11 +628,12 @@
.expect("sending mode request failed");
}
};
*/
let mut valid_subservice = true;
- match subservice {
let request = match subservice {
Subservice::TcSetMode => {
- let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
//let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
- let min_len = ModeAndSubmode::raw_len() + 4;
let min_len = ModeAndSubmode::raw_len();
if app_data_len < min_len {
self.tm_args
.verif_reporter
@@ -627,37 +652,56 @@
}
// Should never fail after size check
let mode_submode = ModeAndSubmode::from_be_bytes(
- app_data[4..4 + ModeAndSubmode::raw_len()]
- .try_into()
- .unwrap(),
app_data[0..ModeAndSubmode::raw_len()].try_into().unwrap(),
)
.unwrap();
- forward_mode_request(target_id, ModeRequest::SetMode(mode_submode));
info!("{:?}", mode_submode);
//forward_mode_request(target_id, ModeRequest::SetMode(mode_submode));
Some(RequestWithToken(
Request::ModeRequest(ModeRequest::SetMode(mode_submode)),
Some(token),
))
}
Subservice::TcReadMode => {
- let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
//let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
- forward_mode_request(target_id, ModeRequest::ReadMode);
//forward_mode_request(target_id, ModeRequest::ReadMode);
Some(RequestWithToken(
Request::ModeRequest(ModeRequest::ReadMode),
Some(token),
))
}
Subservice::TcAnnounceMode => {
- let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
//let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
- forward_mode_request(target_id, ModeRequest::AnnounceMode);
//forward_mode_request(target_id, ModeRequest::AnnounceMode);
Some(RequestWithToken(
Request::ModeRequest(ModeRequest::AnnounceMode),
Some(token),
))
}
Subservice::TcAnnounceModeRecursive => {
- let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
//let target_id = u32::from_be_bytes(app_data[0..4].try_into().unwrap());
- forward_mode_request(target_id, ModeRequest::AnnounceModeRecursive);
//forward_mode_request(target_id, ModeRequest::AnnounceModeRecursive);
Some(RequestWithToken(
Request::ModeRequest(ModeRequest::AnnounceModeRecursive),
Some(token),
))
}
_ => {
warn!("Can not process mode request with subservice {subservice:?}");
invalid_subservice_handler();
valid_subservice = false;
None
}
- }
};
- if valid_subservice {
return request;
/*if valid_subservice {
self.tm_args
.verif_reporter
.start_success(token, Some(self.stamp_helper.stamp()))
.expect("sending start success TM failed");
}
*/
} else {
invalid_subservice_handler();
}
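The TcSetMode branch now reads ModeAndSubmode directly from the start of the application data instead of skipping a 4-byte target ID. For illustration, one way to build matching application data on the ground, assuming ModeAndSubmode serializes as a 4-byte big-endian mode followed by a 2-byte big-endian submode (the layout ModeHelper::make_and_send_mode_reply uses for replies); this is a sketch, not the committed make_mode_submode helper:

import struct

def make_mode_submode_app_data(mode: int, submode: int) -> bytes:
    # assumed ModeAndSubmode wire layout: u32 mode + u16 submode, big-endian
    return struct.pack("!IH", mode, submode)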