Various fixes and updates

- Make test event work
- Improved pyclient capabilities and split it up a bit

parent 9da051973c
commit 58544fac7a
satrs-example/pyclient/.gitignore (vendored)
@@ -1,3 +1,5 @@
 __pycache__
+/venv
+/log
 /.idea/*
satrs-example/pyclient/common.py (new file)
@@ -0,0 +1,50 @@
from __future__ import annotations

import dataclasses
import enum
import struct

EXAMPLE_PUS_APID = 0x02


class EventSeverity(enum.IntEnum):
    INFO = (0,)
    LOW = (1,)
    MEDIUM = (2,)
    HIGH = 3


@dataclasses.dataclass
class EventU32:
    severity: EventSeverity
    group_id: int
    unique_id: int

    @classmethod
    def unpack(cls, data: bytes) -> EventU32:
        if len(data) < 4:
            raise ValueError("passed data too short")
        event_raw = struct.unpack("!I", data[0:4])[0]
        return cls(
            severity=EventSeverity((event_raw >> 30) & 0b11),
            group_id=(event_raw >> 16) & 0x3FFF,
            unique_id=event_raw & 0xFFFF,
        )


class RequestTargetId(enum.IntEnum):
    ACS = 1


class AcsHkIds(enum.IntEnum):
    MGM_SET = 1


class HkOpCodes:
    GENERATE_ONE_SHOT = ["0", "oneshot"]


def make_addressable_id(target_id: int, unique_id: int) -> bytes:
    byte_string = bytearray(struct.pack("!I", target_id))
    byte_string.extend(struct.pack("!I", unique_id))
    return byte_string
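As a quick sanity check of the bit layout EventU32.unpack assumes (2-bit severity in the top bits, 14-bit group ID, 16-bit unique ID, packed big-endian), here is a small round-trip sketch; the concrete severity, group and unique ID values are made up for illustration:

import struct

from common import EventSeverity, EventU32

# Pack a raw event word: severity HIGH (3) in the top two bits,
# group ID 2 in the next 14 bits, unique ID 5 in the bottom 16 bits.
raw = (3 << 30) | (2 << 16) | 5
event = EventU32.unpack(struct.pack("!I", raw))
assert event.severity == EventSeverity.HIGH
assert event.group_id == 2
assert event.unique_id == 5

The test event checked for in the client handler below corresponds to group ID 0 and unique ID 0, i.e. a raw event word of 0 with INFO severity.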
@@ -1,22 +1,18 @@
 #!/usr/bin/env python3
 """Example client for the sat-rs example application"""
-import enum
 import logging
-import struct
 import sys
 import time
 from typing import Optional
-import datetime

 import tmtccmd
-from spacepackets.ecss import PusTelemetry, PusTelecommand, PusVerificator
+from spacepackets.ecss import PusTelemetry, PusVerificator
 from spacepackets.ecss.pus_17_test import Service17Tm
 from spacepackets.ecss.pus_1_verification import UnpackParams, Service1Tm
 from spacepackets.ccsds.time import CdsShortTimestamp

 from tmtccmd import CcsdsTmtcBackend, TcHandlerBase, ProcedureParamsWrapper
 from tmtccmd.tc.pus_3_fsfw_hk import generate_one_hk_command, make_sid
 from tmtccmd.tc.pus_11_tc_sched import create_time_tagged_cmd
 from tmtccmd.core.base import BackendRequest
 from tmtccmd.pus import VerificationWrapper
 from tmtccmd.tm import CcsdsTmHandler, SpecificApidHandlerBase
@@ -26,8 +22,6 @@ from tmtccmd.config import (
     SetupParams,
     HookBase,
     TmtcDefinitionWrapper,
-    CoreServiceList,
-    OpCodeEntry,
     params_to_procedure_conversion,
 )
 from tmtccmd.config import PreArgsParsingWrapper, SetupWrapper
@@ -46,15 +40,16 @@ from tmtccmd.tc import (
     DefaultPusQueueHelper,
     QueueWrapper,
 )
-from tmtccmd.tm.pus_5_fsfw_event import Service5Tm
 from tmtccmd.util import FileSeqCountProvider, PusFileSeqCountProvider
 from tmtccmd.util.obj_id import ObjectIdDictT

 from tmtccmd.util.tmtc_printer import FsfwTmTcPrinter

-_LOGGER = logging.getLogger(__name__)
+import pus_tc
+import tc_definitions
+from common import EXAMPLE_PUS_APID, EventSeverity, EventU32

-EXAMPLE_PUS_APID = 0x02
+_LOGGER = logging.getLogger(__name__)


 class SatRsConfigHook(HookBase):
@@ -75,38 +70,7 @@ class SatRsConfigHook(HookBase):
         return create_com_interface_default(cfg)

     def get_tmtc_definitions(self) -> TmtcDefinitionWrapper:
-        from tmtccmd.config.globals import get_default_tmtc_defs
-
-        defs = get_default_tmtc_defs()
-        srv_5 = OpCodeEntry()
-        srv_5.add("0", "Event Test")
-        defs.add_service(
-            name=CoreServiceList.SERVICE_5.value,
-            info="PUS Service 5 Event",
-            op_code_entry=srv_5,
-        )
-        srv_17 = OpCodeEntry()
-        srv_17.add("0", "Ping Test")
-        defs.add_service(
-            name=CoreServiceList.SERVICE_17_ALT,
-            info="PUS Service 17 Test",
-            op_code_entry=srv_17,
-        )
-        srv_3 = OpCodeEntry()
-        srv_3.add(HkOpCodes.GENERATE_ONE_SHOT, "Generate AOCS one shot HK")
-        defs.add_service(
-            name=CoreServiceList.SERVICE_3,
-            info="PUS Service 3 Housekeeping",
-            op_code_entry=srv_3,
-        )
-        srv_11 = OpCodeEntry()
-        srv_11.add("0", "Scheduled TC Test")
-        defs.add_service(
-            name=CoreServiceList.SERVICE_11,
-            info="PUS Service 11 TC Scheduling",
-            op_code_entry=srv_11,
-        )
-        return defs
+        return tc_definitions.tc_definitions()

     def perform_mode_operation(self, tmtc_backend: CcsdsTmtcBackend, mode: int):
         _LOGGER.info("Mode operation hook was called")
@@ -168,7 +132,14 @@ class PusHandler(SpecificApidHandlerBase):
             json_str = pus_tm.source_data[8:]
             dedicated_handler = True
         if service == 5:
-            tm_packet = Service5Tm.unpack(packet, time_reader=CdsShortTimestamp.empty())
+            tm_packet = PusTelemetry.unpack(
+                packet, time_reader=CdsShortTimestamp.empty()
+            )
+            src_data = tm_packet.source_data
+            event_u32 = EventU32.unpack(src_data)
+            _LOGGER.info(f"Received event packet. Event: {event_u32}")
+            if event_u32.group_id == 0 and event_u32.unique_id == 0:
+                _LOGGER.info("Received test event")
         if service == 17:
             tm_packet = Service17Tm.unpack(
                 packet, time_reader=CdsShortTimestamp.empty()
@@ -197,24 +168,6 @@ class PusHandler(SpecificApidHandlerBase):
         # self.printer.handle_long_tm_print(packet_if=tm_packet, info_if=tm_packet)


-def make_addressable_id(target_id: int, unique_id: int) -> bytes:
-    byte_string = bytearray(struct.pack("!I", target_id))
-    byte_string.extend(struct.pack("!I", unique_id))
-    return byte_string
-
-
-class RequestTargetId(enum.IntEnum):
-    ACS = 1
-
-
-class AcsHkIds(enum.IntEnum):
-    MGM_SET = 1
-
-
-class HkOpCodes:
-    GENERATE_ONE_SHOT = ["0", "oneshot"]
-
-
 class TcHandler(TcHandlerBase):
     def __init__(
         self,
@@ -259,33 +212,7 @@ class TcHandler(TcHandlerBase):
             def_proc = helper.to_def_procedure()
             service = def_proc.service
             op_code = def_proc.op_code
-            if (
-                service == CoreServiceList.SERVICE_17
-                or service == CoreServiceList.SERVICE_17_ALT
-            ):
-                q.add_log_cmd("Sending PUS ping telecommand")
-                return q.add_pus_tc(PusTelecommand(service=17, subservice=1))
-            if service == CoreServiceList.SERVICE_11:
-                q.add_log_cmd("Sending PUS scheduled TC telecommand")
-                crt_time = CdsShortTimestamp.from_now()
-                time_stamp = crt_time + datetime.timedelta(seconds=10)
-                time_stamp = time_stamp.pack()
-                return q.add_pus_tc(
-                    create_time_tagged_cmd(
-                        time_stamp,
-                        PusTelecommand(service=17, subservice=1),
-                        apid=EXAMPLE_PUS_APID,
-                    )
-                )
-            if service == CoreServiceList.SERVICE_3:
-                if op_code in HkOpCodes.GENERATE_ONE_SHOT:
-                    q.add_log_cmd("Sending HK one shot request")
-                    q.add_pus_tc(
-                        generate_one_hk_command(
-                            make_addressable_id(RequestTargetId.ACS, AcsHkIds.MGM_SET)
-                        )
-                    )
-                pass
+            pus_tc.pack_pus_telecommands(q, service, op_code)


 def main():
satrs-example/pyclient/pus_tc.py (new file)
@@ -0,0 +1,50 @@
import datetime

from spacepackets.ccsds import CdsShortTimestamp
from spacepackets.ecss import PusTelecommand
from tmtccmd.config import CoreServiceList
from tmtccmd.tc import DefaultPusQueueHelper
from tmtccmd.tc.pus_11_tc_sched import create_time_tagged_cmd
from tmtccmd.tc.pus_3_fsfw_hk import create_request_one_hk_command

from common import (
    EXAMPLE_PUS_APID,
    HkOpCodes,
    make_addressable_id,
    RequestTargetId,
    AcsHkIds,
)


def pack_pus_telecommands(q: DefaultPusQueueHelper, service: str, op_code: str):
    if (
        service == CoreServiceList.SERVICE_17
        or service == CoreServiceList.SERVICE_17_ALT
    ):
        if op_code == "ping":
            q.add_log_cmd("Sending PUS ping telecommand")
            return q.add_pus_tc(PusTelecommand(service=17, subservice=1))
        elif op_code == "trigger_event":
            q.add_log_cmd("Triggering test event")
            return q.add_pus_tc(PusTelecommand(service=17, subservice=128))
    if service == CoreServiceList.SERVICE_11:
        q.add_log_cmd("Sending PUS scheduled TC telecommand")
        crt_time = CdsShortTimestamp.from_now()
        time_stamp = crt_time + datetime.timedelta(seconds=10)
        time_stamp = time_stamp.pack()
        return q.add_pus_tc(
            create_time_tagged_cmd(
                time_stamp,
                PusTelecommand(service=17, subservice=1),
                apid=EXAMPLE_PUS_APID,
            )
        )
    if service == CoreServiceList.SERVICE_3:
        if op_code in HkOpCodes.GENERATE_ONE_SHOT:
            q.add_log_cmd("Sending HK one shot request")
            q.add_pus_tc(
                create_request_one_hk_command(
                    make_addressable_id(RequestTargetId.ACS, AcsHkIds.MGM_SET)
                )
            )
        pass
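The addressable ID passed to the one-shot HK request is simply two big-endian u32 words (target ID, then unique ID), the same 8-byte layout the AOCS thread on the Rust side writes back out with write_to_be_bytes. A small illustrative check, using the enum values defined in common.py:

from common import AcsHkIds, RequestTargetId, make_addressable_id

# RequestTargetId.ACS == 1 and AcsHkIds.MGM_SET == 1, so the resulting
# addressable ID is 00 00 00 01 | 00 00 00 01.
addr_id = make_addressable_id(RequestTargetId.ACS, AcsHkIds.MGM_SET)
assert bytes(addr_id) == bytes.fromhex("0000000100000001")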
satrs-example/pyclient/pus_tm.py (new file, empty)

satrs-example/pyclient/tc_definitions.py (new file)
@@ -0,0 +1,38 @@
from tmtccmd.config import OpCodeEntry, TmtcDefinitionWrapper, CoreServiceList
from tmtccmd.config.globals import get_default_tmtc_defs

from common import HkOpCodes


def tc_definitions() -> TmtcDefinitionWrapper:
    defs = get_default_tmtc_defs()
    srv_5 = OpCodeEntry()
    srv_5.add("0", "Event Test")
    defs.add_service(
        name=CoreServiceList.SERVICE_5.value,
        info="PUS Service 5 Event",
        op_code_entry=srv_5,
    )
    srv_17 = OpCodeEntry()
    srv_17.add("ping", "Ping Test")
    srv_17.add("trigger_event", "Trigger Event")
    defs.add_service(
        name=CoreServiceList.SERVICE_17_ALT,
        info="PUS Service 17 Test",
        op_code_entry=srv_17,
    )
    srv_3 = OpCodeEntry()
    srv_3.add(HkOpCodes.GENERATE_ONE_SHOT, "Generate AOCS one shot HK")
    defs.add_service(
        name=CoreServiceList.SERVICE_3,
        info="PUS Service 3 Housekeeping",
        op_code_entry=srv_3,
    )
    srv_11 = OpCodeEntry()
    srv_11.add("0", "Scheduled TC Test")
    defs.add_service(
        name=CoreServiceList.SERVICE_11,
        info="PUS Service 11 TC Scheduling",
        op_code_entry=srv_11,
    )
    return defs
@@ -4,7 +4,7 @@ pub fn setup_logger() -> Result<(), fern::InitError> {
         out.finish(format_args!(
             "{}[{}][{}] {}",
             chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
-            record.target(), //(std::thread::current().name().expect("unnamed_thread"),
+            std::thread::current().name().expect("unnamed_thread"),
             record.level(),
             message
         ))
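With the named threads introduced below and the format string above, a log line carries the thread name in the second bracket pair, roughly like this (illustrative output, assuming the thread-name variant is the active one):

[2023-02-12][11:02:03][TMTC][INFO] Starting TMTC task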
@@ -109,23 +109,34 @@ fn main() {
             8,
         )
         .unwrap();
     // Every software component which needs to generate verification telemetry, gets a cloned
     // verification reporter.
     let verif_reporter = VerificationReporterWithSender::new(&verif_cfg, Box::new(verif_sender));
-    let mut reporter_event_handler = verif_reporter.clone();
-    let mut reporter_aocs = verif_reporter.clone();

     // Create event handling components
     // These sender handles are used to send event requests, for example to enable or disable
     // certain events
     let (event_request_tx, event_request_rx) = channel::<EventRequestWithToken>();
     // The sender handle is the primary sender handle for all components which want to create events.
     // The event manager will receive the RX handle to receive all the events.
     let (event_sender, event_man_rx) = channel();
     let event_recv = MpscEventReceiver::<EventU32>::new(event_man_rx);
     let mut event_man = EventManagerWithMpscQueue::new(Box::new(event_recv));
+
+    // All events sent to the manager are routed to the PUS event manager, which generates PUS event
+    // telemetry for each event.
+    let event_reporter = EventReporter::new(PUS_APID, 128).unwrap();
+    let pus_tm_backend = DefaultPusMgmtBackendProvider::<EventU32>::default();
+    let mut pus_event_dispatcher =
+        PusEventDispatcher::new(event_reporter, Box::new(pus_tm_backend));
+    let (pus_event_man_tx, pus_event_man_rx) = channel();
+    let pus_event_man_send_provider = MpscEventU32SendProvider::new(1, pus_event_man_tx);
+    let mut reporter_event_handler = verif_reporter.clone();
+    let mut reporter_aocs = verif_reporter.clone();
+    event_man.subscribe_all(pus_event_man_send_provider.id());
+    event_man.add_sender(pus_event_man_send_provider);

     // Some request are targetable. This map is used to retrieve sender handles based on a target ID.
     let mut request_map = HashMap::new();
     let (acs_thread_tx, acs_thread_rx) = channel::<RequestWithToken>();
     request_map.insert(RequestTargetId::AcsSubsystem as u32, acs_thread_tx);
@@ -157,120 +168,151 @@ fn main() {
     let mut aocs_tm_store = tm_store.clone();

     info!("Starting TMTC task");
-    let jh0 = thread::spawn(move || {
-        core_tmtc_task(core_args, tc_args, tm_args);
-    });
+    let jh0 = thread::Builder::new()
+        .name("TMTC".to_string())
+        .spawn(move || {
+            core_tmtc_task(core_args, tc_args, tm_args);
+        })
+        .unwrap();

     info!("Starting TM funnel task");
-    let jh1 = thread::spawn(move || {
-        let tm_funnel = TmFunnel {
-            tm_server_tx,
-            tm_funnel_rx,
-        };
-        loop {
-            if let Ok(addr) = tm_funnel.tm_funnel_rx.recv() {
-                tm_funnel
-                    .tm_server_tx
-                    .send(addr)
-                    .expect("Sending TM to server failed");
-            }
-        }
-    });
+    let jh1 = thread::Builder::new()
+        .name("TM Funnel".to_string())
+        .spawn(move || {
+            let tm_funnel = TmFunnel {
+                tm_server_tx,
+                tm_funnel_rx,
+            };
+            loop {
+                if let Ok(addr) = tm_funnel.tm_funnel_rx.recv() {
+                    tm_funnel
+                        .tm_server_tx
+                        .send(addr)
+                        .expect("Sending TM to server failed");
+                }
+            }
+        })
+        .unwrap();

     info!("Starting event handling task");
-    let jh2 = thread::spawn(move || {
-        let mut timestamp: [u8; 7] = [0; 7];
-        let mut sender = EventTmSender::new(tm_store, tm_funnel_tx);
-        let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
-        let mut report_completion = |event_req: EventRequestWithToken, timestamp: &[u8]| {
-            reporter_event_handler
-                .completion_success(event_req.token, Some(timestamp))
-                .expect("Sending completion success failed");
-        };
-        loop {
-            if let Ok(event_req) = event_request_rx.try_recv() {
-                match event_req.request {
-                    EventRequest::Enable(event) => {
-                        pus_event_dispatcher
-                            .enable_tm_for_event(&event)
-                            .expect("Enabling TM failed");
-                        update_time(&mut time_provider, &mut timestamp);
-                        report_completion(event_req, &timestamp);
-                    }
-                    EventRequest::Disable(event) => {
-                        pus_event_dispatcher
-                            .disable_tm_for_event(&event)
-                            .expect("Disabling TM failed");
-                        update_time(&mut time_provider, &mut timestamp);
-                        report_completion(event_req, &timestamp);
-                    }
-                }
-            }
-            if let Ok((event, _param)) = pus_event_man_rx.try_recv() {
-                update_time(&mut time_provider, &mut timestamp);
-                pus_event_dispatcher
-                    .generate_pus_event_tm_generic(&mut sender, &timestamp, event, None)
-                    .expect("Sending TM as event failed");
-            }
-            thread::sleep(Duration::from_millis(400));
-        }
-    });
+    let jh2 = thread::Builder::new()
+        .name("Event".to_string())
+        .spawn(move || {
+            let mut timestamp: [u8; 7] = [0; 7];
+            let mut sender = EventTmSender::new(tm_store, tm_funnel_tx);
+            let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
+            let mut report_completion = |event_req: EventRequestWithToken, timestamp: &[u8]| {
+                reporter_event_handler
+                    .completion_success(event_req.token, Some(timestamp))
+                    .expect("Sending completion success failed");
+            };
+            loop {
+                // handle event requests
+                if let Ok(event_req) = event_request_rx.try_recv() {
+                    match event_req.request {
+                        EventRequest::Enable(event) => {
+                            pus_event_dispatcher
+                                .enable_tm_for_event(&event)
+                                .expect("Enabling TM failed");
+                            update_time(&mut time_provider, &mut timestamp);
+                            report_completion(event_req, &timestamp);
+                        }
+                        EventRequest::Disable(event) => {
+                            pus_event_dispatcher
+                                .disable_tm_for_event(&event)
+                                .expect("Disabling TM failed");
+                            update_time(&mut time_provider, &mut timestamp);
+                            report_completion(event_req, &timestamp);
+                        }
+                    }
+                }
+
+                // Perform the event routing.
+                event_man
+                    .try_event_handling()
+                    .expect("event handling failed");
+
+                // Perform the generation of PUS event packets
+                if let Ok((event, _param)) = pus_event_man_rx.try_recv() {
+                    update_time(&mut time_provider, &mut timestamp);
+                    pus_event_dispatcher
+                        .generate_pus_event_tm_generic(&mut sender, &timestamp, event, None)
+                        .expect("Sending TM as event failed");
+                }
+                thread::sleep(Duration::from_millis(400));
+            }
+        })
+        .unwrap();

     info!("Starting AOCS thread");
-    let jh3 = thread::spawn(move || {
-        let mut timestamp: [u8; 7] = [0; 7];
-        let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
-        loop {
-            match acs_thread_rx.try_recv() {
-                Ok(request) => {
-                    info!("ACS thread: Received HK request {:?}", request.0);
-                    update_time(&mut time_provider, &mut timestamp);
-                    match request.0 {
-                        Request::HkRequest(hk_req) => match hk_req {
-                            HkRequest::OneShot(address) => {
-                                assert_eq!(address.target_id, RequestTargetId::AcsSubsystem as u32);
-                                if address.unique_id == AcsHkIds::TestMgmSet as u32 {
-                                    let mut sp_header =
-                                        SpHeader::tm(PUS_APID, SequenceFlags::Unsegmented, 0, 0)
-                                            .unwrap();
-                                    let sec_header = PusTmSecondaryHeader::new_simple(
-                                        3,
-                                        HkSubservice::TmHkPacket as u8,
-                                        &timestamp,
-                                    );
-                                    let mut buf: [u8; 8] = [0; 8];
-                                    address.write_to_be_bytes(&mut buf).unwrap();
-                                    let pus_tm =
-                                        PusTm::new(&mut sp_header, sec_header, Some(&buf), true);
-                                    let addr = aocs_tm_store.add_pus_tm(&pus_tm);
-                                    aocs_to_funnel.send(addr).expect("Sending HK TM failed");
-                                }
-                            }
-                            HkRequest::Enable(_) => {}
-                            HkRequest::Disable(_) => {}
-                            HkRequest::ModifyCollectionInterval(_, _) => {}
-                        },
-                        Request::ModeRequest(_mode_req) => {
-                            warn!("mode request handling not implemented yet")
-                        }
-                    }
-                    let started_token = reporter_aocs
-                        .start_success(request.1, Some(&timestamp))
-                        .expect("Sending start success failed");
-                    reporter_aocs
-                        .completion_success(started_token, Some(&timestamp))
-                        .expect("Sending completion success failed");
-                }
-                Err(e) => match e {
-                    TryRecvError::Empty => {}
-                    TryRecvError::Disconnected => {
-                        warn!("ACS thread: Message Queue TX disconnected!")
-                    }
-                },
-            }
-            thread::sleep(Duration::from_millis(500));
-        }
-    });
+    let jh3 = thread::Builder::new()
+        .name("AOCS".to_string())
+        .spawn(move || {
+            let mut timestamp: [u8; 7] = [0; 7];
+            let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
+            loop {
+                match acs_thread_rx.try_recv() {
+                    Ok(request) => {
+                        info!("ACS thread: Received HK request {:?}", request.0);
+                        update_time(&mut time_provider, &mut timestamp);
+                        match request.0 {
+                            Request::HkRequest(hk_req) => match hk_req {
+                                HkRequest::OneShot(address) => {
+                                    assert_eq!(
+                                        address.target_id,
+                                        RequestTargetId::AcsSubsystem as u32
+                                    );
+                                    if address.unique_id == AcsHkIds::TestMgmSet as u32 {
+                                        let mut sp_header = SpHeader::tm(
+                                            PUS_APID,
+                                            SequenceFlags::Unsegmented,
+                                            0,
+                                            0,
+                                        )
+                                        .unwrap();
+                                        let sec_header = PusTmSecondaryHeader::new_simple(
+                                            3,
+                                            HkSubservice::TmHkPacket as u8,
+                                            &timestamp,
+                                        );
+                                        let mut buf: [u8; 8] = [0; 8];
+                                        address.write_to_be_bytes(&mut buf).unwrap();
+                                        let pus_tm = PusTm::new(
+                                            &mut sp_header,
+                                            sec_header,
+                                            Some(&buf),
+                                            true,
+                                        );
+                                        let addr = aocs_tm_store.add_pus_tm(&pus_tm);
+                                        aocs_to_funnel.send(addr).expect("Sending HK TM failed");
+                                    }
+                                }
+                                HkRequest::Enable(_) => {}
+                                HkRequest::Disable(_) => {}
+                                HkRequest::ModifyCollectionInterval(_, _) => {}
+                            },
+                            Request::ModeRequest(_mode_req) => {
+                                warn!("mode request handling not implemented yet")
+                            }
+                        }
+                        let started_token = reporter_aocs
+                            .start_success(request.1, Some(&timestamp))
+                            .expect("Sending start success failed");
+                        reporter_aocs
+                            .completion_success(started_token, Some(&timestamp))
+                            .expect("Sending completion success failed");
+                    }
+                    Err(e) => match e {
+                        TryRecvError::Empty => {}
+                        TryRecvError::Disconnected => {
+                            warn!("ACS thread: Message Queue TX disconnected!")
+                        }
+                    },
+                }
+                thread::sleep(Duration::from_millis(500));
+            }
+        })
+        .unwrap();

     jh0.join().expect("Joining UDP TMTC server thread failed");
     jh1.join().expect("Joining TM Funnel thread failed");
@@ -1,6 +1,6 @@
 use crate::requests::{Request, RequestWithToken};
 use crate::tmtc::{PusTcSource, TmStore};
-use log::warn;
+use log::{info, warn};
 use satrs_core::events::EventU32;
 use satrs_core::hk::{CollectionIntervalFactor, HkRequest};
 use satrs_core::mode::{ModeAndSubmode, ModeCommand, ModeRequest};
@@ -168,8 +168,8 @@ impl PusReceiver {
     fn handle_test_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
         match PusPacket::subservice(pus_tc) {
             1 => {
-                println!("Received PUS ping command TC[17,1]");
-                println!("Sending ping reply PUS TM[17,2]");
+                info!("Received PUS ping command TC[17,1]");
+                info!("Sending ping reply PUS TM[17,2]");
                 let start_token = self
                     .tm_args
                     .verif_reporter
@@ -187,6 +187,7 @@ impl PusReceiver {
                     .expect("Error sending completion success");
             }
             128 => {
+                info!("Generating test event");
                 self.tc_args
                     .event_sender
                     .send((TEST_EVENT.into(), None))
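This 128 subservice arm is the satellite-side half of the test event feature: the pyclient's trigger_event op code sends TC[17,128], this handler fires TEST_EVENT into the event channel, and the event task turns it into a Service 5 telemetry packet that the client decodes with EventU32.unpack. A hypothetical minimal way to build that trigger telecommand with spacepackets, outside of the tmtccmd queue machinery:

from spacepackets.ecss import PusTelecommand

# TC[17,128]: subservice 128 of the PUS test service generates the test
# event on the satellite side; 0x02 is the example application's APID.
trigger_tc = PusTelecommand(service=17, subservice=128, apid=0x02)
raw_tc = trigger_tc.pack()
# raw_tc is the byte string that would be sent to the example's UDP TC server.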
@@ -1,3 +1,4 @@
+use log::info;
 use satrs_core::events::EventU32;
 use satrs_core::hal::host::udp_server::{ReceiveResult, UdpTcServer};
 use satrs_core::params::Params;
@@ -240,7 +241,7 @@ fn core_tmtc_loop(
     scheduler.update_time_from_now().unwrap();
     if let Ok(released_tcs) = scheduler.release_telecommands(releaser, pool.as_mut()) {
         if released_tcs > 0 {
-            println!("{released_tcs} TC(s) released from scheduler");
+            info!("{released_tcs} TC(s) released from scheduler");
         }
     }
     drop(pool);
@@ -315,9 +316,9 @@ fn core_tm_handling(udp_tmtc_server: &mut UdpTmtcServer, recv_addr: &SocketAddr)
     if buf.len() > 9 {
         let service = buf[7];
         let subservice = buf[8];
-        println!("Sending PUS TM[{service},{subservice}]")
+        info!("Sending PUS TM[{service},{subservice}]")
     } else {
-        println!("Sending PUS TM");
+        info!("Sending PUS TM");
     }
     udp_tmtc_server
         .udp_tc_server