continue
parent 363770066d
commit 52a7682a55
@@ -10,7 +10,7 @@ use hashbrown::HashSet;

 #[cfg(feature = "alloc")]
 pub use crate::pus::event::EventReporter;
-use crate::pus::verification::{TcStateStarted, VerificationToken};
+use crate::pus::verification::TcStateToken;
 #[cfg(feature = "alloc")]
 use crate::pus::EcssTmSenderCore;
 use crate::pus::EcssTmtcErrorWithSend;
@@ -91,7 +91,7 @@ pub enum EventRequest<Event: GenericEvent = EventU32> {
 #[derive(Debug)]
 pub struct EventRequestWithToken<Event: GenericEvent = EventU32> {
     pub request: EventRequest<Event>,
-    pub token: VerificationToken<TcStateStarted>,
+    pub token: TcStateToken,
 }

 #[derive(Debug)]
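
The second hunk widens the token field of EventRequestWithToken from a started-state verification token to the type-erased TcStateToken, so a request can carry whatever verification state its telecommand currently has. A minimal standalone sketch of that enum-over-typestate idea, using stand-in types rather than the satrs-core definitions:

// Stand-in types only; the real definitions live in satrs-core's verification module.
#[derive(Debug, Clone, Copy)]
struct VerificationToken<State> {
    request_id: u32,
    state: State,
}

#[derive(Debug, Clone, Copy)]
struct TcStateAccepted;
#[derive(Debug, Clone, Copy)]
struct TcStateStarted;

// The type-erased token: one enum variant per typestate.
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
enum TcStateToken {
    Accepted(VerificationToken<TcStateAccepted>),
    Started(VerificationToken<TcStateStarted>),
}

impl From<VerificationToken<TcStateAccepted>> for TcStateToken {
    fn from(t: VerificationToken<TcStateAccepted>) -> Self {
        TcStateToken::Accepted(t)
    }
}

fn main() {
    let accepted = VerificationToken {
        request_id: 1,
        state: TcStateAccepted,
    };
    // A request struct can now store the token regardless of its state.
    let token: TcStateToken = accepted.into();
    println!("{token:?}");
}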

satrs-core/src/pus/event_srv.rs (new file, 144 lines)
@@ -0,0 +1,144 @@
+use crate::events::EventU32;
+use crate::pool::{SharedPool, StoreAddr};
+use crate::pus::event_man::{EventRequest, EventRequestWithToken};
+use crate::pus::verification::{
+    StdVerifReporterWithSender, TcStateAccepted, TcStateToken, VerificationToken,
+};
+use crate::pus::{
+    AcceptedTc, PartialPusHandlingError, PusPacketHandlerResult, PusPacketHandlingError,
+    PusServiceBase, PusServiceHandler,
+};
+use crate::tmtc::tm_helper::SharedTmStore;
+use spacepackets::ecss::event::Subservice;
+use spacepackets::ecss::PusPacket;
+use spacepackets::tc::PusTc;
+use std::format;
+use std::sync::mpsc::{Receiver, Sender};
+
+pub struct PusService5EventHandler {
+    psb: PusServiceBase,
+    event_request_tx: Sender<EventRequestWithToken>,
+}
+
+impl PusService5EventHandler {
+    pub fn new(
+        receiver: Receiver<AcceptedTc>,
+        tc_pool: SharedPool,
+        tm_tx: Sender<StoreAddr>,
+        tm_store: SharedTmStore,
+        tm_apid: u16,
+        verification_handler: StdVerifReporterWithSender,
+        event_request_tx: Sender<EventRequestWithToken>,
+    ) -> Self {
+        Self {
+            psb: PusServiceBase::new(
+                receiver,
+                tc_pool,
+                tm_tx,
+                tm_store,
+                tm_apid,
+                verification_handler,
+            ),
+            event_request_tx,
+        }
+    }
+}
+
+impl PusServiceHandler for PusService5EventHandler {
+    fn psb_mut(&mut self) -> &mut PusServiceBase {
+        &mut self.psb
+    }
+    fn psb(&self) -> &PusServiceBase {
+        &self.psb
+    }
+
+    fn handle_one_tc(
+        &mut self,
+        addr: StoreAddr,
+        token: VerificationToken<TcStateAccepted>,
+    ) -> Result<PusPacketHandlerResult, PusPacketHandlingError> {
+        {
+            // Keep locked section as short as possible.
+            let mut tc_pool = self
+                .psb
+                .tc_store
+                .write()
+                .map_err(|e| PusPacketHandlingError::RwGuardError(format!("{e}")))?;
+            let tc_guard = tc_pool.read_with_guard(addr);
+            let tc_raw = tc_guard.read().unwrap();
+            self.psb.pus_buf[0..tc_raw.len()].copy_from_slice(tc_raw);
+        }
+        let (tc, _) = PusTc::from_bytes(&self.psb.pus_buf).unwrap();
+        let srv = Subservice::try_from(tc.subservice());
+        if srv.is_err() {
+            return Ok(PusPacketHandlerResult::CustomSubservice(
+                tc.subservice(),
+                token,
+            ));
+        }
+        let mut handle_enable_disable_request = |enable: bool| {
+            if tc.user_data().is_none() || tc.user_data().unwrap().len() < 4 {
+                return Err(PusPacketHandlingError::NotEnoughAppData(
+                    "At least 4 bytes event ID expected".into(),
+                ));
+            }
+            let user_data = tc.user_data().unwrap();
+            let event_u32 = EventU32::from(u32::from_be_bytes(user_data[0..4].try_into().unwrap()));
+
+            let start_token = self
+                .psb
+                .verification_handler
+                .start_success(token, Some(&self.psb.stamp_buf))
+                .map_err(|_| PartialPusHandlingError::VerificationError);
+            let partial_error = start_token.clone().err();
+            let mut token: TcStateToken = token.into();
+            if let Ok(start_token) = start_token {
+                token = start_token.into();
+            }
+            let event_req_with_token = if enable {
+                EventRequestWithToken {
+                    request: EventRequest::Enable(event_u32),
+                    token,
+                }
+            } else {
+                EventRequestWithToken {
+                    request: EventRequest::Disable(event_u32),
+                    token,
+                }
+            };
+            self.event_request_tx
+                .send(event_req_with_token)
+                .map_err(|_| {
+                    PusPacketHandlingError::SendError("Forwarding event request failed".into())
+                })?;
+            if let Some(partial_error) = partial_error {
+                return Ok(PusPacketHandlerResult::RequestHandledPartialSuccess(
+                    partial_error,
+                ));
+            }
+            Ok(PusPacketHandlerResult::RequestHandled)
+        };
+        match srv.unwrap() {
+            Subservice::TmInfoReport
+            | Subservice::TmLowSeverityReport
+            | Subservice::TmMediumSeverityReport
+            | Subservice::TmHighSeverityReport => {
+                return Err(PusPacketHandlingError::InvalidSubservice(tc.subservice()))
+            }
+            Subservice::TcEnableEventGeneration => {
+                handle_enable_disable_request(true)?;
+            }
+            Subservice::TcDisableEventGeneration => {
+                handle_enable_disable_request(false)?;
+            }
+            Subservice::TcReportDisabledList | Subservice::TmDisabledEventsReport => {
+                return Ok(PusPacketHandlerResult::SubserviceNotImplemented(
+                    tc.subservice(),
+                    token,
+                ));
+            }
+        }
+
+        Ok(PusPacketHandlerResult::RequestHandled)
+    }
+}
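
handle_one_tc above pulls a 4-byte, big-endian event ID out of the TC application data before forwarding the enable/disable request. A self-contained sketch of just that parsing step (the helper name is illustrative, not part of satrs-core):

// Illustrative helper: parse a 4-byte big-endian event ID from PUS TC
// application data, returning None when not enough data is present.
fn parse_event_id(app_data: &[u8]) -> Option<u32> {
    if app_data.len() < 4 {
        return None;
    }
    Some(u32::from_be_bytes(app_data[0..4].try_into().unwrap()))
}

fn main() {
    // 0x0000002A -> event ID 42
    assert_eq!(parse_event_id(&[0x00, 0x00, 0x00, 0x2A]), Some(42));
    // Too short: mirrors the NotEnoughAppData error path in the handler.
    assert_eq!(parse_event_id(&[0x01, 0x02]), None);
    println!("event ID parsing sketch ok");
}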

@@ -11,6 +11,7 @@ use spacepackets::{ByteConversionError, SizeMissmatch};

 pub mod event;
 pub mod event_man;
+pub mod event_srv;
 pub mod hk;
 pub mod mode;
 pub mod scheduler;
@@ -289,13 +290,17 @@ pub mod std_mod {
         PusError(#[from] PusError),
         #[error("Wrong service number {0} for packet handler")]
         WrongService(u8),
+        #[error("Invalid subservice {0}")]
+        InvalidSubservice(u8),
         #[error("Not enough application data available: {0}")]
         NotEnoughAppData(String),
         #[error("Generic store error: {0}")]
         StoreError(#[from] StoreError),
-        #[error("Error with the pool RwGuard")]
+        #[error("Error with the pool RwGuard: {0}")]
         RwGuardError(String),
-        #[error("MQ backend disconnect error")]
+        #[error("MQ send error: {0}")]
+        SendError(String),
+        #[error("TX message queue side has disconnected")]
         QueueDisconnected,
         #[error("Other error {0}")]
         OtherError(String),
@@ -315,6 +320,7 @@ pub mod std_mod {
     pub enum PusPacketHandlerResult {
         RequestHandled,
         RequestHandledPartialSuccess(PartialPusHandlingError),
+        SubserviceNotImplemented(u8, VerificationToken<TcStateAccepted>),
         CustomSubservice(u8, VerificationToken<TcStateAccepted>),
         Empty,
     }
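
The reworked error variants add {0} placeholders so the wrapped detail string ends up in the rendered message. A small sketch of that behavior using the thiserror crate (the enum below is a stand-in, not the satrs-core type, and assumes thiserror as a dependency):

use thiserror::Error;

// Stand-in error enum mirroring the attribute style used above.
#[derive(Debug, Error)]
enum DemoError {
    #[error("Error with the pool RwGuard: {0}")]
    RwGuardError(String),
    #[error("MQ send error: {0}")]
    SendError(String),
}

fn main() {
    let guard_err = DemoError::RwGuardError("poisoned lock".to_string());
    let send_err = DemoError::SendError("Forwarding event request failed".to_string());
    // Display output now carries the payload:
    // Error with the pool RwGuard: poisoned lock
    // MQ send error: Forwarding event request failed
    println!("{guard_err}");
    println!("{send_err}");
}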

@@ -227,6 +227,17 @@ impl From<VerificationToken<TcStateNone>> for TcStateToken {
     }
 }

+impl TryFrom<TcStateToken> for VerificationToken<TcStateAccepted> {
+    type Error = ();
+
+    fn try_from(value: TcStateToken) -> Result<Self, Self::Error> {
+        if let TcStateToken::Accepted(token) = value {
+            Ok(token)
+        } else {
+            return Err(());
+        }
+    }
+}
 impl From<VerificationToken<TcStateAccepted>> for TcStateToken {
     fn from(t: VerificationToken<TcStateAccepted>) -> Self {
         TcStateToken::Accepted(t)

@@ -9,6 +9,7 @@ use log::{info, warn};

 use crate::hk::AcsHkIds;
 use crate::logging::setup_logger;
+use crate::pus::event::Pus5Wrapper;
 use crate::pus::scheduler::Pus11Wrapper;
 use crate::pus::test::Service17CustomWrapper;
 use crate::pus::PusTcMpscRouter;
@@ -26,6 +27,7 @@ use satrs_core::pus::event_man::{
     DefaultPusMgmtBackendProvider, EventReporter, EventRequest, EventRequestWithToken,
     PusEventDispatcher,
 };
+use satrs_core::pus::event_srv::PusService5EventHandler;
 use satrs_core::pus::hk::Subservice as HkSubservice;
 use satrs_core::pus::scheduler::PusScheduler;
 use satrs_core::pus::scheduler_srv::PusService11SchedHandler;
@@ -141,7 +143,7 @@ fn main() {
         sock_addr,
         verif_reporter: verif_reporter.clone(),
         event_sender,
-        event_request_tx,
+        // event_request_tx,
         request_map,
         seq_count_provider: seq_count_provider_tmtc,
     };
@@ -184,16 +186,26 @@ fn main() {
     };
     let scheduler = PusScheduler::new_with_current_init_time(Duration::from_secs(5))
         .expect("Creating PUS Scheduler failed");
-    let pus11_handler = PusService11SchedHandler::new(
+    let pus_11_handler = PusService11SchedHandler::new(
         pus_sched_rx,
         tc_store.pool.clone(),
         tm_funnel_tx.clone(),
         tm_store.clone(),
         PUS_APID,
-        verif_reporter,
+        verif_reporter.clone(),
         scheduler,
     );
-    let mut pus_11_wrapper = Pus11Wrapper { pus11_handler };
+    let mut pus_11_wrapper = Pus11Wrapper { pus_11_handler };
+    let pus_5_handler = PusService5EventHandler::new(
+        pus_event_rx,
+        tc_store.pool.clone(),
+        tm_funnel_tx.clone(),
+        tm_store.clone(),
+        PUS_APID,
+        verif_reporter,
+        event_request_tx,
+    );
+    let mut pus_5_wrapper = Pus5Wrapper { pus_5_handler };

     info!("Starting TMTC task");
     let jh0 = thread::Builder::new()
@@ -236,7 +248,7 @@ fn main() {
     let mut time_provider = TimeProvider::new_with_u16_days(0, 0);
     let mut report_completion = |event_req: EventRequestWithToken, timestamp: &[u8]| {
         reporter_event_handler
-            .completion_success(event_req.token, Some(timestamp))
+            .completion_success(event_req.token.try_into().unwrap(), Some(timestamp))
             .expect("Sending completion success failed");
     };
     loop {
@@ -366,6 +378,7 @@ fn main() {
     };
     is_srv_finished(pus_17_wrapper.perform_operation());
     is_srv_finished(pus_11_wrapper.perform_operation());
+    is_srv_finished(pus_5_wrapper.perform_operation());
     if all_queues_empty {
         thread::sleep(Duration::from_millis(200));
     }
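
main() wires the new handler with plain mpsc channels: accepted TCs flow in through pus_event_rx and event requests flow out through event_request_tx to the event management task. A reduced sketch of that wiring with std types only (names and the request struct are illustrative, not the satrs-core API):

use std::sync::mpsc;
use std::thread;

// Stand-in for EventRequestWithToken: just an event ID plus an enable flag.
struct EventRequest {
    event_id: u32,
    enable: bool,
}

fn main() {
    // Channel from the PUS 5 service handler to the event management task.
    let (event_request_tx, event_request_rx) = mpsc::channel::<EventRequest>();

    // Event management task: consumes forwarded requests.
    let event_task = thread::spawn(move || {
        while let Ok(req) = event_request_rx.recv() {
            println!(
                "event {} generation {}",
                req.event_id,
                if req.enable { "enabled" } else { "disabled" }
            );
        }
    });

    // PUS 5 handler side: forward an enable request, then drop the sender so
    // the receiving task terminates.
    event_request_tx
        .send(EventRequest { event_id: 42, enable: true })
        .expect("Forwarding event request failed");
    drop(event_request_tx);
    event_task.join().unwrap();
}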

satrs-example/src/pus/event.rs (new file, 33 lines)
@@ -0,0 +1,33 @@
+use log::{error, warn};
+use satrs_core::pus::event_srv::PusService5EventHandler;
+use satrs_core::pus::{PusPacketHandlerResult, PusServiceHandler};
+
+pub struct Pus5Wrapper {
+    pub pus_5_handler: PusService5EventHandler,
+}
+
+impl Pus5Wrapper {
+    pub fn perform_operation(&mut self) -> bool {
+        match self.pus_5_handler.handle_next_packet() {
+            Ok(result) => match result {
+                PusPacketHandlerResult::RequestHandled => {}
+                PusPacketHandlerResult::RequestHandledPartialSuccess(e) => {
+                    warn!("PUS 5 partial packet handling success: {e:?}")
+                }
+                PusPacketHandlerResult::CustomSubservice(invalid, _) => {
+                    warn!("PUS 5 invalid subservice {invalid}");
+                }
+                PusPacketHandlerResult::SubserviceNotImplemented(subservice, _) => {
+                    warn!("PUS 5 subservice {subservice} not implemented");
+                }
+                PusPacketHandlerResult::Empty => {
+                    return true;
+                }
+            },
+            Err(error) => {
+                error!("PUS packet handling error: {error:?}")
+            }
+        }
+        false
+    }
+}
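
Pus5Wrapper::perform_operation follows the same polling convention as the other service wrappers: drain one packet per call and report true once the queue is empty so the caller can back off. A minimal sketch of that convention with dummy types (not the satrs-core handler API):

// Dummy stand-ins for the handler and its result type.
enum HandlerResult {
    RequestHandled,
    Empty,
}

struct DummyHandler {
    pending: u32,
}

impl DummyHandler {
    // Mirrors handle_next_packet: one packet per call, Empty when drained.
    fn handle_next_packet(&mut self) -> HandlerResult {
        if self.pending == 0 {
            return HandlerResult::Empty;
        }
        self.pending -= 1;
        HandlerResult::RequestHandled
    }
}

// Mirrors perform_operation: true means "queue empty, caller may sleep".
fn perform_operation(handler: &mut DummyHandler) -> bool {
    match handler.handle_next_packet() {
        HandlerResult::RequestHandled => false,
        HandlerResult::Empty => true,
    }
}

fn main() {
    let mut handler = DummyHandler { pending: 2 };
    let mut polls = 0;
    while !perform_operation(&mut handler) {
        polls += 1;
    }
    assert_eq!(polls, 2);
    println!("drained after {polls} polls");
}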

(deleted file, 59 lines)
@@ -1,59 +0,0 @@
-use satrs_core::event_man::{EventManager, EventManagerWithMpscQueue};
-use satrs_core::events::EventU32;
-use satrs_core::params::Params;
-use satrs_core::pool::{SharedPool, StoreAddr};
-use satrs_core::pus::event_man::EventReporter;
-use satrs_core::pus::verification::{
-    StdVerifReporterWithSender, TcStateAccepted, VerificationToken,
-};
-use satrs_core::pus::{
-    AcceptedTc, PusPacketHandlerResult, PusPacketHandlingError, PusServiceBase, PusServiceHandler,
-};
-use satrs_core::tmtc::tm_helper::SharedTmStore;
-use std::sync::mpsc::{Receiver, Sender};
-
-pub struct PusService5EventHandler {
-    psb: PusServiceBase,
-    event_manager: EventManagerWithMpscQueue<EventU32, Params>,
-}
-
-impl PusService5EventHandler {
-    pub fn new(
-        receiver: Receiver<AcceptedTc>,
-        tc_pool: SharedPool,
-        tm_tx: Sender<StoreAddr>,
-        tm_store: SharedTmStore,
-        tm_apid: u16,
-        verification_handler: StdVerifReporterWithSender,
-        event_manager: EventManagerWithMpscQueue<EventU32, Params>,
-    ) -> Self {
-        Self {
-            psb: PusServiceBase::new(
-                receiver,
-                tc_pool,
-                tm_tx,
-                tm_store,
-                tm_apid,
-                verification_handler,
-            ),
-            event_manager,
-        }
-    }
-}
-
-impl PusServiceHandler for PusService5EventHandler {
-    fn psb_mut(&mut self) -> &mut PusServiceBase {
-        &mut self.psb
-    }
-    fn psb(&self) -> &PusServiceBase {
-        &self.psb
-    }
-
-    fn handle_one_tc(
-        &mut self,
-        addr: StoreAddr,
-        token: VerificationToken<TcStateAccepted>,
-    ) -> Result<PusPacketHandlerResult, PusPacketHandlingError> {
-        Ok(PusPacketHandlerResult::RequestHandled)
-    }
-}

@@ -9,11 +9,11 @@ use satrs_core::spacepackets::ecss::PusServiceId;
 use satrs_core::spacepackets::tc::PusTc;
 use satrs_core::spacepackets::time::cds::TimeProvider;
 use satrs_core::spacepackets::time::TimeWriter;
-use satrs_core::tmtc::tm_helper::{PusTmWithCdsShortHelper, SharedTmStore};
+use satrs_core::tmtc::tm_helper::PusTmWithCdsShortHelper;
 use satrs_example::{tmtc_err, CustomPusServiceId};
 use std::sync::mpsc::Sender;

-pub mod events;
+pub mod event;
 pub mod scheduler;
 pub mod test;

@@ -25,31 +25,6 @@ pub struct PusTcMpscRouter {
     pub action_service_receiver: Sender<AcceptedTc>,
 }

-// impl PusTcRouter for PusTcMpscRouter {
-//     type Error = ();
-//
-//     fn route_pus_tc(&mut self, apid: u16, service: u8, subservice: u8, tc: &PusTc) {
-//         if apid == PUS_APID {
-//             if service == PusServiceId::Event as u8 {
-//                 self.event_service_receiver.send_tc(*tc).unwrap();
-//             }
-//             if service == PusServiceId::Action as u8 {
-//                 // TODO: Look up object ID and then route the action request to that object.
-//                 self.action_service_receiver.send_tc(*tc).unwrap();
-//             }
-//             if service == PusServiceId::Housekeeping as u8 {
-//                 // TODO: Look up object ID and then route the HK request to that object.
-//             }
-//             if service == PusServiceId::Scheduling as u8 {
-//                 self.sched_service_receiver.send_tc(*tc).unwrap();
-//             }
-//             if service == PusServiceId::Test as u8 {
-//                 self.test_service_receiver.send_tc(*tc).unwrap();
-//             }
-//         }
-//         todo!()
-//     }
-// }
 pub struct PusReceiver {
     pub tm_helper: PusTmWithCdsShortHelper,
     pub tm_args: PusTmArgs,
@@ -58,10 +33,6 @@ pub struct PusReceiver {
 }

 pub struct PusTmArgs {
-    /// All telemetry is sent with this sender handle.
-    pub tm_tx: Sender<StoreAddr>,
-    /// All TM to be sent is stored here
-    pub tm_store: SharedTmStore,
     /// All verification reporting is done with this reporter.
     pub verif_reporter: StdVerifReporterWithSender,
     /// Sequence count provider for TMs sent from within pus demultiplexer
@@ -74,55 +45,9 @@ impl PusTmArgs {
     }
 }

-// #[allow(dead_code)]
-// pub struct PusTcHandlerBase {
-//     pub tc_store: Box<dyn PoolProvider>,
-//     pub receiver: Receiver<(StoreAddr, VerificationToken<TcStateAccepted>)>,
-//     pub verif_reporter: StdVerifReporterWithSender,
-//     pub time_provider: Box<dyn CcsdsTimeProvider>,
-// }
-//
-// pub trait TestHandlerNoPing {
-//     fn handle_no_ping_tc(&mut self, tc: PusTc);
-// }
-//
-// #[allow(dead_code)]
-// pub struct PusTestTcHandler {
-//     pub base: PusTcHandlerBase,
-//     handler: Option<Box<dyn TestHandlerNoPing>>,
-// }
-//
-// #[allow(dead_code)]
-// pub struct PusScheduleTcHandler {
-//     pub base: PusTestTcHandler,
-// }
-//
-// impl PusTestTcHandler {
-//     #[allow(dead_code)]
-//     pub fn operation(&mut self) {
-//         let (addr, token) = self.base.receiver.recv().unwrap();
-//         let data = self.base.tc_store.read(&addr).unwrap();
-//         let (pus_tc, _len) = PusTc::from_bytes(data).unwrap();
-//         let stamp: [u8; 7] = [0; 7];
-//         if pus_tc.subservice() == 1 {
-//             self.base
-//                 .verif_reporter
-//                 .completion_success(token, Some(&stamp))
-//                 .unwrap();
-//         } else if let Some(handler) = &mut self.handler {
-//             handler.handle_no_ping_tc(pus_tc);
-//         }
-//     }
-// }
-
 pub struct PusTcArgs {
-    //pub event_request_tx: Sender<EventRequestWithToken>,
     /// This routes all telecommands to their respective recipients
     pub pus_router: PusTcMpscRouter,
-    /// Request routing helper. Maps targeted requests to their recipient.
-    //pub request_map: HashMap<TargetId, Sender<RequestWithToken>>,
-    /// Required for scheduling of telecommands.
-    //pub tc_source: PusTcSource,
     /// Used to send events from within the TC router
     pub event_sender: Sender<(EventU32, Option<Params>)>,
 }
@@ -330,64 +255,6 @@ impl PusReceiver {
 // }
 // }

 // impl PusReceiver {
-//     fn handle_test_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
-//         match PusPacket::subservice(pus_tc) {
-//             1 => {
-//                 info!("Received PUS ping command TC[17,1]");
-//                 info!("Sending ping reply PUS TM[17,2]");
-//                 let start_token = self
-//                     .tm_args
-//                     .verif_reporter
-//                     .start_success(token, Some(self.stamp_helper.stamp()))
-//                     .expect("Error sending start success");
-//                 let ping_reply = self.tm_helper.create_pus_tm_timestamp_now(
-//                     17,
-//                     2,
-//                     None,
-//                     self.tm_args.seq_count_provider.get(),
-//                 );
-//                 let addr = self.tm_args.tm_store.add_pus_tm(&ping_reply);
-//                 self.tm_args
-//                     .tm_tx
-//                     .send(addr)
-//                     .expect("Sending TM to TM funnel failed");
-//                 self.tm_args.seq_count_provider.increment();
-//                 self.tm_args
-//                     .verif_reporter
-//                     .completion_success(start_token, Some(self.stamp_helper.stamp()))
-//                     .expect("Error sending completion success");
-//             }
-//             128 => {
-//                 info!("Generating test event");
-//                 self.tc_args
-//                     .event_sender
-//                     .send((TEST_EVENT.into(), None))
-//                     .expect("Sending test event failed");
-//                 let start_token = self
-//                     .tm_args
-//                     .verif_reporter
-//                     .start_success(token, Some(self.stamp_helper.stamp()))
-//                     .expect("Error sending start success");
-//                 self.tm_args
-//                     .verif_reporter
-//                     .completion_success(start_token, Some(self.stamp_helper.stamp()))
-//                     .expect("Error sending completion success");
-//             }
-//             _ => {
-//                 self.tm_args
-//                     .verif_reporter
-//                     .start_failure(
-//                         token,
-//                         FailParams::new(
-//                             Some(self.stamp_helper.stamp()),
-//                             &tmtc_err::INVALID_PUS_SUBSERVICE,
-//                             None,
-//                         ),
-//                     )
-//                     .expect("Sending start failure TM failed");
-//             }
-//         }
-//     }
 //
 // fn handle_hk_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
 // if pus_tc.user_data().is_none() {
@@ -495,204 +362,6 @@ impl PusReceiver {
 // }
 // }
 //
-// fn handle_event_request(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
-//     let send_start_failure = |vr: &mut StdVerifReporterWithSender,
-//                               timestamp: &[u8],
-//                               failure_code: &ResultU16,
-//                               failure_data: Option<&[u8]>| {
-//         vr.start_failure(
-//             token,
-//             FailParams::new(Some(timestamp), failure_code, failure_data),
-//         )
-//         .expect("Sending start failure TM failed");
-//     };
-//     let send_start_acceptance = |vr: &mut StdVerifReporterWithSender, timestamp: &[u8]| {
-//         vr.start_success(token, Some(timestamp))
-//             .expect("Sending start success TM failed")
-//     };
-//     if pus_tc.user_data().is_none() {
-//         send_start_failure(
-//             &mut self.tm_args.verif_reporter,
-//             self.stamp_helper.stamp(),
-//             &tmtc_err::NOT_ENOUGH_APP_DATA,
-//             None,
-//         );
-//         return;
-//     }
-//     let app_data = pus_tc.user_data().unwrap();
-//     if app_data.len() < 4 {
-//         send_start_failure(
-//             &mut self.tm_args.verif_reporter,
-//             self.stamp_helper.stamp(),
-//             &tmtc_err::NOT_ENOUGH_APP_DATA,
-//             None,
-//         );
-//         return;
-//     }
-//     let event_id = EventU32::from(u32::from_be_bytes(app_data.try_into().unwrap()));
-//     match PusPacket::subservice(pus_tc).try_into() {
-//         Ok(event::Subservice::TcEnableEventGeneration) => {
-//             let start_token = send_start_acceptance(
-//                 &mut self.tm_args.verif_reporter,
-//                 self.stamp_helper.stamp(),
-//             );
-//             self.tc_args
-//                 .event_request_tx
-//                 .send(EventRequestWithToken {
-//                     request: EventRequest::Enable(event_id),
-//                     token: start_token,
-//                 })
-//                 .expect("Sending event request failed");
-//         }
-//         Ok(event::Subservice::TcDisableEventGeneration) => {
-//             let start_token = send_start_acceptance(
-//                 &mut self.tm_args.verif_reporter,
-//                 self.stamp_helper.stamp(),
-//             );
-//             self.tc_args
-//                 .event_request_tx
-//                 .send(EventRequestWithToken {
-//                     request: EventRequest::Disable(event_id),
-//                     token: start_token,
-//                 })
-//                 .expect("Sending event request failed");
-//         }
-//         _ => {
-//             send_start_failure(
-//                 &mut self.tm_args.verif_reporter,
-//                 self.stamp_helper.stamp(),
-//                 &tmtc_err::INVALID_PUS_SUBSERVICE,
-//                 None,
-//             );
-//         }
-//     }
-// }
-//
-// fn handle_scheduled_tc(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
-//     let subservice = match pus_11_generic_tc_check(pus_tc) {
-//         Ok(subservice) => subservice,
-//         Err(e) => match e {
-//             GenericTcCheckError::NotEnoughAppData => {
-//                 self.tm_args
-//                     .verif_reporter
-//                     .start_failure(
-//                         token,
-//                         FailParams::new(
-//                             Some(self.stamp_helper.stamp()),
-//                             &tmtc_err::NOT_ENOUGH_APP_DATA,
-//                             None,
-//                         ),
-//                     )
-//                     .expect("could not sent verification error");
-//                 return;
-//             }
-//             GenericTcCheckError::InvalidSubservice => {
-//                 self.tm_args
-//                     .verif_reporter
-//                     .start_failure(
-//                         token,
-//                         FailParams::new(
-//                             Some(self.stamp_helper.stamp()),
-//                             &tmtc_err::INVALID_PUS_SUBSERVICE,
-//                             None,
-//                         ),
-//                     )
-//                     .expect("could not sent verification error");
-//                 return;
-//             }
-//         },
-//     };
-//     match subservice {
-//         scheduling::Subservice::TcEnableScheduling => {
-//             let start_token = self
-//                 .tm_args
-//                 .verif_reporter
-//                 .start_success(token, Some(self.stamp_helper.stamp()))
-//                 .expect("Error sending start success");
-//
-//             let mut scheduler = self.tc_args.scheduler.borrow_mut();
-//             scheduler.enable();
-//             if scheduler.is_enabled() {
-//                 self.tm_args
-//                     .verif_reporter
-//                     .completion_success(start_token, Some(self.stamp_helper.stamp()))
-//                     .expect("Error sending completion success");
-//             } else {
-//                 panic!("Failed to enable scheduler");
-//             }
-//         }
-//         scheduling::Subservice::TcDisableScheduling => {
-//             let start_token = self
-//                 .tm_args
-//                 .verif_reporter
-//                 .start_success(token, Some(self.stamp_helper.stamp()))
-//                 .expect("Error sending start success");
-//
-//             let mut scheduler = self.tc_args.scheduler.borrow_mut();
-//             scheduler.disable();
-//             if !scheduler.is_enabled() {
-//                 self.tm_args
-//                     .verif_reporter
-//                     .completion_success(start_token, Some(self.stamp_helper.stamp()))
-//                     .expect("Error sending completion success");
-//             } else {
-//                 panic!("Failed to disable scheduler");
-//             }
-//         }
-//         scheduling::Subservice::TcResetScheduling => {
-//             let start_token = self
-//                 .tm_args
-//                 .verif_reporter
-//                 .start_success(token, Some(self.stamp_helper.stamp()))
-//                 .expect("Error sending start success");
-//
-//             let mut pool = self
-//                 .tc_args
-//                 .tc_source
-//                 .tc_store
-//                 .pool
-//                 .write()
-//                 .expect("Locking pool failed");
-//
-//             let mut scheduler = self.tc_args.scheduler.borrow_mut();
-//             scheduler
-//                 .reset(pool.as_mut())
-//                 .expect("Error resetting TC Pool");
-//             drop(scheduler);
-//
-//             self.tm_args
-//                 .verif_reporter
-//                 .completion_success(start_token, Some(self.stamp_helper.stamp()))
-//                 .expect("Error sending completion success");
-//         }
-//         scheduling::Subservice::TcInsertActivity => {
-//             let start_token = self
-//                 .tm_args
-//                 .verif_reporter
-//                 .start_success(token, Some(self.stamp_helper.stamp()))
-//                 .expect("error sending start success");
-//
-//             let mut pool = self
-//                 .tc_args
-//                 .tc_source
-//                 .tc_store
-//                 .pool
-//                 .write()
-//                 .expect("locking pool failed");
-//             let mut scheduler = self.tc_args.scheduler.borrow_mut();
-//             scheduler
-//                 .insert_wrapped_tc::<TimeProvider>(pus_tc, pool.as_mut())
-//                 .expect("insertion of activity into pool failed");
-//             drop(scheduler);
-//
-//             self.tm_args
-//                 .verif_reporter
-//                 .completion_success(start_token, Some(self.stamp_helper.stamp()))
-//                 .expect("sending completion success failed");
-//         }
-//         _ => {}
-//     }
-// }
 //
 // fn handle_mode_service(&mut self, pus_tc: &PusTc, token: VerificationToken<TcStateAccepted>) {
 // let mut app_data_len = 0;

@@ -3,12 +3,12 @@ use satrs_core::pus::scheduler_srv::PusService11SchedHandler;
 use satrs_core::pus::{PusPacketHandlerResult, PusServiceHandler};

 pub struct Pus11Wrapper {
-    pub pus11_handler: PusService11SchedHandler,
+    pub pus_11_handler: PusService11SchedHandler,
 }

 impl Pus11Wrapper {
     pub fn perform_operation(&mut self) -> bool {
-        match self.pus11_handler.handle_next_packet() {
+        match self.pus_11_handler.handle_next_packet() {
             Ok(result) => match result {
                 PusPacketHandlerResult::RequestHandled => {}
                 PusPacketHandlerResult::RequestHandledPartialSuccess(e) => {
@@ -17,6 +17,9 @@ impl Pus11Wrapper {
                 PusPacketHandlerResult::CustomSubservice(invalid, _) => {
                     warn!("PUS11 invalid subservice {invalid}");
                 }
+                PusPacketHandlerResult::SubserviceNotImplemented(subservice, _) => {
+                    warn!("PUS11: Subservice {subservice} not implemented");
+                }
                 PusPacketHandlerResult::Empty => {
                     return true;
                 }
@@ -33,6 +33,9 @@ impl Service17CustomWrapper {
                     partial_err
                 );
             }
+            PusPacketHandlerResult::SubserviceNotImplemented(subservice, _) => {
+                warn!("PUS17: Subservice {subservice} not implemented")
+            }
             PusPacketHandlerResult::CustomSubservice(subservice, token) => {
                 let (buf, _) = self.pus17_handler.pus_tc_buf();
                 let (tc, _) = PusTc::from_bytes(buf).unwrap();

@@ -16,7 +16,6 @@ use crate::ccsds::CcsdsReceiver;
 use crate::pus::{PusReceiver, PusTcArgs, PusTcMpscRouter, PusTmArgs};
 use crate::requests::RequestWithToken;
 use satrs_core::pool::{SharedPool, StoreAddr, StoreError};
-use satrs_core::pus::event_man::EventRequestWithToken;
 use satrs_core::pus::scheduler::{PusScheduler, TcInfo};
 use satrs_core::pus::verification::StdVerifReporterWithSender;
 use satrs_core::seq_count::SeqCountProviderSyncClonable;
@@ -32,7 +31,6 @@ pub struct OtherArgs {
     pub sock_addr: SocketAddr,
     pub verif_reporter: StdVerifReporterWithSender,
     pub event_sender: Sender<(EventU32, Option<Params>)>,
-    pub event_request_tx: Sender<EventRequestWithToken>,
     pub request_map: HashMap<u32, Sender<RequestWithToken>>,
     pub seq_count_provider: SeqCountProviderSyncClonable,
 }
@@ -161,8 +159,6 @@ pub fn core_tmtc_task(
     ));

     let pus_tm_args = PusTmArgs {
-        tm_tx: tm_args.tm_sink_sender,
-        tm_store: tm_args.tm_store.clone(),
         verif_reporter: args.verif_reporter,
         seq_count_provider: args.seq_count_provider.clone(),
     };