tweaks and fixes #24
@@ -10,7 +10,7 @@ from tmtccmd.tmtc import DefaultPusQueueHelper
 from tmtccmd.pus.s11_tc_sched import create_time_tagged_cmd
 from tmtccmd.pus.s200_fsfw_mode import Subservice as ModeSubservice
 
-from opssat_tmtc.camera_params import CameraParameters
+from opssat_tmtc.camera import CameraParameters
 from opssat_tmtc.common import (
     EXPERIMENT_APID,
     UniqueId,
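On the Python side the only change is the import path: CameraParameters is now imported from opssat_tmtc.camera rather than opssat_tmtc.camera_params.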
@@ -25,9 +25,10 @@ pub enum ClientError {
     Io(#[from] io::Error),
 }
 
-#[derive(Debug)]
+#[derive(Debug, PartialEq, Eq)]
 pub enum ClientResult {
     Ok,
+    AttemptedReconnection,
     ConnectionLost,
 }
 
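The widened derive is what makes ClientResult comparable with == and !=, which src/main.rs relies on further down (result != ClientResult::Ok). A minimal sketch of why the derive matters; run_once is a hypothetical stand-in for TcpSppClientStd::operation():

    #[derive(Debug, PartialEq, Eq)]
    pub enum ClientResult {
        Ok,
        AttemptedReconnection,
        ConnectionLost,
    }

    // Hypothetical stand-in for TcpSppClientStd::operation().
    fn run_once() -> ClientResult {
        ClientResult::AttemptedReconnection
    }

    fn main() {
        // Without the PartialEq/Eq derive, this comparison would not compile.
        if run_once() != ClientResult::Ok {
            println!("no regular connection processing this cycle");
        }
    }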
@@ -77,7 +78,6 @@ impl TcpSppClientCommon {
             Err(e) => match e {
                 mpsc::TryRecvError::Empty => break,
                 mpsc::TryRecvError::Disconnected => {
-                    println!("god fuckikng damn it");
                     log::error!("TM sender to TCP client has disconnected");
                     break;
                 }
@@ -91,6 +91,7 @@ impl TcpSppClientCommon {
 pub struct TcpSppClientStd {
     common: TcpSppClientCommon,
     read_and_idle_delay: Duration,
+    reconnect_flag: bool,
     // Optional to allow periodic reconnection attempts on the TCP server.
     stream: Option<StdTcpStream>,
 }
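The new reconnect_flag field records whether the client is currently in a reconnect cycle; the hunks below set it after a reconnection attempt and clear it on a successful read, so connection-state transitions are only logged once.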
@@ -114,6 +115,7 @@ impl TcpSppClientStd {
                 tc_source_tx,
                 validator: SimpleSpValidator::new(TcpComponent::Client, valid_ids.to_vec()),
             },
+            reconnect_flag: false,
             read_and_idle_delay: read_timeout,
             stream: None,
         };
@@ -149,11 +151,17 @@ impl TcpSppClientStd {
         match client.read(&mut self.common.read_buf) {
             // Not sure if this can happen or this is actually an error condition..
             Ok(0) => {
-                log::info!("server closed connection");
+                // To avoid spam.
+                if !self.reconnect_flag {
+                    log::info!("server closed connection");
+                }
                 self.stream = None;
                 return Ok(ClientResult::ConnectionLost);
             }
-            Ok(read_bytes) => self.common.handle_read_bytstream(read_bytes)?,
+            Ok(read_bytes) => {
+                self.reconnect_flag = false;
+                self.common.handle_read_bytstream(read_bytes)?;
+            }
             Err(e) => {
                 if e.kind() == io::ErrorKind::WouldBlock || e.kind() == io::ErrorKind::TimedOut
                 {
@@ -170,10 +178,14 @@ impl TcpSppClientStd {
             }
         } else {
             if self.attempt_connect(false)? {
-                log::info!("reconnected to server succesfully");
+                // To avoid spam.
+                if !self.reconnect_flag {
+                    log::info!("reconnected to server succesfully");
+                }
+                self.reconnect_flag = true;
                 return self.operation();
            }
            std::thread::sleep(self.read_and_idle_delay);
            return Ok(ClientResult::AttemptedReconnection);
        }
 
        Ok(ClientResult::Ok)
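Taken together, the reconnect changes implement a small "log once per state transition" pattern: the lost connection is reported on the first failed cycle, the recovery on the first successful reconnect, and repeats are suppressed until a read actually succeeds again. A stripped-down sketch of that flag logic; the Client type and its on_* methods are hypothetical stand-ins for the corresponding branches in operation():

    struct Client {
        // True once a reconnect happened without a successful read since.
        reconnect_flag: bool,
    }

    impl Client {
        // A successful read re-arms both log messages.
        fn on_read_ok(&mut self) {
            self.reconnect_flag = false;
        }

        // Mirrors the Ok(0) branch: log the loss only when not mid-cycle.
        fn on_connection_lost(&mut self) {
            if !self.reconnect_flag {
                println!("server closed connection");
            }
        }

        // Mirrors the attempt_connect branch: log once, then set the flag.
        fn on_reconnected(&mut self) {
            if !self.reconnect_flag {
                println!("reconnected to server");
            }
            self.reconnect_flag = true;
        }
    }

    fn main() {
        let mut client = Client { reconnect_flag: false };
        client.on_connection_lost(); // logged
        client.on_reconnected();     // logged, flag set
        client.on_connection_lost(); // suppressed: no good read in between
        client.on_reconnected();     // suppressed
        client.on_read_ok();         // flag cleared by a successful read
        client.on_connection_lost(); // logged again
    }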
src/main.rs
@@ -24,7 +24,10 @@ use satrs::{
     pus::event_man::EventRequestWithToken,
 };
 
-use crate::{controller::ControllerPathCollection, tmtc::tm_sink::TmFunnelDynamic};
+use crate::{
+    controller::ControllerPathCollection, interface::tcp_spp_client::ClientResult,
+    tmtc::tm_sink::TmFunnelDynamic,
+};
 use crate::{controller::ExperimentController, pus::test::create_test_service};
 use crate::{
     events::EventHandler,
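The expanded use crate::{...} block exists to bring ClientResult into scope for the comparison introduced in the next hunk.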
@@ -280,9 +283,17 @@ fn main() {
         .spawn(move || {
             info!("Running TCP SPP client");
             loop {
-                let result = tcp_spp_client.operation();
-                if let Err(e) = result {
-                    log::error!("TCP SPP client error: {}", e);
+                match tcp_spp_client.operation() {
+                    Ok(result) => {
+                        // If the client connection was processed regularly, the read timeout takes
+                        // care of the sleep time.
+                        if result != ClientResult::Ok {
+                            std::thread::sleep(STOP_CHECK_FREQUENCY);
+                        }
+                    }
+                    Err(e) => {
+                        log::error!("TCP SPP client error: {}", e);
+                    }
                 }
                 if tcp_client_stop_signal.load(std::sync::atomic::Ordering::Relaxed) {
                     break;
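The reworked loop gives the worker thread two pacing regimes: after ClientResult::Ok the read timeout inside operation() has already throttled the iteration, while any other outcome (reconnect attempt, lost connection) sleeps explicitly before the stop-flag check. A self-contained sketch of that loop shape; Client, its operation(), and the STOP_CHECK_FREQUENCY value are placeholders mirroring the diff, not the repository's real definitions:

    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::Arc;
    use std::time::Duration;

    // Placeholder value; the real constant lives elsewhere in main.rs.
    const STOP_CHECK_FREQUENCY: Duration = Duration::from_millis(400);

    #[allow(dead_code)]
    #[derive(Debug, PartialEq, Eq)]
    enum ClientResult {
        Ok,
        AttemptedReconnection,
        ConnectionLost,
    }

    // Hypothetical stand-in for TcpSppClientStd.
    struct Client;

    impl Client {
        fn operation(&mut self) -> std::io::Result<ClientResult> {
            // A real client would read from the socket or attempt a reconnect here.
            Ok(ClientResult::AttemptedReconnection)
        }
    }

    fn main() {
        let stop_signal = Arc::new(AtomicBool::new(false));
        let thread_stop = stop_signal.clone();
        let handle = std::thread::spawn(move || {
            let mut client = Client;
            loop {
                match client.operation() {
                    Ok(result) => {
                        // On Ok, the client's read timeout already paced this iteration.
                        if result != ClientResult::Ok {
                            std::thread::sleep(STOP_CHECK_FREQUENCY);
                        }
                    }
                    Err(e) => eprintln!("TCP SPP client error: {}", e),
                }
                if thread_stop.load(Ordering::Relaxed) {
                    break;
                }
            }
        });
        // Request shutdown and wait for the thread to observe the flag.
        stop_signal.store(true, Ordering::Relaxed);
        handle.join().unwrap();
    }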