a lot of clippy fixes

Robin Müller 2022-12-20 17:23:11 +01:00
parent 73d3fd6644
commit 2532119fbe
6 changed files with 28 additions and 40 deletions

View File

@@ -211,9 +211,9 @@ impl<E: 'static, Event: GenericEvent + Copy + 'static, AuxDataProvider: Clone +
     /// Create an event manager where the sender table will be the [DefaultSenderTableProvider]
     /// and the listener table will be the [DefaultListenerTableProvider].
     pub fn new(event_receiver: Box<dyn EventReceiver<Event, AuxDataProvider>>) -> Self {
-        let listener_table = Box::new(DefaultListenerTableProvider::default());
-        let sender_table =
-            Box::new(DefaultSenderTableProvider::<E, Event, AuxDataProvider>::default());
+        let listener_table: Box<DefaultListenerTableProvider> = Box::default();
+        let sender_table: Box<DefaultSenderTableProvider<E, Event, AuxDataProvider>> =
+            Box::default();
         Self::new_custom_tables(listener_table, sender_table, event_receiver)
     }
 }
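
The change above applies clippy's box_default lint, which prefers Box::default() over Box::new(T::default()) once the boxed type is named in an annotation. A minimal, self-contained sketch of the pattern; the ListenerTable type below is made up for illustration and is not the crate's actual table type:

    #[derive(Default)]
    struct ListenerTable {
        entries: Vec<u32>,
    }

    fn main() {
        // clippy::box_default flags this form ...
        let verbose: Box<ListenerTable> = Box::new(ListenerTable::default());
        // ... and suggests the equivalent, shorter form:
        let concise: Box<ListenerTable> = Box::default();
        assert_eq!(verbose.entries.len(), concise.entries.len());
    }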

View File

@@ -136,17 +136,14 @@ impl<RAW: ToBeBytes, GID, UID> EventBase<RAW, GID, UID> {
 impl EventBase<u32, u16, u16> {
     #[inline]
     fn raw(&self) -> u32 {
-        (((self.severity as u32) << 30) | ((self.group_id as u32) << 16) | self.unique_id as u32)
-            as u32
+        ((self.severity as u32) << 30) | ((self.group_id as u32) << 16) | self.unique_id as u32
     }
 }

 impl EventBase<u16, u8, u8> {
     #[inline]
     fn raw(&self) -> u16 {
-        (((self.severity as u16) << 14) as u16
-            | ((self.group_id as u16) << 8) as u16
-            | self.unique_id as u16) as u16
+        ((self.severity as u16) << 14) | ((self.group_id as u16) << 8) | self.unique_id as u16
     }
 }
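
These casts were flagged by clippy's unnecessary_cast lint: the shift/OR expressions already evaluate to u32 and u16 respectively, so the trailing identity casts add nothing. The same pattern drives the cast removals in the pool and verification changes below. A simplified, runnable sketch; the field types here are illustrative rather than the crate's actual definitions:

    struct EventBase {
        severity: u32,
        group_id: u16,
        unique_id: u16,
    }

    impl EventBase {
        // Before the fix: the outer `as u32` casts a value that is already u32.
        fn raw_with_redundant_cast(&self) -> u32 {
            ((self.severity << 30) | ((self.group_id as u32) << 16) | self.unique_id as u32) as u32
        }

        // After the fix: same value, no identity cast.
        fn raw(&self) -> u32 {
            (self.severity << 30) | ((self.group_id as u32) << 16) | self.unique_id as u32
        }
    }

    fn main() {
        let event = EventBase {
            severity: 2,
            group_id: 3,
            unique_id: 42,
        };
        assert_eq!(event.raw(), event.raw_with_redundant_cast());
    }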

View File

@@ -136,7 +136,7 @@ impl StoreAddr {
     pub const INVALID_ADDR: u32 = 0xFFFFFFFF;

     pub fn raw(&self) -> u32 {
-        ((self.pool_idx as u32) << 16) as u32 | self.packet_idx as u32
+        ((self.pool_idx as u32) << 16) | self.packet_idx as u32
     }
 }
@@ -239,7 +239,7 @@ impl LocalPool {
     fn validate_addr(&self, addr: &StoreAddr) -> Result<(), StoreError> {
         let pool_idx = addr.pool_idx as usize;
-        if pool_idx as usize >= self.pool_cfg.cfg.len() {
+        if pool_idx >= self.pool_cfg.cfg.len() {
             return Err(StoreError::InvalidStoreId(
                 StoreIdError::InvalidSubpool(addr.pool_idx),
                 Some(*addr),

View File

@@ -569,7 +569,7 @@ impl VerificationReporterBasic {
     ) -> Result<PusTm, EcssTmError<E>> {
         let mut source_data_len = size_of::<u32>();
         if let Some(step) = step {
-            source_data_len += step.byte_width() as usize;
+            source_data_len += step.byte_width();
         }
         source_buffer_large_enough(buf.len(), source_data_len)?;
         let mut idx = 0;
@@ -577,7 +577,7 @@ impl VerificationReporterBasic {
         idx += RequestId::SIZE_AS_BYTES;
         if let Some(step) = step {
             // Size check was done beforehand
-            step.write_to_be_bytes(&mut buf[idx..idx + step.byte_width() as usize])
+            step.write_to_be_bytes(&mut buf[idx..idx + step.byte_width()])
                 .unwrap();
         }
         let mut sp_header = SpHeader::tm_unseg(self.apid(), 0, 0).unwrap();
@@ -601,10 +601,9 @@ impl VerificationReporterBasic {
         params: &'a FailParams,
     ) -> Result<PusTm, EcssTmError<E>> {
         let mut idx = 0;
-        let mut source_data_len =
-            RequestId::SIZE_AS_BYTES + params.failure_code.byte_width() as usize;
+        let mut source_data_len = RequestId::SIZE_AS_BYTES + params.failure_code.byte_width();
         if let Some(step) = step {
-            source_data_len += step.byte_width() as usize;
+            source_data_len += step.byte_width();
         }
         if let Some(failure_data) = params.failure_data {
             source_data_len += failure_data.len();
@@ -614,14 +613,14 @@ impl VerificationReporterBasic {
         idx += RequestId::SIZE_AS_BYTES;
         if let Some(step) = step {
             // Size check done beforehand
-            step.write_to_be_bytes(&mut buf[idx..idx + step.byte_width() as usize])
+            step.write_to_be_bytes(&mut buf[idx..idx + step.byte_width()])
                 .unwrap();
-            idx += step.byte_width() as usize;
+            idx += step.byte_width();
         }
         params
             .failure_code
-            .write_to_be_bytes(&mut buf[idx..idx + params.failure_code.byte_width() as usize])?;
+            .write_to_be_bytes(&mut buf[idx..idx + params.failure_code.byte_width()])?;
-        idx += params.failure_code.byte_width() as usize;
+        idx += params.failure_code.byte_width();
         if let Some(failure_data) = params.failure_data {
             buf[idx..idx + failure_data.len()].copy_from_slice(failure_data);
         }
@@ -709,8 +708,8 @@ mod allocmod {
            source_data_buf: vec![
                0;
                RequestId::SIZE_AS_BYTES
-                    + cfg.step_field_width as usize
-                    + cfg.fail_code_field_width as usize
+                    + cfg.step_field_width
+                    + cfg.fail_code_field_width
                    + cfg.max_fail_data_len
            ],
            seq_counter: cfg.seq_counter.clone(),

View File

@@ -83,6 +83,7 @@ fn main() {
     let sender = MpscVerifSender::new(tm_store.clone(), tm_funnel_tx.clone());
     let verif_cfg = VerificationReporterCfg::new(
         PUS_APID,
+        #[allow(clippy::box_default)]
         Box::new(SimpleSeqCountProvider::default()),
         1,
         2,
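
Here the lint is silenced rather than applied: judging by the allow attribute, the boxed provider is handed to an API as a trait object, and Box::default() cannot infer the concrete type through that coercion. A self-contained sketch of that situation, with made-up stand-ins for the provider type, the trait, and the consuming function:

    #[derive(Default)]
    struct SimpleSeqCountProvider {
        counter: u32,
    }

    trait SequenceCountProvider {
        fn get(&self) -> u32;
    }

    impl SequenceCountProvider for SimpleSeqCountProvider {
        fn get(&self) -> u32 {
            self.counter
        }
    }

    // Hypothetical consumer that takes a boxed trait object.
    fn take_provider(provider: Box<dyn SequenceCountProvider>) -> u32 {
        provider.get()
    }

    fn main() {
        // Box::default() would not compile here because the concrete type
        // cannot be inferred from Box<dyn SequenceCountProvider>, so the
        // explicit form stays and the lint is allowed for this statement.
        #[allow(clippy::box_default)]
        let count = take_provider(Box::new(SimpleSeqCountProvider::default()));
        assert_eq!(count, 0);
    }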

View File

@@ -149,7 +149,7 @@ impl PusReceiver {
                 .expect("Sending start failure TM failed");
             return;
         }
-        let addressable_id = AddressableId::from_raw_be(&user_data).unwrap();
+        let addressable_id = AddressableId::from_raw_be(user_data).unwrap();
         if !self.request_map.contains_key(&addressable_id.target_id) {
             self.update_time_stamp();
             self.verif_reporter
@@ -160,27 +160,18 @@ impl PusReceiver {
                 .expect("Sending start failure TM failed");
             return;
         }
+        let send_request = |request: HkRequest| {
+            let sender = self.request_map.get(&addressable_id.target_id).unwrap();
+            sender
+                .send(Request::HkRequest(request))
+                .expect(&format!("Sending HK request {:?} failed", request))
+        };
         if PusPacket::subservice(pus_tc) == hk::Subservice::TcEnableGeneration as u8 {
-            let sender = self.request_map.get(&addressable_id.target_id).unwrap();
-            sender
-                .send(Request::HkRequest(HkRequest::Enable(
-                    addressable_id.unique_id,
-                )))
-                .expect("Sending HK request failed")
+            send_request(HkRequest::Enable(addressable_id.unique_id));
         } else if PusPacket::subservice(pus_tc) == hk::Subservice::TcDisableGeneration as u8 {
-            let sender = self.request_map.get(&addressable_id.target_id).unwrap();
-            sender
-                .send(Request::HkRequest(HkRequest::Disable(
-                    addressable_id.unique_id,
-                )))
-                .expect("Sending HK request failed");
+            send_request(HkRequest::Disable(addressable_id.unique_id));
         } else if PusPacket::subservice(pus_tc) == hk::Subservice::TcGenerateOneShotHk as u8 {
-            let sender = self.request_map.get(&addressable_id.target_id).unwrap();
-            sender
-                .send(Request::HkRequest(HkRequest::OneShot(
-                    addressable_id.unique_id,
-                )))
-                .expect("Sending HK request failed");
+            send_request(HkRequest::OneShot(addressable_id.unique_id));
         } else if PusPacket::subservice(pus_tc) == hk::Subservice::TcModifyCollectionInterval as u8
         {
             if user_data.len() < 12 {}
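
Besides dropping a needless borrow (&user_data where user_data already coerces to the expected slice reference), this hunk folds three nearly identical lookup-and-send blocks into a single send_request closure. A minimal, runnable sketch of that deduplication using a plain mpsc channel; the request enum and the channel stand in for the service's actual request plumbing:

    use std::sync::mpsc;

    #[derive(Debug, Clone, Copy)]
    enum HkRequest {
        Enable(u32),
        Disable(u32),
        OneShot(u32),
    }

    fn main() {
        let (tx, rx) = mpsc::channel();
        // One closure replaces three copies of the send-and-expect boilerplate;
        // the request variant becomes a parameter instead of being spelled out
        // three times.
        let send_request = |request: HkRequest| {
            tx.send(request)
                .expect(&format!("Sending HK request {:?} failed", request))
        };
        send_request(HkRequest::Enable(0));
        send_request(HkRequest::Disable(1));
        send_request(HkRequest::OneShot(2));
        for request in rx.try_iter() {
            println!("received {:?}", request);
        }
    }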