Fix warnings from latest clippy
jhelovuo committed Nov 20, 2024
1 parent 620702e commit 7d66833
Showing 15 changed files with 38 additions and 46 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -47,7 +47,7 @@ bit-vec = "0.8.0"
speedy = "0.8.0"
log = "0.4.11"
num-traits = "0.2"
num-derive = "0.4"
num-derive = "0.4.2"
serde = { version = "1.0", features = ["derive"] }
serde_repr="0.1"
byteorder = { version = "1.3", features = ["i128"] }
10 changes: 8 additions & 2 deletions src/dds/key.rs
@@ -24,13 +24,19 @@ use crate::serialization::{
/// The key is used to distinguish between different Instances of the data in a
/// DDS Topic.
///
/// DDS WITH_KEY Topics are similar to distributed key-value maps. A Sample
/// corresponds to a key-value pair, and the `Keyed` trait allows extracting the
/// key from the pair. An Instance means all the Samples with the same Key.
/// These samples can be viewed as updates to the key. WITH_KEY topics also
/// support a Dispose operation, which corresponds to removing a key from the
/// map.
///
/// A `Keyed` type has an associated type `K`, which is the corresponding key
/// type. `K` must implement [`Key`]. Otherwise, `K` can be chosen to suit the
/// application. It is advisable that `K` is something that can be cloned with
/// reasonable effort.
///
/// [`Key`]: trait.Key.html
pub trait Keyed {
type K: Key;

@@ -91,7 +97,7 @@ impl KeyHash {
/// Note: When implementing Key, DeserializeOwned cannot and need not be
/// derived, as it is a type alias. Derive (or implement) the [`Deserialize`]
/// trait instead.
///
/// # Example
/// ```
/// use rustdds::*;
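As an editorial aside to the doc comments above: a minimal sketch of a keyed type, assuming that `Keyed`'s only required items are `type K` and `fn key(&self) -> Self::K`, and that a primitive such as `i32` already implements `Key` (as the crate's examples suggest). The payload type derives `Serialize`/`Deserialize` rather than `DeserializeOwned`, per the note above.

```rust
use rustdds::*;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Clone, Debug)]
struct SensorReading {
  sensor_id: i32, // identifies the Instance
  value: f32,     // samples with the same key are updates to that Instance
}

impl Keyed for SensorReading {
  type K = i32; // i32 is assumed to implement `Key` already

  fn key(&self) -> Self::K {
    self.sensor_id
  }
}

fn main() {
  let sample = SensorReading { sensor_id: 7, value: 21.5 };
  assert_eq!(sample.key(), 7);
}
```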
4 changes: 1 addition & 3 deletions src/dds/no_key/wrappers.rs
@@ -102,12 +102,10 @@ where
{
type DecodedKey = ();

#[allow(clippy::unused_unit, clippy::semicolon_if_nothing_returned)]
// transform_decoded_key is supposed to return
// a value, but in this instance it is of type unit.

#[allow(clippy::unused_unit, clippy::semicolon_if_nothing_returned)]
fn transform_decoded_key(_decoded_key: Self::DecodedKey) -> () {
// #[allow()]
()
}
}
4 changes: 2 additions & 2 deletions src/dds/participant.rs
@@ -615,7 +615,7 @@ pub struct DomainParticipantStatusStream<'a> {
status_listener: &'a DomainParticipantStatusListener,
}

impl<'a> Stream for DomainParticipantStatusStream<'a> {
impl Stream for DomainParticipantStatusStream<'_> {
type Item = DomainParticipantStatusEvent;

fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
@@ -637,7 +637,7 @@ impl<'a> Stream for DomainParticipantStatusStream<'a> {
} // fn
}

impl<'a> FusedStream for DomainParticipantStatusStream<'a> {
impl FusedStream for DomainParticipantStatusStream<'_> {
fn is_terminated(&self) -> bool {
false
}
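These edits, and the matching ones in `statusevents.rs`, `datawriter.rs`, and `simpledatareader.rs` below, follow the same clippy suggestion: a lifetime parameter that appears only once in the impl header can be elided to `'_`. A standalone sketch of the before/after shape, using illustrative types rather than the crate's own:

```rust
use std::fmt;

struct StatusStream<'a> {
  label: &'a str,
}

// Before: impl<'a> fmt::Display for StatusStream<'a> { ... }
// After: the named lifetime is not referenced anywhere else in the impl,
// so the anonymous lifetime '_ is enough and clippy is satisfied.
impl fmt::Display for StatusStream<'_> {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    write!(f, "status of {}", self.label)
  }
}

fn main() {
  let s = StatusStream { label: "participant" };
  println!("{s}");
}
```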
4 changes: 2 additions & 2 deletions src/dds/statusevents.rs
@@ -219,7 +219,7 @@ pub struct StatusReceiverStream<'a, T> {
terminated: AtomicBool,
}

impl<'a, T> Stream for StatusReceiverStream<'a, T> {
impl<T> Stream for StatusReceiverStream<'_, T> {
type Item = T;

fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
@@ -242,7 +242,7 @@ impl<'a, T> Stream for StatusReceiverStream<'a, T> {
} // fn
}

impl<'a, T> FusedStream for StatusReceiverStream<'a, T> {
impl<T> FusedStream for StatusReceiverStream<'_, T> {
fn is_terminated(&self) -> bool {
self.terminated.load(Ordering::SeqCst)
}
4 changes: 0 additions & 4 deletions src/dds/with_key/datareader.rs
@@ -162,7 +162,6 @@ where
/// # }
/// # }
///
/// // WithKey is important
/// let topic = domain_participant.create_topic("some_topic".to_string(), "SomeType".to_string(), &qos, TopicKind::WithKey).unwrap();
/// let mut data_reader = subscriber.create_datareader::<SomeType, CDRDeserializerAdapter<_>>(&topic, None).unwrap();
///
@@ -174,7 +173,6 @@ where
/// }
/// }
/// ```
pub fn read(
&mut self,
max_samples: usize,
@@ -486,7 +484,6 @@ where
/// # }
/// # }
///
/// // WithKey is important
/// let topic = domain_participant.create_topic("some_topic".to_string(), "SomeType".to_string(), &qos, TopicKind::WithKey).unwrap();
/// let mut data_reader = subscriber.create_datareader::<SomeType, CDRDeserializerAdapter<_>>(&topic, None).unwrap();
///
@@ -496,7 +493,6 @@ where
/// // do something
/// }
/// ```
pub fn into_iterator(&mut self) -> ReadResult<impl Iterator<Item = Sample<D, D::K>>> {
// TODO: We could come up with a more efficient implementation than wrapping a
// take call
34 changes: 14 additions & 20 deletions src/dds/with_key/datawriter.rs
@@ -253,12 +253,9 @@ where
.fetch_sub(1, Ordering::Relaxed);
}

// This one function provides both get_matched_subscriptions and
// get_matched_subscription_data TODO: Maybe we could return references to the
// subscription data to avoid copying? But then what if the result set changes
// while the application processes it?

/// Manually refreshes liveliness if QoS allows it
/// Manually refreshes liveliness
///
/// Corresponds to DDS Spec 1.4 Section 2.2.2.4.2.22 assert_liveliness.
///
/// # Examples
///
@@ -282,14 +279,11 @@ where
/// }
/// }
///
/// // WithKey is important
/// let topic = domain_participant.create_topic("some_topic".to_string(), "SomeType".to_string(), &qos, TopicKind::WithKey).unwrap();
/// let data_writer = publisher.create_datawriter::<SomeType, CDRSerializerAdapter<_>>(&topic, None).unwrap();
///
/// data_writer.refresh_manual_liveliness();
/// ```
// TODO: What is this function? To what part of DDS spec does it correspond to?
pub fn refresh_manual_liveliness(&self) {
if let Some(lv) = self.qos().liveliness {
match lv {
@@ -752,16 +746,14 @@ where
/// }
/// }
///
/// // WithKey is important
/// let topic = domain_participant.create_topic("some_topic".to_string(), "SomeType".to_string(), &qos, TopicKind::WithKey).unwrap();
/// let data_writer = publisher.create_datawriter::<SomeType, CDRSerializerAdapter<_>>(&topic, None).unwrap();
///
/// data_writer.assert_liveliness().unwrap();
/// ```
// TODO: This cannot really fail, so could change type to () (alternatively,
// make send error visible) TODO: Better make send failure visible, so
// application can see if Discovery has failed.
///
/// An `Err` result means that the liveliness assertion message could not be
/// sent, likely because Discovery has too much work to do.
pub fn assert_liveliness(&self) -> WriteResult<(), ()> {
self.refresh_manual_liveliness();

@@ -773,11 +765,13 @@ where
writer_guid: self.guid(),
manual_assertion: true, // by definition of this function
})
.unwrap_or_else(|e| error!("assert_liveness - Failed to send DiscoveryCommand. {e:?}"));
.map_err(|e| {
error!("assert_liveness - Failed to send DiscoveryCommand. {e:?}");
WriteError::WouldBlock { data: () }
})
}
_other => (),
_other => Ok(()),
}
Ok(())
}

/// Unimplemented. <b>Do not use</b>.
@@ -963,14 +957,14 @@ where

// This is required, because AsyncWrite contains "D".
// TODO: Is it ok to promise Unpin here?
impl<'a, D, SA> Unpin for AsyncWrite<'a, D, SA>
impl<D, SA> Unpin for AsyncWrite<'_, D, SA>
where
D: Keyed,
SA: SerializerAdapter<D>,
{
}

impl<'a, D, SA> Future for AsyncWrite<'a, D, SA>
impl<D, SA> Future for AsyncWrite<'_, D, SA>
where
D: Keyed,
SA: SerializerAdapter<D>,
@@ -1049,7 +1043,7 @@ where
Fail(WriteError<()>),
}

impl<'a, D, SA> Future for AsyncWaitForAcknowledgments<'a, D, SA>
impl<D, SA> Future for AsyncWaitForAcknowledgments<'_, D, SA>
where
D: Keyed,
SA: SerializerAdapter<D>,
10 changes: 5 additions & 5 deletions src/dds/with_key/simpledatareader.rs
@@ -568,15 +568,15 @@ pub struct SimpleDataReaderStream<
// ----------------------------------------------

// https://users.rust-lang.org/t/take-in-impl-future-cannot-borrow-data-in-a-dereference-of-pin/52042
impl<'a, D, S, DA> Unpin for SimpleDataReaderStream<'a, D, S, DA>
impl<D, S, DA> Unpin for SimpleDataReaderStream<'_, D, S, DA>
where
D: Keyed + 'static,
DA: DeserializerAdapter<D>,
S: Decode<DA::Decoded, DA::DecodedKey> + Unpin,
{
}

impl<'a, D, S, DA> Stream for SimpleDataReaderStream<'a, D, S, DA>
impl<D, S, DA> Stream for SimpleDataReaderStream<'_, D, S, DA>
where
D: Keyed + 'static,
DA: DeserializerAdapter<D>,
@@ -633,7 +633,7 @@ where
} // fn
} // impl

impl<'a, D, S, DA> FusedStream for SimpleDataReaderStream<'a, D, S, DA>
impl<D, S, DA> FusedStream for SimpleDataReaderStream<'_, D, S, DA>
where
D: Keyed + 'static,
DA: DeserializerAdapter<D>,
@@ -655,7 +655,7 @@ pub struct SimpleDataReaderEventStream<
simple_datareader: &'a SimpleDataReader<D, DA>,
}

impl<'a, D, DA> Stream for SimpleDataReaderEventStream<'a, D, DA>
impl<D, DA> Stream for SimpleDataReaderEventStream<'_, D, DA>
where
D: Keyed + 'static,
DA: DeserializerAdapter<D>,
@@ -673,7 +673,7 @@ where
} // fn
} // impl

impl<'a, D, DA> FusedStream for SimpleDataReaderEventStream<'a, D, DA>
impl<D, DA> FusedStream for SimpleDataReaderEventStream<'_, D, DA>
where
D: Keyed + 'static,
DA: DeserializerAdapter<D>,
1 change: 0 additions & 1 deletion src/messages/submessages/data.rs
@@ -54,7 +54,6 @@ impl Data {
/// DATA submessage cannot be speedy Readable, because deserializing it
/// requires info from the submessage header. The required information is expect_qos
/// and expect_payload, which are conveyed by the submessage header flags.
pub fn deserialize_data(buffer: &Bytes, flags: BitFlags<DATA_Flags>) -> io::Result<Self> {
let mut cursor = io::Cursor::new(&buffer);
let endianness = endianness_flag(flags.bits());
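The comment above explains why `Data::deserialize_data` is hand-written: which parts are present in the buffer depends on flags carried by the enclosing submessage header. A generic, hypothetical illustration of that kind of flag-dependent parsing follows; none of these names are rustdds types.

```rust
// Hypothetical flag-dependent parsing: the caller must pass in what the header
// flags promised, so the element cannot implement a self-contained derive-style
// deserializer.
fn split_sections(
  buffer: &[u8],
  expect_qos: bool,
  expect_payload: bool,
) -> (Option<&[u8]>, Option<&[u8]>) {
  let mut rest = buffer;
  let qos = if expect_qos {
    // Pretend the inline QoS part is a fixed 4 bytes, purely for illustration.
    let (head, tail) = rest.split_at(4);
    rest = tail;
    Some(head)
  } else {
    None
  };
  let payload = if expect_payload { Some(rest) } else { None };
  (qos, payload)
}

fn main() {
  let buf = [1u8, 2, 3, 4, 5, 6];
  assert_eq!(split_sections(&buf, true, true), (Some(&buf[..4]), Some(&buf[4..])));
  assert_eq!(split_sections(&buf, false, false), (None, None));
}
```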
2 changes: 1 addition & 1 deletion src/rtps/dp_event_loop.rs
@@ -623,7 +623,7 @@ impl DPEventLoop {
if *reader_eid == EntityId::P2P_BUILTIN_PARTICIPANT_MESSAGE_READER
&& discovered_participant
.builtin_endpoint_qos
.map_or(false, |beq| beq.is_best_effort())
.is_some_and(|beq| beq.is_best_effort())
{
qos.reliability = Some(policy::Reliability::BestEffort);
};
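This change, and the identical ones in `reader.rs` and `writer.rs` below, replace `Option::map_or(false, f)` with `Option::is_some_and(f)`, which clippy now prefers because it states the intent ("is Some and the predicate holds") without the explicit `false` default. The two forms are equivalent:

```rust
fn main() {
  let value: Option<u32> = Some(3);

  // Old form, now flagged by clippy:
  assert!(value.map_or(false, |x| x > 2));

  // New form, same behaviour:
  assert!(value.is_some_and(|x| x > 2));
  assert!(!None::<u32>.is_some_and(|x| x > 2));
}
```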
1 change: 0 additions & 1 deletion src/rtps/message_receiver.rs
@@ -102,7 +102,6 @@ impl Default for MessageReceiverState {
/// submessages. Then it processes the instructions in the Interpreter
/// Submessages and forwards data in Entity Submessages to the appropriate
/// Entities. (See RTPS spec Section 8.3.7)
pub(crate) struct MessageReceiver {
pub available_readers: BTreeMap<EntityId, Reader>,
// GuidPrefix sent in this channel needs to be RTPSMessage source_guid_prefix. Writer needs this
2 changes: 1 addition & 1 deletion src/rtps/reader.rs
@@ -690,7 +690,7 @@ impl Reader {
self
.fragment_assemblers
.get(&writer_guid)
.map_or(false, |fa| fa.is_partially_received(seq))
.is_some_and(|fa| fa.is_partially_received(seq))
}

// common parts of processing DATA or a completed DATAFRAG (when all frags are
2 changes: 1 addition & 1 deletion src/rtps/writer.rs
@@ -1111,7 +1111,7 @@ impl Writer {
let completed = self
.ack_waiter
.as_mut()
.map_or(false, |aw| aw.reader_acked_or_lost(guid, acked_before));
.is_some_and(|aw| aw.reader_acked_or_lost(guid, acked_before));
if completed {
self
.ack_waiter
2 changes: 1 addition & 1 deletion src/serialization/speedy_pl_cdr_helpers.rs
@@ -82,7 +82,7 @@ impl<C: Context> Writable<C> for StringWithNul {
// GBytes? RTPS does not support that.

// TODO: Should align to 4 before writing
writer.write_u32((self.string.as_bytes().len() + 1).try_into().unwrap())?; // +1 for NUL character
writer.write_u32((self.string.len() + 1).try_into().unwrap())?; // +1 for NUL character
writer.write_slice(self.string.as_bytes())?;
writer.write_u8(0)?; // NUL character
Ok(())
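The change above is purely cosmetic: `str::len()` already returns the length in bytes, so it equals `as_bytes().len()` and the extra conversion can be dropped. For example:

```rust
fn main() {
  let s = "héllo"; // 5 characters, 6 bytes (é takes 2 bytes in UTF-8)
  assert_eq!(s.len(), 6);
  assert_eq!(s.len(), s.as_bytes().len());
}
```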
2 changes: 1 addition & 1 deletion src/structure/dds_cache.rs
@@ -33,7 +33,7 @@ use super::cache_change::CacheChange;
/// for Readers is scoped over the entire Topic (or Instances in it). This leads
/// to situations where some sample would be garbage collected by a Reader's
/// History policy, but must be preserved by a Writer's History Policy.
///
/// Each topic that has been subscribed to
/// is contained in a separate TopicCache. One TopicCache contains
/// only CacheChanges of one serialized IDL datatype. -> all cache changes in
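To make the Reader-vs-Writer History contrast above concrete, here is a hedged sketch of two QoS configurations for the same Topic, assuming rustdds' `QosPolicyBuilder` exposes a `history` setter and `policy::History::KeepLast { depth }` as in the DDS specification:

```rust
use rustdds::*;

fn main() {
  // Reader side: keep only the most recent sample of each Instance.
  let _reader_qos = QosPolicyBuilder::new()
    .history(policy::History::KeepLast { depth: 1 })
    .build();

  // Writer side: retain a deeper history on the same Topic, which the shared
  // cache must preserve even though the Reader's policy alone would drop it.
  let _writer_qos = QosPolicyBuilder::new()
    .history(policy::History::KeepLast { depth: 10 })
    .build();
}
```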
