Fix some new clippy 1.84 warnings

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/2032>
Sebastian Dröge 2025-01-10 09:57:00 +02:00
parent 69f927cc24
commit 7b4665c793
14 changed files with 28 additions and 58 deletions
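
Most of the hunks below replace Option/Result map_or calls that take a constant default with is_some_and, is_ok_and, or a plain comparison against Some(..) / Ok(..). A minimal standalone sketch of the before/after pattern, assuming the warnings come from clippy's unnecessary_map_or lint (the commit message itself only says "new clippy 1.84 warnings"):

    // Standalone sketch of the rewrites applied throughout this commit.
    // Assumes Rust 1.70+, where Option::is_some_and and Result::is_ok_and are stable.
    fn main() {
        let opt: Option<u32> = Some(3);
        let res: Result<&str, ()> = Ok("0");

        // map_or(false, ..) on Option/Result becomes is_some_and / is_ok_and.
        assert_eq!(opt.map_or(false, |v| v > 2), opt.is_some_and(|v| v > 2));
        assert_eq!(res.ok().map_or(false, |s| s != "0"), res.is_ok_and(|s| s != "0"));

        // map_or(true, |v| v != x) is just an inequality against Some(x).
        let last: Option<u32> = None;
        assert_eq!(last.map_or(true, |v| v != 3), last != Some(3));
    }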

View file

@@ -1438,7 +1438,7 @@ fn write_stts(
         )
         .context("too big sample duration")?;
 
-        if last_duration.map_or(true, |last_duration| last_duration != duration) {
+        if last_duration != Some(duration) {
             if let Some(last_duration) = last_duration {
                 v.extend(sample_count.to_be_bytes());
                 v.extend(last_duration.to_be_bytes());
@@ -1489,9 +1489,7 @@ fn write_ctts(
             .mul_div_round(timescale as i64, gst::ClockTime::SECOND.nseconds() as i64)
             .context("too big sample composition time offset")?;
 
-        if last_composition_time_offset.map_or(true, |last_composition_time_offset| {
-            last_composition_time_offset != composition_time_offset
-        }) {
+        if last_composition_time_offset != Some(composition_time_offset) {
             if let Some(last_composition_time_offset) = last_composition_time_offset {
                 v.extend(sample_count.to_be_bytes());
                 if version == FULL_BOX_VERSION_0 {
@@ -1689,9 +1687,7 @@ fn write_stsc(
     let mut first_chunk = 1u32;
     let mut samples_per_chunk: Option<u32> = None;
     for (idx, chunk) in stream.chunks.iter().enumerate() {
-        if samples_per_chunk.map_or(true, |samples_per_chunk| {
-            samples_per_chunk != chunk.samples.len() as u32
-        }) {
+        if samples_per_chunk != Some(chunk.samples.len() as u32) {
             if let Some(samples_per_chunk) = samples_per_chunk {
                 v.extend(first_chunk.to_be_bytes());
                 v.extend(samples_per_chunk.to_be_bytes());

View file

@@ -338,13 +338,9 @@ impl HlsBaseSink {
 
         gst::trace!(CAT, imp = self, "Segment location formatted: {}", location);
 
-        let stream = match self
-            .obj()
-            .emit_by_name::<Option<gio::OutputStream>>(SIGNAL_GET_FRAGMENT_STREAM, &[&location])
-        {
-            Some(stream) => stream,
-            None => return None,
-        };
+        let stream = self
+            .obj()
+            .emit_by_name::<Option<gio::OutputStream>>(SIGNAL_GET_FRAGMENT_STREAM, &[&location])?;
 
         Some((stream, location))
     }

View file

@@ -166,34 +166,25 @@ impl RtpBaseDepay2Impl for RtpAmrDepay {
         // encoding-params="1", (channels), default
         // crc={"0", "1"}, default "0"
 
-        if s.get::<&str>("robust-sorting")
-            .ok()
-            .map_or(false, |s| s != "0")
-        {
+        if s.get::<&str>("robust-sorting").is_ok_and(|s| s != "0") {
             gst::error!(CAT, imp = self, "Only robust-sorting=0 supported");
             return false;
         }
 
-        if s.get::<&str>("interleaving")
-            .ok()
-            .map_or(false, |s| s != "0")
-        {
+        if s.get::<&str>("interleaving").is_ok_and(|s| s != "0") {
            gst::error!(CAT, imp = self, "Only interleaving=0 supported");
            return false;
         }
 
-        if s.get::<&str>("encoding-params")
-            .ok()
-            .map_or(false, |s| s != "1")
-        {
+        if s.get::<&str>("encoding-params").is_ok_and(|s| s != "1") {
             gst::error!(CAT, imp = self, "Only encoding-params=1 supported");
             return false;
         }
 
         let mut state = self.state.borrow_mut();
 
-        let has_crc = s.get::<&str>("crc").ok().map_or(false, |s| s != "0");
-        let bandwidth_efficient = s.get::<&str>("octet-align").ok().map_or(true, |s| s != "1");
+        let has_crc = s.get::<&str>("crc").is_ok_and(|s| s != "0");
+        let bandwidth_efficient = s.get::<&str>("octet-align") != Ok("1");
 
         if bandwidth_efficient && has_crc {
             gst::error!(

View file

@@ -564,9 +564,7 @@ impl RtpAmrPay {
         let is_ready = drain
             || agg_mode != super::AggregateMode::Aggregate
             || queued_bytes + avg_bytes > max_payload_size
-            || (max_ptime.map_or(false, |max_ptime| {
-                queued_duration + avg_duration > max_ptime
-            }));
+            || (max_ptime.is_some_and(|max_ptime| queued_duration + avg_duration > max_ptime));
 
         gst::log!(
             CAT,

View file

@@ -1011,7 +1011,7 @@ impl RtpRecv {
                 if !split_bufferlist
                     && previous_jb
                         .as_ref()
-                        .map_or(false, |previous| !Arc::ptr_eq(previous, &jb))
+                        .is_some_and(|previous| !Arc::ptr_eq(previous, &jb))
                 {
                     split_bufferlist = true;
                 }

View file

@@ -923,10 +923,7 @@ impl RemoteSendSource {
     pub(crate) fn request_remote_key_unit(&mut self, _now: Instant, typ: KeyUnitRequestType) {
         match typ {
             KeyUnitRequestType::Fir(count) => {
-                if self
-                    .send_fir_count
-                    .map_or(true, |previous_count| previous_count != count)
-                {
+                if self.send_fir_count != Some(count) {
                     self.send_fir_seqnum = self.send_fir_seqnum.wrapping_add(1);
                 }
                 self.send_fir = true;

View file

@@ -473,12 +473,12 @@ impl crate::basedepay::RtpBaseDepay2Impl for RtpVp9Depay {
         // keep the last one around as that should theoretically be the one with the highest
         // resolution and profile.
         if payload_descriptor.start_of_frame
-            && state.current_picture_payload_descriptor.as_ref().map_or(
-                false,
-                |current_picture_payload_descriptor| {
-                    !current_picture_payload_descriptor.inter_picture_predicted_frame
-                },
-            )
+            && state
+                .current_picture_payload_descriptor
+                .as_ref()
+                .is_some_and(|current_picture_payload_descriptor| {
+                    !current_picture_payload_descriptor.inter_picture_predicted_frame
+                })
         {
             let mut r = BitReader::endian(&mut cursor, BigEndian);
             // We assume that the beginning of the frame header fits into the first packet

View file

@@ -652,10 +652,7 @@ impl Codec {
                     }
                 },
                 |encoding_names| {
-                    encoding_names.iter().any(|v| {
-                        v.get::<&str>()
-                            .map_or(false, |encoding_name| encoding_name == codec)
-                    })
+                    encoding_names.iter().any(|v| v.get::<&str>() == Ok(codec))
                 },
             )
         })

View file

@ -2898,7 +2898,7 @@ impl BaseWebRTCSink {
#[strong] #[strong]
session_id, session_id,
move |_webrtcbin: gst::Element, _bin: gst::Bin, e: gst::Element| { move |_webrtcbin: gst::Element, _bin: gst::Bin, e: gst::Element| {
if e.factory().map_or(false, |f| f.name() == "rtprtxsend") { if e.factory().is_some_and(|f| f.name() == "rtprtxsend") {
if e.has_property_with_type("stuffing-kbps", i32::static_type()) { if e.has_property_with_type("stuffing-kbps", i32::static_type()) {
element.imp().set_rtptrxsend(&session_id, e); element.imp().set_rtptrxsend(&session_id, e);
} else { } else {
@ -2927,7 +2927,7 @@ impl BaseWebRTCSink {
#[watch] #[watch]
element, element,
move |_webrtcbin: gst::Element, _bin: gst::Bin, e: gst::Element| { move |_webrtcbin: gst::Element, _bin: gst::Bin, e: gst::Element| {
if e.factory().map_or(false, |f| f.name() == "nicesink") { if e.factory().is_some_and(|f| f.name() == "nicesink") {
let sinkpad = e.static_pad("sink").unwrap(); let sinkpad = e.static_pad("sink").unwrap();
let session_id = session_id.clone(); let session_id = session_id.clone();

View file

@@ -465,7 +465,7 @@ impl JsonGstParse {
                         "Duration scan done, last_pts: {:?}",
                         last_pts
                     );
-                    break (Ok(last_pts));
+                    break Ok(last_pts);
                 }
             }
         }
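
The JsonGstParse change above, and the identical MccParse and SccParse hunks further down, only drop redundant parentheses around the value given to break; a loop expression can break with a value directly. An illustrative scan loop under that assumption (the names here are made up, not taken from the elements):

    // Illustrative only: breaking out of a loop expression with a Result value,
    // no parentheses needed around the break value.
    fn last_value(values: &[u64]) -> Result<Option<u64>, String> {
        let mut iter = values.iter();
        let mut last = None;
        loop {
            match iter.next() {
                Some(&v) => last = Some(v),
                // Previously written as: break (Ok(last));
                None => break Ok(last),
            }
        }
    }

    fn main() {
        assert_eq!(last_value(&[1, 2, 3]), Ok(Some(3)));
    }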

View file

@@ -234,10 +234,9 @@ impl GstObjectImpl for PcapWriter {}
 
 fn pad_is_wanted(pad: &gst::Pad, settings: &Settings) -> bool {
     if let Some(factory_name) = settings.target_factory.as_ref() {
-        return pad.parent().map_or(false, |p| {
-            p.downcast::<gst::Element>().map_or(false, |e| {
-                e.factory().map_or(false, |f| f.name() == *factory_name)
-            })
-        });
+        return pad.parent().is_some_and(|p| {
+            p.downcast::<gst::Element>()
+                .is_ok_and(|e| e.factory().is_some_and(|f| f.name() == *factory_name))
+        });
     }

View file

@@ -676,7 +676,7 @@ impl MccParse {
                         "Duration scan done, last_tc: {:?}",
                         last_tc
                     );
-                    break (Ok(last_tc));
+                    break Ok(last_tc);
                 }
             }
         }

View file

@@ -591,7 +591,7 @@ impl SccParse {
                         "Duration scan done, last_tc: {:?}",
                         last_tc
                     );
-                    break (Ok(last_tc));
+                    break Ok(last_tc);
                 }
             }
         }

View file

@ -80,11 +80,7 @@ impl ColorDetect {
let dominant_color_name = let dominant_color_name =
color_name::Color::similar([dominant_color.r, dominant_color.g, dominant_color.b]) color_name::Color::similar([dominant_color.r, dominant_color.g, dominant_color.b])
.to_lowercase(); .to_lowercase();
if state if state.current_color.as_ref() != Some(&dominant_color_name) {
.current_color
.as_ref()
.map_or(true, |current_color| current_color != &dominant_color_name)
{
let name = dominant_color_name.clone(); let name = dominant_color_name.clone();
state.current_color = Some(dominant_color_name); state.current_color = Some(dominant_color_name);
return Ok(Some((name, palette))); return Ok(Some((name, palette)));