Fix various new clippy warnings

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1011>
Sebastian Dröge 2022-12-13 11:43:16 +02:00
parent 289e8a08c3
commit 3f904553ea
27 changed files with 57 additions and 66 deletions


@@ -15,8 +15,8 @@ pub struct RingBuffer {
 impl RingBuffer {
 pub fn new(size: usize) -> Self {
-let mut buffer = Vec::with_capacity(size as usize);
-buffer.extend(iter::repeat(0.0).take(size as usize));
+let mut buffer = Vec::with_capacity(size);
+buffer.extend(iter::repeat(0.0).take(size));
 Self {
 buffer: buffer.into_boxed_slice(),
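
A note on the pattern: `size` is already `usize`, so `size as usize` casts a value to its own type. This is what clippy's `unnecessary_cast` lint reports (the lint name is my attribution; the commit message only says "new clippy warnings"). A minimal, self-contained sketch with a hypothetical helper:

    // Hypothetical example, not part of the patch.
    fn make_buffer(size: usize) -> Vec<f64> {
        // `size as usize` here would be a same-type cast and trigger
        // clippy::unnecessary_cast; passing `size` directly is equivalent.
        let mut buffer = Vec::with_capacity(size);
        buffer.extend(std::iter::repeat(0.0).take(size));
        buffer
    }

    fn main() {
        assert_eq!(make_buffer(8).len(), 8);
    }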


@@ -611,9 +611,8 @@ impl State {
 // the position where we have to start writing the next 100ms in the next
 // iteration.
-let mut outbuf = gst::Buffer::with_size(
-self.current_samples_per_frame as usize * self.info.bpf() as usize,
-)
-.map_err(|_| gst::FlowError::Error)?;
+let mut outbuf =
+gst::Buffer::with_size(self.current_samples_per_frame * self.info.bpf() as usize)
+.map_err(|_| gst::FlowError::Error)?;
 {
 let outbuf = outbuf.get_mut().unwrap();
@@ -819,7 +818,7 @@ impl State {
 // adjustment. frame_type should only ever be set to Final at the end if we ended up in
 // Inner state before.
 if self.frame_type == FrameType::First
-&& (src.len() / self.info.channels() as usize) < self.current_samples_per_frame as usize
+&& (src.len() / self.info.channels() as usize) < self.current_samples_per_frame
 {
 self.process_first_frame_is_last(imp)?;
 }


@@ -373,7 +373,7 @@ impl HrtfRender {
 let (prev_offset, _) = state.adapter.prev_offset();
 let offset = prev_offset.checked_add(distance_samples).unwrap_or(0);
-let duration_samples = outputsz / outbpf as usize;
+let duration_samples = outputsz / outbpf;
 let duration = samples_to_time(duration_samples as u64);
 (pts, offset, duration)


@@ -260,10 +260,7 @@ fn csound_filter_underflow() {
 }
 assert_eq!(num_buffers, UNDERFLOW_NUM_BUFFERS / 2);
-assert_eq!(
-num_samples as usize,
-UNDERFLOW_NUM_SAMPLES * UNDERFLOW_NUM_BUFFERS
-);
+assert_eq!(num_samples, UNDERFLOW_NUM_SAMPLES * UNDERFLOW_NUM_BUFFERS);
 }
 // Verifies that the caps negotiation is properly done, by pushing buffers whose caps


@@ -405,7 +405,7 @@ impl LewtonDec {
 let outbuf = if let Some(ref reorder_map) = state.reorder_map {
 let mut outbuf = self
 .obj()
-.allocate_output_buffer(sample_count as usize * audio_info.bpf() as usize);
+.allocate_output_buffer(sample_count * audio_info.bpf() as usize);
 {
 // And copy the decoded data into our output buffer while reordering the channels to the
 // GStreamer channel order


@@ -498,7 +498,7 @@ impl Decrypter {
 gst::debug!(CAT, obj: pad, "Requested offset: {}", offset);
 gst::debug!(CAT, obj: pad, "Requested size: {}", requested_size);
-let chunk_index = offset as u64 / block_size as u64;
+let chunk_index = offset / block_size as u64;
 gst::debug!(CAT, obj: pad, "Stream Block index: {}", chunk_index);
 let pull_offset = offset - (chunk_index * block_size as u64);


@@ -158,7 +158,7 @@ fn main() {
 let context = build_context();
 let source = gst::ElementFactory::make("ts-tonesrc")
 .name(format!("source-{}", i).as_str())
-.property("samples-per-buffer", (wait as u32) * 8000 / 1000)
+.property("samples-per-buffer", wait * 8000 / 1000)
 .property("context", &context)
 .property("context-wait", wait)
 .build()


@@ -28,7 +28,7 @@ use gst_rtp::RTPBuffer;
 use once_cell::sync::Lazy;
-use std::cmp::{max, min, Ordering};
+use std::cmp::Ordering;
 use std::collections::{BTreeSet, VecDeque};
 use std::mem;
 use std::sync::Arc;
@@ -412,7 +412,7 @@ impl SinkHandler {
 }
 if let Some(last_in_seqnum) = inner.last_in_seqnum {
-let gap = gst_rtp::compare_seqnum(last_in_seqnum as u16, seq);
+let gap = gst_rtp::compare_seqnum(last_in_seqnum, seq);
 if gap == 1 {
 self.calculate_packet_spacing(inner, &mut state, rtptime, pts);
 } else {
@@ -463,7 +463,7 @@ impl SinkHandler {
 state.equidistant += 1;
 }
-state.equidistant = min(max(state.equidistant, -7), 7);
+state.equidistant = state.equidistant.clamp(-7, 7);
 inner.last_rtptime = Some(rtptime);
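
The `min(max(…))` rewrite above, together with the `use std::cmp::{max, min, Ordering}` cleanup in the first hunk of this file, matches clippy's `manual_clamp` lint (my attribution; the commit does not name individual lints). A self-contained sketch:

    // Hypothetical example, not part of the patch.
    fn main() {
        let equidistant: i32 = 12;
        // Form flagged by clippy::manual_clamp:
        let old = std::cmp::min(std::cmp::max(equidistant, -7), 7);
        // Suggested replacement; also removes the need for the min/max imports:
        let new = equidistant.clamp(-7, 7);
        assert_eq!(old, new);
    }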
@@ -679,7 +679,7 @@ impl SrcHandler {
 // FIXME reason why we can expect Some for the 2 lines below
 let mut last_popped_pts = state.last_popped_pts.unwrap();
 let interval = pts.into().unwrap().saturating_sub(last_popped_pts);
-let spacing = interval / (gap as u64 + 1);
+let spacing = interval / (gap + 1);
 *discont = true;


@@ -225,7 +225,7 @@ fn main() -> Result<(), Error> {
 "###,
 duration = duration, segment_timeline = segment_timeline);
-std::fs::write(path, &manifest).expect("failed to write manifest");
+std::fs::write(path, manifest).expect("failed to write manifest");
 })
 .build(),
 );
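
Dropping the `&` works because `std::fs::write` takes its contents as `impl AsRef<[u8]>`, so the `String` can be moved in when it is not used afterwards; clippy reports such borrows (likely the `needless_borrow` lint, which is my reading rather than something stated in the commit). Sketch with a hypothetical manifest:

    // Hypothetical example, not part of the patch.
    use std::fs;

    fn main() -> std::io::Result<()> {
        let manifest = String::from("<MPD></MPD>"); // placeholder content
        // `fs::write(path, &manifest)` also compiles, but the borrow is
        // unnecessary when `manifest` is not needed afterwards.
        fs::write("manifest.mpd", manifest)
    }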


@@ -1111,8 +1111,8 @@ fn write_visual_sample_entry(
 if let Ok(cll) = gst_video::VideoContentLightLevel::from_caps(&stream.caps) {
 write_box(v, b"clli", move |v| {
-v.extend((cll.max_content_light_level() as u16).to_be_bytes());
-v.extend((cll.max_frame_average_light_level() as u16).to_be_bytes());
+v.extend((cll.max_content_light_level()).to_be_bytes());
+v.extend((cll.max_frame_average_light_level()).to_be_bytes());
 Ok(())
 })?;
 }


@@ -928,8 +928,8 @@ fn write_visual_sample_entry(
 if let Ok(cll) = gst_video::VideoContentLightLevel::from_caps(&stream.caps) {
 write_box(v, b"clli", move |v| {
-v.extend((cll.max_content_light_level() as u16).to_be_bytes());
-v.extend((cll.max_frame_average_light_level() as u16).to_be_bytes());
+v.extend((cll.max_content_light_level()).to_be_bytes());
+v.extend((cll.max_frame_average_light_level()).to_be_bytes());
 Ok(())
 })?;
 }
@@ -1454,7 +1454,7 @@ fn write_cslg(
 let composition_start_time = stream
 .earliest_pts
 .nseconds()
-.mul_div_round(timescale as u64, gst::ClockTime::SECOND.nseconds() as u64)
+.mul_div_round(timescale as u64, gst::ClockTime::SECOND.nseconds())
 .context("too earliest PTS")?;
 v.extend(composition_start_time.to_be_bytes());
@@ -1462,7 +1462,7 @@
 let composition_end_time = stream
 .end_pts
 .nseconds()
-.mul_div_round(timescale as u64, gst::ClockTime::SECOND.nseconds() as u64)
+.mul_div_round(timescale as u64, gst::ClockTime::SECOND.nseconds())
 .context("too end PTS")?;
 v.extend(composition_end_time.to_be_bytes());


@@ -170,8 +170,8 @@ impl TranscribeParse {
 }
 };
-let start_pts = ((start_time as f64 * 1_000_000_000.0) as u64).nseconds();
-let end_pts = ((end_time as f64 * 1_000_000_000.0) as u64).nseconds();
+let start_pts = ((start_time * 1_000_000_000.0) as u64).nseconds();
+let end_pts = ((end_time * 1_000_000_000.0) as u64).nseconds();
 let duration = end_pts.saturating_sub(start_pts);
 if start_pts > last_pts {


@@ -93,7 +93,7 @@ impl Playlist {
 }
 self.playlist_index += 1;
-self.inner.media_sequence = self.playlist_index as u64 - self.inner.segments.len() as u64;
+self.inner.media_sequence = self.playlist_index - self.inner.segments.len() as u64;
 }
 /// Sets the playlist to started state.


@@ -915,14 +915,14 @@ impl Receiver {
 real_time_now,
 );
-let res_timestamp = self.0.observations_timestamp[if is_audio { 0 } else { 1 }].process(
+let res_timestamp = self.0.observations_timestamp[usize::from(!is_audio)].process(
 element,
 timestamp,
 receive_time,
 duration,
 );
-let res_timecode = self.0.observations_timecode[if is_audio { 0 } else { 1 }].process(
+let res_timecode = self.0.observations_timecode[usize::from(!is_audio)].process(
 element,
 Some(timecode),
 receive_time,
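
The index rewrite keeps the same mapping (audio -> slot 0, video -> slot 1) while avoiding an `if` whose only job is to produce 0 or 1; that pattern matches clippy's `bool_to_int_with_if` lint (my attribution, not stated in the commit). Sketch:

    // Hypothetical example, not part of the patch.
    fn main() {
        for is_audio in [true, false] {
            let old: usize = if is_audio { 0 } else { 1 };
            let new = usize::from(!is_audio);
            assert_eq!(old, new);
        }
    }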


@@ -187,8 +187,8 @@ impl OnvifMetadataOverlay {
 gst_video::VideoFormat::Bgra,
 #[cfg(target_endian = "big")]
 gst_video::VideoFormat::Argb,
-total_width as u32,
-total_height as u32,
+total_width,
+total_height,
 )
 .ok()?;


@@ -421,7 +421,7 @@ impl RaptorqEnc {
 assert_eq!(state.packets.len(), state.seqnums.len());
-if state.packets.len() == state.protected_packets_num as usize {
+if state.packets.len() == state.protected_packets_num {
 // We use current buffer timing as a base for repair packets timestamps
 let now_pts = buffer.pts();
 let now_dts = buffer.dts_or_pts();


@@ -370,12 +370,12 @@ impl RTPAv1Depay {
 let mut bitreader = BitReader::endian(reader, ENDIANNESS);
 parse_leb128(&mut bitreader)
 .map_err(err_opt!(self, leb_read))
-.ok()? as u32
+.ok()?
 }
 } else {
 element_size = parse_leb128(&mut BitReader::endian(&mut *reader, ENDIANNESS))
 .map_err(err_opt!(self, leb_read))
-.ok()? as u32;
+.ok()?;
 is_last_obu = match rtp
 .payload_size()
 .cmp(&(reader.position() as u32 + element_size))


@@ -471,7 +471,7 @@ impl RTPAv1Pay {
 state.open_obu_fragment = true;
 size
 } else {
-last_obu.bytes.len() as u32 - last_obu.offset as usize as u32
+last_obu.bytes.len() as u32 - last_obu.offset as u32
 };
 if !packet.omit_last_size_field {


@@ -397,8 +397,8 @@ impl Detector {
 - self.last_received_packets.iter().next().unwrap().1.arrival;
 let bits = self
 .last_received_packets
-.iter()
-.map(|(_seqnum, p)| p.size as f64)
+.values()
+.map(|p| p.size as f64)
 .sum::<f64>()
 * 8.;
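
Iterating a map only to discard the key is what clippy's `iter_kv_map` lint reports (again, my attribution); `.values()` states the intent directly. A simplified stand-in for the packet map used here, since the real map stores per-packet metadata rather than bare sizes:

    // Hypothetical example, not part of the patch.
    use std::collections::BTreeMap;

    fn main() {
        let packet_sizes: BTreeMap<u16, f64> = BTreeMap::from([(1, 1200.0), (2, 900.0)]);
        // Flagged form: the key is bound only to be ignored.
        let bits_old = packet_sizes.iter().map(|(_seq, size)| *size).sum::<f64>() * 8.0;
        // Suggested form:
        let bits_new = packet_sizes.values().sum::<f64>() * 8.0;
        assert_eq!(bits_old, bits_new);
    }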
@@ -523,7 +523,7 @@ impl Detector {
 }
 }
-self.compute_loss_average(lost_packets as f64 / n_packets as f64);
+self.compute_loss_average(lost_packets / n_packets as f64);
 }
 fn compute_loss_average(&mut self, loss_fraction: f64) {
@@ -828,7 +828,7 @@ impl State {
 }
 };
-if effective_bitrate as f64 - target_bitrate as f64 > 5. * target_bitrate / 100. {
+if effective_bitrate as f64 - target_bitrate > 5. * target_bitrate / 100. {
 gst::info!(
 CAT,
 "Effective rate {} >> target bitrate {} - we should avoid that \
@@ -850,16 +850,11 @@ impl State {
 let threshold_on_effective_bitrate = 1.5 * effective_bitrate as f64;
 let increase = f64::max(
 1000.0f64,
-f64::min(
-alpha * avg_packet_size_bits,
-// Stuffing should ensure that the effective bitrate is not
-// < target bitrate, still, make sure to always increase
-// the bitrate by a minimum amount of 160.bits
-f64::max(
-threshold_on_effective_bitrate - self.target_bitrate_on_delay as f64,
-160.,
-),
-),
+// Stuffing should ensure that the effective bitrate is not
+// < target bitrate, still, make sure to always increase
+// the bitrate by a minimum amount of 160.bits
+(threshold_on_effective_bitrate - self.target_bitrate_on_delay as f64)
+.clamp(160.0, alpha * avg_packet_size_bits),
 );
 /* Additive increase */
@@ -1259,8 +1254,8 @@ impl ObjectImpl for BandwidthEstimator {
 the element to configure the starting bitrate, in which case the
 encoder should also use it as target bitrate",
 1,
-u32::MAX as u32,
-DEFAULT_MIN_BITRATE as u32,
+u32::MAX,
+DEFAULT_MIN_BITRATE,
 glib::ParamFlags::READWRITE | gst::PARAM_FLAG_MUTABLE_READY,
 ),
 glib::ParamSpecUInt::new(
@@ -1268,7 +1263,7 @@
 "Minimal Bitrate",
 "Minimal bitrate to use (in bit/sec) when computing it through the bandwidth estimation algorithm",
 1,
-u32::MAX as u32,
+u32::MAX,
 DEFAULT_MIN_BITRATE,
 glib::ParamFlags::READWRITE | gst::PARAM_FLAG_MUTABLE_READY,
 ),
@@ -1277,7 +1272,7 @@
 "Maximal Bitrate",
 "Maximal bitrate to use (in bit/sec) when computing it through the bandwidth estimation algorithm",
 1,
-u32::MAX as u32,
+u32::MAX,
 DEFAULT_MAX_BITRATE,
 glib::ParamFlags::READWRITE | gst::PARAM_FLAG_MUTABLE_READY,
 ),


@@ -275,7 +275,7 @@ impl Default for Settings {
 cc_info: CCInfo {
 heuristic: WebRTCSinkCongestionControl::GoogleCongestionControl,
 min_bitrate: DEFAULT_MIN_BITRATE,
-max_bitrate: DEFAULT_MAX_BITRATE as u32,
+max_bitrate: DEFAULT_MAX_BITRATE,
 start_bitrate: DEFAULT_START_BITRATE,
 },
 do_fec: DEFAULT_DO_FEC,
@@ -2400,7 +2400,7 @@ impl ObjectImpl for WebRTCSink {
 "Minimal Bitrate",
 "Minimal bitrate to use (in bit/sec) when computing it through the congestion control algorithm",
 1,
-u32::MAX as u32,
+u32::MAX,
 DEFAULT_MIN_BITRATE,
 glib::ParamFlags::READWRITE | gst::PARAM_FLAG_MUTABLE_READY,
 ),
@@ -2409,7 +2409,7 @@
 "Minimal Bitrate",
 "Minimal bitrate to use (in bit/sec) when computing it through the congestion control algorithm",
 1,
-u32::MAX as u32,
+u32::MAX,
 DEFAULT_MAX_BITRATE,
 glib::ParamFlags::READWRITE | gst::PARAM_FLAG_MUTABLE_READY,
 ),
@@ -2418,7 +2418,7 @@
 "Start Bitrate",
 "Start bitrate to use (in bit/sec)",
 1,
-u32::MAX as u32,
+u32::MAX,
 DEFAULT_START_BITRATE,
 glib::ParamFlags::READWRITE | gst::PARAM_FLAG_MUTABLE_READY,
 ),


@@ -26,7 +26,7 @@ fn file_name_to_uri(name: &str) -> String {
 r
 };
-let url = url::Url::from_file_path(&input_path).unwrap();
+let url = url::Url::from_file_path(input_path).unwrap();
 url.to_string()
 }


@@ -57,7 +57,7 @@ impl CaptionFrame {
 data.as_ptr() as *mut _,
 i32::from(full),
 );
-data.set_len(len as usize);
+data.set_len(len);
 String::from_utf8(data).map_err(|_| Error)
 }


@@ -184,7 +184,7 @@ impl Cea608ToTt {
 m %= 60;
 let ns = time % 1_000_000_000;
-(h as u64, m as u8, s as u8, (ns / 1_000_000) as u16)
+(h, m as u8, s as u8, (ns / 1_000_000) as u16)
 }
 fn create_vtt_buffer(


@@ -131,7 +131,7 @@ impl State {
 m %= 60;
 let ns = time % 1_000_000_000;
-(h as u64, m as u8, s as u8, (ns / 1_000_000) as u16)
+(h, m as u8, s as u8, (ns / 1_000_000) as u16)
 }
 fn create_vtt_buffer(


@@ -103,7 +103,7 @@ impl MccEnc {
 FixedOffset::east_opt(creation_date.utc_offset().as_seconds() as i32)
 .and_then(|tz| {
 tz.with_ymd_and_hms(
-creation_date.year() as i32,
+creation_date.year(),
 creation_date.month() as u32,
 creation_date.day_of_month() as u32,
 creation_date.hour() as u32,


@@ -468,7 +468,7 @@ impl ObjectImpl for Rav1Enc {
 }
 "rdo-lookahead-frames" => {
 let settings = self.settings.lock().unwrap();
-(settings.rdo_lookahead_frames as i32).to_value()
+(settings.rdo_lookahead_frames).to_value()
 }
 "tune" => {
 let settings = self.settings.lock().unwrap();


@@ -355,7 +355,7 @@ impl VideoAggregatorImpl for VideoCompare {
 let max_distance_threshold = {
 let settings = self.settings.lock().unwrap();
-settings.max_distance_threshold as f64
+settings.max_distance_threshold
 };
 if message