diff --git a/examples/src/bin/thumbnail.rs b/examples/src/bin/thumbnail.rs
index b29a85372..d64c3da09 100644
--- a/examples/src/bin/thumbnail.rs
+++ b/examples/src/bin/thumbnail.rs
@@ -3,8 +3,9 @@
 
 // {uridecodebin} - {videoconvert} - {appsink}
 
-// The appsink enforces RGBA so that the image crate can use it. The image crate also requires
-// tightly packed pixels, which is the case for RGBA by default in GStreamer.
+// The appsink enforces RGBx so that the image crate can use it. The sample layout is passed
+// with the correct stride from GStreamer to the image crate as GStreamer does not necessarily
+// produce tightly packed pixels, and in case of RGBx never.
 
 use gst::element_error;
 use gst::prelude::*;
@@ -54,7 +55,7 @@ fn create_pipeline(uri: String, out_path: std::path::PathBuf) -> Result<gst::Pipeline, Error>
@@ ... @@ fn create_pipeline(uri: String, out_path: std::path::PathBuf) -> Result<gst::Pipeline, Error>
-                    let img = image::ImageBuffer::<image::Rgba<u8>, _>::from_raw(
-                        info.width(),
-                        info.height(),
-                        map,
-                    )
-                    .expect("Failed to create ImageBuffer, probably a stride mismatch");
+                    // Create a FlatSamples around the borrowed video frame data from GStreamer with
+                    // the correct stride as provided by GStreamer.
+                    let img = image::FlatSamples::<&[u8]> {
+                        samples: frame.plane_data(0).unwrap(),
+                        layout: image::flat::SampleLayout {
+                            channels: 3,       // RGB
+                            channel_stride: 1, // 1 byte from component to component
+                            width: frame.width(),
+                            width_stride: 4, // 4 byte from pixel to pixel
+                            height: frame.height(),
+                            height_stride: frame.plane_stride()[0] as usize, // stride from line to line
+                        },
+                        color_hint: Some(image::ColorType::Rgb8),
+                    };
 
                     // Scale image to our target dimensions
-                    let scaled_img =
-                        image::imageops::thumbnail(&img, target_width as u32, target_height as u32);
+                    let scaled_img = image::imageops::thumbnail(
+                        &img.as_view::<image::Rgb<u8>>()
+                            .expect("couldn't create image view"),
+                        target_width as u32,
+                        target_height as u32,
+                    );
 
                     // Save it at the specific location. This automatically detects the file type
                     // based on the filename.
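
Note on the stride handling introduced above: the sketch below reproduces the same FlatSamples technique on a synthetic, row-padded RGBx buffer so it can be compiled and run without GStreamer. The buffer contents, the 24-byte row stride and the output file name are made up for illustration only; the samples slice stands in for what frame.plane_data(0) returns in the patch, and height_stride for frame.plane_stride()[0]. It assumes an image crate version (0.23/0.24 era) that exposes FlatSamples, flat::SampleLayout and ColorType::Rgb8 at the paths used in the patch.

// Standalone sketch: wrap a row-padded RGBx buffer in image::FlatSamples so the
// image crate respects the stride instead of assuming tightly packed pixels.
fn main() {
    let width = 4u32;
    let height = 2u32;
    // RGBx is 4 bytes per pixel; pretend the producer pads every row to 24 bytes,
    // so the line-to-line stride is larger than width * 4 (16). Hypothetical value.
    let height_stride = 24usize;
    let mut samples = vec![0u8; height_stride * height as usize];
    for y in 0..height as usize {
        for x in 0..width as usize {
            let off = y * height_stride + x * 4;
            samples[off] = 255; // R
            samples[off + 1] = 128; // G
            samples[off + 2] = 64; // B
            // samples[off + 3] is the unused "x" byte
        }
    }

    // Describe the real layout of the buffer instead of assuming it is packed.
    let img = image::FlatSamples::<&[u8]> {
        samples: samples.as_slice(),
        layout: image::flat::SampleLayout {
            channels: 3,       // only R, G and B carry data
            channel_stride: 1, // 1 byte from component to component
            width,
            width_stride: 4, // 4 bytes from pixel to pixel, skipping the "x" byte
            height,
            height_stride, // bytes from line to line, including the padding
        },
        color_hint: Some(image::ColorType::Rgb8),
    };

    // Creating the view fails if the layout does not fit the buffer, which is the
    // kind of mismatch the old ImageBuffer::from_raw code could only report by panicking.
    let view = img
        .as_view::<image::Rgb<u8>>()
        .expect("layout does not match the buffer");

    // The view can be used like any other image, e.g. for scaling and saving.
    let thumb = image::imageops::thumbnail(&view, 2, 1);
    thumb.save("thumb.png").expect("failed to save thumbnail");
}

Declaring channels: 3 with width_stride: 4 is what lets the view skip the unused "x" byte of every RGBx pixel, while height_stride skips any row padding, so the frame never has to be copied into a tightly packed buffer before the image crate can read it.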