examples: Use VideoFrame API in thumbnail example, use RGBx as format and correctly handle stride
commit 1480c65c32
parent e3a65a3a88

1 changed file with 38 additions and 25 deletions
@@ -3,8 +3,9 @@
 // {uridecodebin} - {videoconvert} - {appsink}
-// The appsink enforces RGBA so that the image crate can use it. The image crate also requires
-// tightly packed pixels, which is the case for RGBA by default in GStreamer.
+// The appsink enforces RGBx so that the image crate can use it. The sample layout is passed
+// with the correct stride from GStreamer to the image crate as GStreamer does not necessarily
+// produce tightly packed pixels, and in case of RGBx never.

 use gst::element_error;
 use gst::prelude::*;
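
Aside (not part of the diff): the comment above describes the pipeline topology used by the example. A rough, self-contained sketch of that shape, assuming gst::parse_launch is available and using a placeholder file URI:

use gstreamer as gst;
use gst::prelude::*;

fn main() {
    gst::init().unwrap();

    // uridecodebin -> videoconvert -> appsink, with the appsink caps restricted to RGBx
    // so that videoconvert produces the format the image crate will consume.
    // The URI below is a placeholder.
    let pipeline = gst::parse_launch(
        "uridecodebin uri=file:///path/to/video.mkv ! videoconvert ! \
         appsink name=sink caps=video/x-raw,format=RGBx",
    )
    .expect("Failed to build pipeline");

    // Going to Paused starts pre-rolling; a real program would now wait for the
    // pre-roll to finish (e.g. via the bus) and pull the sample from the appsink.
    pipeline
        .set_state(gst::State::Paused)
        .expect("Failed to start pre-rolling");
    pipeline
        .set_state(gst::State::Null)
        .expect("Failed to shut down the pipeline");
}
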
@@ -54,7 +55,7 @@ fn create_pipeline(uri: String, out_path: std::path::PathBuf) -> Result<gst::Pip
     // both elements will happen during pre-rolling of the pipeline.
     appsink.set_caps(Some(
         &gst::Caps::builder("video/x-raw")
-            .field("format", gst_video::VideoFormat::Rgba.to_str())
+            .field("format", gst_video::VideoFormat::Rgbx.to_str())
             .build(),
     ));
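
Aside (not part of the diff): further down, the example turns the caps of the received sample back into a gst_video::VideoInfo. A minimal sketch of that round trip; the width, height, and framerate values here are made up:

use gstreamer as gst;
use gstreamer_video as gst_video;

fn main() {
    gst::init().unwrap();

    // Caps of the same shape as the appsink filter above, with hypothetical
    // resolution and framerate fields so that a full VideoInfo can be derived.
    let caps = gst::Caps::builder("video/x-raw")
        .field("format", gst_video::VideoFormat::Rgbx.to_str())
        .field("width", 320i32)
        .field("height", 240i32)
        .field("framerate", gst::Fraction::new(30, 1))
        .build();

    // This is what the example does with the caps of the received sample.
    let info = gst_video::VideoInfo::from_caps(&caps).expect("Failed to parse caps");
    println!(
        "format: {:?}, size: {}x{}, strides: {:?}",
        info.format(),
        info.width(),
        info.height(),
        info.stride(),
    );
}
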
@@ -78,15 +79,15 @@ fn create_pipeline(uri: String, out_path: std::path::PathBuf) -> Result<gst::Pip
                     gst::FlowError::Error
                 })?;

-                let caps = sample.caps().expect("Sample without caps");
-                let info = gst_video::VideoInfo::from_caps(caps).expect("Failed to parse caps");
-
                 // Make sure that we only get a single buffer
                 if got_snapshot {
                     return Err(gst::FlowError::Eos);
                 }
                 got_snapshot = true;

+                let caps = sample.caps().expect("Sample without caps");
+                let info = gst_video::VideoInfo::from_caps(caps).expect("Failed to parse caps");
+
                 // At this point, buffer is only a reference to an existing memory region somewhere.
                 // When we want to access its content, we have to map it while requesting the required
                 // mode of access (read, read/write).
@@ -94,37 +95,49 @@ fn create_pipeline(uri: String, out_path: std::path::PathBuf) -> Result<gst::Pip
                 // on the machine's main memory itself, but rather in the GPU's memory.
                 // So mapping the buffer makes the underlying memory region accessible to us.
                 // See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
-                let map = buffer.map_readable().map_err(|_| {
-                    element_error!(
-                        appsink,
-                        gst::ResourceError::Failed,
-                        ("Failed to map buffer readable")
-                    );
-
-                    gst::FlowError::Error
-                })?;
+                let frame = gst_video::VideoFrameRef::from_buffer_ref_readable(buffer, &info)
+                    .map_err(|_| {
+                        element_error!(
+                            appsink,
+                            gst::ResourceError::Failed,
+                            ("Failed to map buffer readable")
+                        );
+
+                        gst::FlowError::Error
+                    })?;

                 // We only want to have a single buffer and then have the pipeline terminate
                 println!("Have video frame");

                 // Calculate a target width/height that keeps the display aspect ratio while having
                 // a height of 240 pixels
-                let display_aspect_ratio = (info.width() as f64 * info.par().numer() as f64)
-                    / (info.height() as f64 * info.par().denom() as f64);
+                let display_aspect_ratio = (frame.width() as f64 * info.par().numer() as f64)
+                    / (frame.height() as f64 * info.par().denom() as f64);
                 let target_height = 240;
                 let target_width = target_height as f64 * display_aspect_ratio;

-                // Create an ImageBuffer around the borrowed video frame data from GStreamer.
-                let img = image::ImageBuffer::<image::Rgba<u8>, _>::from_raw(
-                    info.width(),
-                    info.height(),
-                    map,
-                )
-                .expect("Failed to create ImageBuffer, probably a stride mismatch");
+                // Create a FlatSamples around the borrowed video frame data from GStreamer with
+                // the correct stride as provided by GStreamer.
+                let img = image::FlatSamples::<&[u8]> {
+                    samples: frame.plane_data(0).unwrap(),
+                    layout: image::flat::SampleLayout {
+                        channels: 3,       // RGB
+                        channel_stride: 1, // 1 byte from component to component
+                        width: frame.width(),
+                        width_stride: 4,   // 4 byte from pixel to pixel
+                        height: frame.height(),
+                        height_stride: frame.plane_stride()[0] as usize, // stride from line to line
+                    },
+                    color_hint: Some(image::ColorType::Rgb8),
+                };

                 // Scale image to our target dimensions
-                let scaled_img =
-                    image::imageops::thumbnail(&img, target_width as u32, target_height as u32);
+                let scaled_img = image::imageops::thumbnail(
+                    &img.as_view::<image::Rgb<u8>>()
+                        .expect("couldn't create image view"),
+                    target_width as u32,
+                    target_height as u32,
+                );

                 // Save it at the specific location. This automatically detects the file type
                 // based on the filename.
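
Aside (not part of the diff): the important change above is that the image crate now walks the buffer via the strides given in SampleLayout instead of assuming tightly packed RGBA. A self-contained sketch of how such a layout is interpreted, using a hand-written 2x2 RGBx buffer with an artificially padded row stride of 12 bytes:

use image::GenericImageView;

fn main() {
    // Hypothetical RGBx frame: 2x2 pixels, 4 bytes per pixel, 12 bytes per row,
    // i.e. each row carries 4 padding bytes beyond width * 4.
    let (width, height, stride) = (2u32, 2u32, 12usize);
    let data: Vec<u8> = vec![
        // row 0: red, green, 4 bytes of row padding
        255, 0, 0, 0, 0, 255, 0, 0, 0, 0, 0, 0,
        // row 1: blue, white, 4 bytes of row padding
        0, 0, 255, 0, 255, 255, 255, 0, 0, 0, 0, 0,
    ];

    // Same layout as in the diff above: 3 channels read out of 4 bytes per pixel,
    // rows separated by the stride reported by the producer.
    let img = image::FlatSamples::<&[u8]> {
        samples: &data,
        layout: image::flat::SampleLayout {
            channels: 3,       // RGB, the x byte is skipped
            channel_stride: 1, // 1 byte from component to component
            width,
            width_stride: 4,   // 4 bytes from pixel to pixel
            height,
            height_stride: stride, // stride from line to line
        },
        color_hint: Some(image::ColorType::Rgb8),
    };

    let view = img
        .as_view::<image::Rgb<u8>>()
        .expect("couldn't create image view");
    // Pixel lookups follow the strides, so the padding bytes are never read.
    assert_eq!(view.get_pixel(1, 1).0, [255, 255, 255]);
    println!("pixel (1, 1) = {:?}", view.get_pixel(1, 1));
}
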