ffv1dec: Add support for >8 bit color formats

Sebastian Dröge 2021-09-18 11:27:35 +03:00
parent 08229402cd
commit 600d217e7d
2 changed files with 149 additions and 93 deletions
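Before the diff itself, a minimal sketch of the idea the change implements (illustration only, not part of the commit; the helper name is made up): FFV1's configuration record carries the bit depth and endianness, and the decoder now maps those onto the corresponding >8-bit GStreamer video formats instead of leaving them unsupported. For the luma-only (grayscale) case, for example:

// Hypothetical helper, not part of this commit: the shape of the mapping
// that get_output_format() below fills in for all the 10/12/16-bit cases.
use gst_video::VideoFormat;

fn grayscale_format(bits_per_raw_sample: u8, little_endian: bool) -> Option<VideoFormat> {
    match (bits_per_raw_sample, little_endian) {
        (8, _) => Some(VideoFormat::Gray8),
        (16, true) => Some(VideoFormat::Gray16Le),
        (16, false) => Some(VideoFormat::Gray16Be),
        // Depths not covered here are not negotiated.
        _ => None,
    }
}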

@@ -8,13 +8,14 @@ description = "FFV1 Decoder Plugin"
edition = "2018"
[dependencies]
byte-slice-cast = "1"
ffv1 = { git = "https://github.com/rust-av/ffv1.git", rev = "2afb025a327173ce891954c052e804d0f880368a" }
gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", features = ["v1_12"] }
gst-video = { package = "gstreamer-video", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", features = ["v1_12"] }
gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", features = ["v1_18"] }
gst-video = { package = "gstreamer-video", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", features = ["v1_18"] }
once_cell = "1.0"
[dev-dependencies]
gst-check = { package = "gstreamer-check", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", features = ["v1_12"] }
gst-check = { package = "gstreamer-check", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", features = ["v1_18"] }
[lib]
name = "gstffv1"
crate-type = ["cdylib", "rlib"]

@@ -43,40 +43,40 @@ pub struct Ffv1Dec {
fn get_all_video_formats() -> Vec<glib::SendValue> {
let values = [
VideoFormat::Gray8,
// VideoFormat::Gray16Le,
// VideoFormat::Gray16Be,
VideoFormat::Gray16Le,
VideoFormat::Gray16Be,
VideoFormat::Y444,
// VideoFormat::Y44410le,
// VideoFormat::Y44410be,
// VideoFormat::A44410le,
// VideoFormat::A44410be,
// VideoFormat::Y44412le,
// VideoFormat::Y44412be,
// VideoFormat::Y44416le,
// VideoFormat::Y44416be,
VideoFormat::Y44410le,
VideoFormat::Y44410be,
VideoFormat::A44410le,
VideoFormat::A44410be,
VideoFormat::Y44412le,
VideoFormat::Y44412be,
VideoFormat::Y44416le,
VideoFormat::Y44416be,
VideoFormat::A420,
VideoFormat::Y42b,
// VideoFormat::I42210le,
// VideoFormat::I42210be,
// VideoFormat::A42210le,
// VideoFormat::A42210be,
// VideoFormat::I42212le,
// VideoFormat::I42212be,
VideoFormat::I42210le,
VideoFormat::I42210be,
VideoFormat::A42210le,
VideoFormat::A42210be,
VideoFormat::I42212le,
VideoFormat::I42212be,
VideoFormat::I420,
// VideoFormat::I42010le,
// VideoFormat::I42010be,
// VideoFormat::I42012le,
// VideoFormat::I42012be,
VideoFormat::I42010le,
VideoFormat::I42010be,
VideoFormat::I42012le,
VideoFormat::I42012be,
VideoFormat::Gbra,
VideoFormat::Gbr,
// VideoFormat::Gbr10le,
// VideoFormat::Gbr10be,
// VideoFormat::Gbra10le,
// VideoFormat::Gbra10be,
// VideoFormat::Gbr12le,
// VideoFormat::Gbr12be,
// VideoFormat::Gbra12le,
// VideoFormat::Gbra12be,
VideoFormat::Gbr10le,
VideoFormat::Gbr10be,
VideoFormat::Gbra10le,
VideoFormat::Gbra10be,
VideoFormat::Gbr12le,
VideoFormat::Gbr12be,
VideoFormat::Gbra12le,
VideoFormat::Gbra12be,
];
values.iter().map(|i| i.to_str().to_send_value()).collect()
@@ -96,33 +96,33 @@ fn get_output_format(record: &ConfigRecord) -> Option<VideoFormat> {
) {
// Interpret luma-only as grayscale
(false, _, _, 8, false, _) => Some(VideoFormat::Gray8),
// (false, _, _, 16, false, true) => Some(VideoFormat::Gray16Le),
// (false, _, _, 16, false, false) => Some(VideoFormat::Gray16Be),
(false, _, _, 16, false, true) => Some(VideoFormat::Gray16Le),
(false, _, _, 16, false, false) => Some(VideoFormat::Gray16Be),
// 4:4:4
(true, 4, 4, 8, false, _) => Some(VideoFormat::Y444),
// (true, 4, 4, 10, false, true) => Some(VideoFormat::Y44410le),
// (true, 4, 4, 10, false, false) => Some(VideoFormat::Y44410be),
// (true, 4, 4, 10, true, true) => Some(VideoFormat::A44410le),
// (true, 4, 4, 10, true, false) => Some(VideoFormat::A44410be),
// (true, 4, 4, 12, false, true) => Some(VideoFormat::Y44412le),
// (true, 4, 4, 12, false, false) => Some(VideoFormat::Y44412be),
// (true, 4, 4, 16, false, true) => Some(VideoFormat::Y44416le),
// (true, 4, 4, 16, false, false) => Some(VideoFormat::Y44416be),
(true, 4, 4, 10, false, true) => Some(VideoFormat::Y44410le),
(true, 4, 4, 10, false, false) => Some(VideoFormat::Y44410be),
(true, 4, 4, 10, true, true) => Some(VideoFormat::A44410le),
(true, 4, 4, 10, true, false) => Some(VideoFormat::A44410be),
(true, 4, 4, 12, false, true) => Some(VideoFormat::Y44412le),
(true, 4, 4, 12, false, false) => Some(VideoFormat::Y44412be),
(true, 4, 4, 16, false, true) => Some(VideoFormat::Y44416le),
(true, 4, 4, 16, false, false) => Some(VideoFormat::Y44416be),
// 4:2:2
(true, 2, 2, 8, false, _) => Some(VideoFormat::Y42b),
// (true, 2, 2, 10, false, true) => Some(VideoFormat::I42210le),
// (true, 2, 2, 10, false, false) => Some(VideoFormat::I42210be),
// (true, 2, 2, 10, true, true) => Some(VideoFormat::A42210le),
// (true, 2, 2, 10, true, false) => Some(VideoFormat::A42210be),
// (true, 2, 2, 12, false, true) => Some(VideoFormat::I42212le),
// (true, 2, 2, 12, false, false) => Some(VideoFormat::I42212be),
(true, 2, 2, 10, false, true) => Some(VideoFormat::I42210le),
(true, 2, 2, 10, false, false) => Some(VideoFormat::I42210be),
(true, 2, 2, 10, true, true) => Some(VideoFormat::A42210le),
(true, 2, 2, 10, true, false) => Some(VideoFormat::A42210be),
(true, 2, 2, 12, false, true) => Some(VideoFormat::I42212le),
(true, 2, 2, 12, false, false) => Some(VideoFormat::I42212be),
// 4:2:0
(true, 1, 1, 8, false, _) => Some(VideoFormat::I420),
(true, 1, 1, 8, true, _) => Some(VideoFormat::A420),
// (true, 1, 1, 10, false, true) => Some(VideoFormat::I42010le),
// (true, 1, 1, 10, false, false) => Some(VideoFormat::I42010be),
// (true, 1, 1, 12, false, true) => Some(VideoFormat::I42012le),
// (true, 1, 1, 12, false, false) => Some(VideoFormat::I42012be),
(true, 1, 1, 10, false, true) => Some(VideoFormat::I42010le),
(true, 1, 1, 10, false, false) => Some(VideoFormat::I42010be),
(true, 1, 1, 12, false, true) => Some(VideoFormat::I42012le),
(true, 1, 1, 12, false, false) => Some(VideoFormat::I42012be),
// Nothing matched
(_, _, _, _, _, _) => None,
},
@@ -133,14 +133,15 @@ fn get_output_format(record: &ConfigRecord) -> Option<VideoFormat> {
) {
(8, true, _) => Some(VideoFormat::Gbra),
(8, false, _) => Some(VideoFormat::Gbr),
// (10, false, true) => Some(VideoFormat::Gbr10le),
// (10, false, false) => Some(VideoFormat::Gbr10be),
// (10, true, true) => Some(VideoFormat::Gbra10le),
// (10, true, false) => Some(VideoFormat::Gbra10be),
// (12, false, true) => Some(VideoFormat::Gbr12le),
// (12, false, false) => Some(VideoFormat::Gbr12be),
// (12, true, true) => Some(VideoFormat::Gbra12le),
// (12, true, false) => Some(VideoFormat::Gbra12be),
(10, false, true) => Some(VideoFormat::Gbr10le),
(10, false, false) => Some(VideoFormat::Gbr10be),
(10, true, true) => Some(VideoFormat::Gbra10le),
(10, true, false) => Some(VideoFormat::Gbra10be),
(12, false, true) => Some(VideoFormat::Gbr12le),
(12, false, false) => Some(VideoFormat::Gbr12be),
(12, true, true) => Some(VideoFormat::Gbra12le),
(12, true, false) => Some(VideoFormat::Gbra12be),
// Nothing matched
(_, _, _) => None,
},
_ => panic!("Unknown color_space type"),
@@ -148,7 +149,6 @@ fn get_output_format(record: &ConfigRecord) -> Option<VideoFormat> {
}
impl Ffv1Dec {
// FIXME: Implement other pixel depths
pub fn get_decoded_frame(
&self,
mut decoded_frame: Frame,
@@ -159,49 +159,104 @@ impl Ffv1Dec {
let mut_buf = buf.make_mut();
let format_info = output_info.format_info();
// Greater depths are not yet supported
assert_eq!(decoded_frame.bit_depth, 8);
let mut offsets = vec![];
let mut strides = vec![];
let mut acc_offset = 0;
for (plane, decoded_plane) in decoded_frame.buf.drain(..).enumerate() {
let component = format_info
.plane()
.iter()
.position(|&p| p == plane as u32)
.unwrap() as u8;
if decoded_frame.bit_depth == 8 {
for (plane, decoded_plane) in decoded_frame.buf.drain(..).enumerate() {
let component = format_info
.plane()
.iter()
.position(|&p| p == plane as u32)
.unwrap() as u8;
let comp_height = format_info.scale_height(component, output_info.height()) as usize;
let src_stride = decoded_plane.len() / comp_height;
let dest_stride = output_info.stride()[plane] as usize;
let comp_height =
format_info.scale_height(component, output_info.height()) as usize;
let src_stride = decoded_plane.len() / comp_height;
let dest_stride = output_info.stride()[plane] as usize;
let mem = if video_meta_supported || src_stride == dest_stride {
// Just wrap the decoded frame vecs and push them out
gst::Memory::from_mut_slice(decoded_plane)
} else {
// Mismatched stride, let's copy
let out_plane = gst::Memory::with_size(dest_stride * comp_height);
let mut out_plane_mut = out_plane.into_mapped_memory_writable().unwrap();
let mem = if video_meta_supported || src_stride == dest_stride {
// Just wrap the decoded frame vecs and push them out
gst::Memory::from_mut_slice(decoded_plane)
} else {
// Mismatched stride, let's copy
let out_plane = gst::Memory::with_size(dest_stride * comp_height);
let mut out_plane_mut = out_plane.into_mapped_memory_writable().unwrap();
for (in_line, out_line) in decoded_plane
.as_slice()
.chunks_exact(src_stride)
.zip(out_plane_mut.as_mut_slice().chunks_exact_mut(dest_stride))
{
out_line[..src_stride].copy_from_slice(in_line);
}
for (in_line, out_line) in decoded_plane
.as_slice()
.chunks_exact(src_stride)
.zip(out_plane_mut.as_mut_slice().chunks_exact_mut(dest_stride))
{
out_line[..src_stride].copy_from_slice(in_line);
}
out_plane_mut.into_memory()
};
out_plane_mut.into_memory()
};
let mem_size = mem.size();
mut_buf.append_memory(mem);
let mem_size = mem.size();
mut_buf.append_memory(mem);
strides.push(src_stride as i32);
offsets.push(acc_offset);
acc_offset += mem_size;
strides.push(src_stride as i32);
offsets.push(acc_offset);
acc_offset += mem_size;
}
} else if decoded_frame.bit_depth <= 16 {
use byte_slice_cast::{AsByteSlice, AsMutByteSlice};
for (plane, decoded_plane) in decoded_frame.buf16.drain(..).enumerate() {
let component = format_info
.plane()
.iter()
.position(|&p| p == plane as u32)
.unwrap() as u8;
let comp_height =
format_info.scale_height(component, output_info.height()) as usize;
let src_stride = (decoded_plane.len() * 2) / comp_height;
let dest_stride = output_info.stride()[plane] as usize;
let mem = if video_meta_supported || src_stride == dest_stride {
struct WrappedVec16(Vec<u16>);
impl AsRef<[u8]> for WrappedVec16 {
fn as_ref(&self) -> &[u8] {
self.0.as_byte_slice()
}
}
impl AsMut<[u8]> for WrappedVec16 {
fn as_mut(&mut self) -> &mut [u8] {
self.0.as_mut_byte_slice()
}
}
// Just wrap the decoded frame vecs and push them out
gst::Memory::from_mut_slice(WrappedVec16(decoded_plane))
} else {
// Mismatched stride, let's copy
let out_plane = gst::Memory::with_size(dest_stride * comp_height);
let mut out_plane_mut = out_plane.into_mapped_memory_writable().unwrap();
for (in_line, out_line) in decoded_plane
.as_slice()
.as_byte_slice()
.chunks_exact(src_stride)
.zip(out_plane_mut.as_mut_slice().chunks_exact_mut(dest_stride))
{
out_line[..src_stride].copy_from_slice(in_line);
}
out_plane_mut.into_memory()
};
let mem_size = mem.size();
mut_buf.append_memory(mem);
strides.push(src_stride as i32);
offsets.push(acc_offset);
acc_offset += mem_size;
}
} else {
unimplemented!("Bit depth {} not supported yet", decoded_frame.bit_depth);
}
if video_meta_supported {
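For reference, a standalone sketch of the byte-slice-cast wrapper trick used in the 16-bit branch above (assuming byte-slice-cast 1.x as added to Cargo.toml; the sample values are arbitrary). gst::Memory::from_mut_slice() takes a type whose storage can be viewed as bytes, which Vec<u16> cannot provide on its own, so the 16-bit plane is wrapped in a newtype that implements AsRef<[u8]> and AsMut<[u8]> by reinterpreting the u16 storage in place, avoiding a copy.

// Standalone illustration of the WrappedVec16 newtype from the diff above.
use byte_slice_cast::{AsByteSlice, AsMutByteSlice};

struct WrappedVec16(Vec<u16>);

impl AsRef<[u8]> for WrappedVec16 {
    fn as_ref(&self) -> &[u8] {
        self.0.as_byte_slice()
    }
}

impl AsMut<[u8]> for WrappedVec16 {
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_mut_byte_slice()
    }
}

fn main() {
    // Two 16-bit samples are exposed as four bytes, in native endianness.
    let mut plane = WrappedVec16(vec![0x0102, 0x0304]);
    assert_eq!(plane.as_ref().len(), 4);
    plane.as_mut()[0] = 0xff;
}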