gif: Add initial implementation of a gif encoder

- Implemented a simple gif encoder based on the rust crate "gif".
- Currently supported input pixel formats are RGB and RGBA
- The encoder dynamically changes frame delays to approximate the actual
  input framerate
- For the moment, each frame uses its own local color palette, leading to
  good image quality, but big files
- Every frame is currently a full frame. No incremental frames for now
- The produced GIF is currently compressed (LZW)
This commit is contained in:
Markus Ebner 2020-02-24 16:38:05 +01:00 committed by Sebastian Dröge
parent 4dac77bb93
commit 750b29b76c
8 changed files with 650 additions and 0 deletions

1
.gitignore vendored
View file

@ -3,3 +3,4 @@ target
*~
*.bk
*.swp
.vscode

View file

@ -18,6 +18,7 @@ members = [
"gst-plugin-fallbackswitch",
"gst-plugin-lewton",
"gst-plugin-claxon",
"gst-plugin-gif",
]
[profile.release]

28
gst-plugin-gif/Cargo.toml Normal file
View file

@ -0,0 +1,28 @@
# Crate manifest for the GIF encoder GStreamer plugin.
[package]
name = "gst-plugin-gif"
version = "0.1.0"
authors = ["Markus Ebner <info@ebner-markus.de>"]
repository = "https://gitlab.freedesktop.org/gstreamer/gst-plugin-rs"
license = "MIT/Apache-2.0"
edition = "2018"

[dependencies]
# glib/gstreamer bindings are tracked from git (pre-release bindings).
glib = { git = "https://github.com/gtk-rs/glib" }
gstreamer = { git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs" }
gstreamer-video = { git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs" }
gstreamer-check = { git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs" }
# Pure-Rust GIF encoder that performs the actual LZW encoding.
gif = "0.10"
atomic_refcell = "0.1"
once_cell = "1"

[lib]
name = "gstgif"
# cdylib: loadable GStreamer plugin; rlib: linkable for the example/tests.
crate-type = ["cdylib", "rlib"]
path = "src/lib.rs"

[[example]]
name = "testvideosrc2gif"
path = "examples/testvideosrc2gif.rs"

[build-dependencies]
gst-plugin-version-helper = { path="../gst-plugin-version-helper" }

5
gst-plugin-gif/build.rs Normal file
View file

@ -0,0 +1,5 @@
extern crate gst_plugin_version_helper;
fn main() {
gst_plugin_version_helper::get_info()
}

View file

@ -0,0 +1,54 @@
// Copyright (C) 2020 Markus Ebner <info@ebner-markus.de>
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Library General Public
// License as published by the Free Software Foundation; either
// version 2 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Library General Public License for more details.
//
// You should have received a copy of the GNU Library General Public
// License along with this library; if not, write to the
// Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
// Boston, MA 02110-1335, USA.
extern crate gstreamer as gst;
use gst::prelude::*;
// Pipeline: a finite (100-buffer) test-video source, converted to a raw
// format gifenc accepts, encoded to GIF and written to `test.gif`.
const ENCODE_PIPELINE: &str = "videotestsrc is-live=false num-buffers=100 ! videoconvert ! gifenc ! filesink location=test.gif";

/// Minimal example: run ENCODE_PIPELINE to completion and report bus errors.
fn main() {
    gst::init().unwrap();
    // Register the plugin statically so parse_launch can find "gifenc"
    // without the plugin being installed on the system.
    gstgif::plugin_register_static().expect("Failed to register gif plugin");
    let pipeline = gst::parse_launch(ENCODE_PIPELINE).unwrap();
    let bus = pipeline.get_bus().unwrap();
    pipeline
        .set_state(gst::State::Playing)
        .expect("Failed to set pipeline state to playing");
    // Block on the bus until EOS or an error ends the run.
    for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
        use gst::MessageView;
        match msg.view() {
            MessageView::Eos(..) => break,
            MessageView::Error(err) => {
                println!(
                    "Error from {:?}: {} ({:?})",
                    err.get_src().map(|s| s.get_path_string()),
                    err.get_error(),
                    err.get_debug()
                );
                break;
            }
            _ => (),
        }
    }
    // Always shut the pipeline down cleanly, even after an error.
    pipeline
        .set_state(gst::State::Null)
        .expect("Failed to set pipeline state to null");
}

View file

@ -0,0 +1,456 @@
// Copyright (C) 2020 Markus Ebner <info@ebner-markus.de>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use atomic_refcell::AtomicRefCell;
use gif::SetParameter;
use glib::subclass;
use glib::subclass::prelude::*;
use gst::subclass::prelude::*;
use gst_video::prelude::*;
use gst_video::subclass::prelude::*;
use gstreamer_video as gst_video;
use once_cell::sync::Lazy;
use std::{
io,
io::Write,
sync::{Arc, Mutex},
};
const DEFAULT_REPEAT: i32 = 0;
/// The gif::Encoder requires a std::io::Write implementation, to which it
/// can save the generated gif. This struct is used as a temporary cache, into
/// which the encoder can write encoded frames, such that we can read them back
/// and commit them to the gstreamer pipeline.
struct CacheBuffer {
    // Interior-mutable byte buffer, so the cache can be shared behind an Arc
    // and still be written through a `&self` reference.
    buffer: AtomicRefCell<Vec<u8>>,
}

impl CacheBuffer {
    /// Create a new, empty cache.
    pub fn new() -> Self {
        Self {
            buffer: AtomicRefCell::new(Vec::new()),
        }
    }

    /// Discard all cached bytes (keeps the allocation for reuse).
    pub fn clear(&self) {
        self.buffer.borrow_mut().clear();
    }

    /// Append `buf` to the cache; mirrors std::io::Write::write.
    pub fn write(&self, buf: &[u8]) -> io::Result<usize> {
        self.buffer.borrow_mut().write(buf)
    }

    /// Move all cached bytes out, leaving the cache empty.
    pub fn consume(&self) -> Vec<u8> {
        // mem::take is the idiomatic spelling of mem::replace(_, Vec::new()).
        std::mem::take(&mut *self.buffer.borrow_mut())
    }
}
/// Writer for a CacheBuffer instance. This class is passed to the gif::Encoder.
/// Everything written to the CacheBufferWriter is stored in the underlying CacheBuffer.
struct CacheBufferWriter {
    cache_buffer: Arc<CacheBuffer>,
}

impl CacheBufferWriter {
    /// Wrap the shared cache in a writer handle for the GIF encoder.
    pub fn new(cache_buffer: Arc<CacheBuffer>) -> Self {
        CacheBufferWriter { cache_buffer }
    }
}

impl Write for CacheBufferWriter {
    fn write(&mut self, data: &[u8]) -> io::Result<usize> {
        // Forward every chunk straight into the shared cache.
        self.cache_buffer.write(data)
    }

    fn flush(&mut self) -> io::Result<()> {
        // Writes land in memory immediately; there is nothing to flush.
        Ok(())
    }
}
// Property values of the element; cheap to copy into per-stream state.
#[derive(Debug, Clone, Copy)]
struct Settings {
    repeat: i32,
}

impl Default for Settings {
    fn default() -> Self {
        // Mirror the ParamSpec default of the "repeat" property.
        Self {
            repeat: DEFAULT_REPEAT,
        }
    }
}
// GObject properties of the element. The index into this array is the
// property id handed to set_property()/get_property().
static PROPERTIES: [subclass::Property; 1] = [subclass::Property("repeat", |name| {
    glib::ParamSpec::int(
        name,
        "Repeat",
        "Repeat (-1 to loop forever, 0 .. n finite repetitions)",
        -1,
        // Upper bound u16::MAX matches the storage used for finite repeats.
        std::u16::MAX as i32,
        DEFAULT_REPEAT,
        glib::ParamFlags::READWRITE,
    )
})];
// Per-stream encoder state; recreated on every format change.
struct State {
    video_info: gst_video::VideoInfo,
    // Shared byte cache the gif::Encoder writes into; drained after each frame.
    cache: Arc<CacheBuffer>,
    // Presentation timestamp the GIF has reached so far (first frame's pts
    // plus a multiple of 10ms). None until the first frame was handled.
    gif_pts: Option<gst::ClockTime>,
    // pts of the most recently handled input frame; used to timestamp the
    // trailer buffer emitted on flush.
    last_actual_pts: gst::ClockTime,
    // The actual GIF encoder. None until reset() ran; set to None on flush so
    // the dropped encoder writes the GIF trailer into the cache.
    context: Option<gif::Encoder<CacheBufferWriter>>,
}

impl State {
    pub fn new(video_info: gst_video::VideoInfo) -> Self {
        Self {
            video_info,
            cache: Arc::new(CacheBuffer::new()),
            gif_pts: None,
            last_actual_pts: gst::ClockTime::none(),
            context: None,
        }
    }

    // Clear all timing/cache state and (re)create the GIF encoder with the
    // given settings, writing into this state's CacheBuffer.
    pub fn reset(&mut self, settings: Settings) {
        self.cache.clear();
        self.gif_pts = None;
        self.last_actual_pts = gst::ClockTime::none();
        // initialize and configure encoder with a CacheBufferWriter pointing
        // to our CacheBuffer instance
        let mut encoder = gif::Encoder::new(
            CacheBufferWriter::new(self.cache.clone()),
            // The sink caps limit width/height to u16::MAX, so these casts
            // cannot truncate.
            self.video_info.width() as u16,
            self.video_info.height() as u16,
            &[], // no global palette; every frame carries its own local one
        )
        .expect("Failed to initialize GIF encoder");
        // "repeat" is constrained to -1..=u16::MAX by its ParamSpec, so the
        // cast to u16 is lossless for all finite values.
        match settings.repeat {
            -1 => encoder.set(gif::Repeat::Infinite),
            _ => encoder.set(gif::Repeat::Finite(settings.repeat as u16)),
        }
        .expect("Failed to configure encoder");
        self.context = Some(encoder);
    }
}
// The gifenc element. `state` holds per-stream data (None while
// unconfigured); `settings` holds the current property values.
struct GifEnc {
    state: AtomicRefCell<Option<State>>,
    settings: Mutex<Settings>,
}
// Debug category used by all gst_debug!/gst_loggable_error! calls below.
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
    gst::DebugCategory::new("gifenc", gst::DebugColorFlags::empty(), Some("GIF encoder"))
});
impl ObjectSubclass for GifEnc {
    const NAME: &'static str = "GifEnc";
    type ParentType = gst_video::VideoEncoder;
    type Instance = gst::subclass::ElementInstanceStruct<Self>;
    type Class = subclass::simple::ClassStruct<Self>;

    glib_object_subclass!();

    fn new() -> Self {
        Self {
            state: AtomicRefCell::new(None),
            settings: Mutex::new(Default::default()),
        }
    }

    // One-time class setup: element metadata, pad templates and properties.
    fn class_init(klass: &mut subclass::simple::ClassStruct<Self>) {
        klass.set_metadata(
            "GIF encoder",
            "Encoder/Video",
            "GIF encoder",
            "Markus Ebner <info@ebner-markus.de>",
        );
        // Input: raw RGB/RGBA video. Width/height are capped at u16::MAX,
        // matching the 16-bit dimensions used by the gif encoder.
        let sink_caps = gst::Caps::new_simple(
            "video/x-raw",
            &[
                (
                    "format",
                    &gst::List::new(&[
                        &gst_video::VideoFormat::Rgb.to_str(),
                        &gst_video::VideoFormat::Rgba.to_str(),
                    ]),
                ),
                ("width", &gst::IntRange::<i32>::new(1, std::u16::MAX as i32)),
                (
                    "height",
                    &gst::IntRange::<i32>::new(1, std::u16::MAX as i32),
                ),
                (
                    "framerate",
                    &gst::FractionRange::new(
                        gst::Fraction::new(1, 1),
                        // frame-delay timing in gif is a multiple of 10ms -> max 100fps
                        gst::Fraction::new(100, 1),
                    ),
                ),
            ],
        );
        let sink_pad_template = gst::PadTemplate::new(
            "sink",
            gst::PadDirection::Sink,
            gst::PadPresence::Always,
            &sink_caps,
        )
        .unwrap();
        klass.add_pad_template(sink_pad_template);
        // Output: a single image/gif stream.
        let src_caps = gst::Caps::new_simple("image/gif", &[]);
        let src_pad_template = gst::PadTemplate::new(
            "src",
            gst::PadDirection::Src,
            gst::PadPresence::Always,
            &src_caps,
        )
        .unwrap();
        klass.add_pad_template(src_pad_template);
        klass.install_properties(&PROPERTIES);
    }
}
impl ObjectImpl for GifEnc {
    glib_object_impl!();

    // Property setter; `id` indexes into PROPERTIES.
    fn set_property(&self, _obj: &glib::Object, id: usize, value: &glib::Value) {
        let prop = &PROPERTIES[id];
        match *prop {
            subclass::Property("repeat", ..) => {
                let mut settings = self.settings.lock().unwrap();
                // GObject validated the value against the ParamSpec already.
                settings.repeat = value.get_some().expect("type checked upstream");
            }
            _ => unimplemented!(),
        }
    }

    // Property getter; `id` indexes into PROPERTIES.
    fn get_property(&self, _obj: &glib::Object, id: usize) -> Result<glib::Value, ()> {
        let prop = &PROPERTIES[id];
        match *prop {
            subclass::Property("repeat", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.repeat.to_value())
            }
            _ => unimplemented!(),
        }
    }
}
impl ElementImpl for GifEnc {}
impl VideoEncoderImpl for GifEnc {
    fn stop(&self, _element: &gst_video::VideoEncoder) -> Result<(), gst::ErrorMessage> {
        // Drop all per-stream state; the next set_format() recreates it.
        *self.state.borrow_mut() = None;
        Ok(())
    }

    fn set_format(
        &self,
        element: &gst_video::VideoEncoder,
        state: &gst_video::VideoCodecState<'static, gst_video::video_codec_state::Readable>,
    ) -> Result<(), gst::LoggableError> {
        // Finish any GIF currently in progress before switching formats.
        self.flush_encoder(element)
            .map_err(|_| gst_loggable_error!(CAT, "Failed to drain"))?;
        let video_info = state.get_info();
        gst_debug!(CAT, obj: element, "Setting format {:?}", video_info);
        {
            // Build fresh per-stream state and arm the encoder with the
            // current property values.
            let mut state = State::new(video_info);
            let settings = self.settings.lock().unwrap();
            state.reset(*settings);
            *self.state.borrow_mut() = Some(state);
        }
        let output_state = element
            .set_output_state(gst::Caps::new_simple("image/gif", &[]), Some(state))
            .map_err(|_| gst_loggable_error!(CAT, "Failed to set output state"))?;
        element
            .negotiate(output_state)
            .map_err(|_| gst_loggable_error!(CAT, "Failed to negotiate"))?;
        self.parent_set_format(element, state)
    }

    fn finish(
        &self,
        element: &gst_video::VideoEncoder,
    ) -> Result<gst::FlowSuccess, gst::FlowError> {
        // End of stream: emit the GIF trailer downstream.
        self.flush_encoder(element)
    }

    // Encode one input frame into the GIF and push the result downstream.
    fn handle_frame(
        &self,
        element: &gst_video::VideoEncoder,
        mut frame: gst_video::VideoCodecFrame,
    ) -> Result<gst::FlowSuccess, gst::FlowError> {
        let mut state_guard = self.state.borrow_mut();
        let state = state_guard.as_mut().ok_or(gst::FlowError::NotNegotiated)?;
        gst_debug!(
            CAT,
            obj: element,
            "Sending frame {}",
            frame.get_system_frame_number()
        );
        let input_buffer = frame
            .get_input_buffer()
            .expect("frame without input buffer");
        {
            let in_frame = gst_video::VideoFrameRef::from_buffer_ref_readable(
                &*input_buffer,
                &state.video_info,
            )
            .map_err(|_| {
                // NOTE(review): the message says "output buffer", but this
                // maps the *input* buffer — consider rewording.
                gst_element_error!(
                    element,
                    gst::CoreError::Failed,
                    ["Failed to map output buffer readable"]
                );
                gst::FlowError::Error
            })?;
            let frame_width = in_frame.info().width();
            let frame_height = in_frame.info().height();
            // Calculate delay to new frame by calculating the difference between the current actual
            // presentation timestamp of the last frame within the gif, and the pts of the new frame.
            // This results in variable frame delays in the gif - but an overall constant fps.
            state.last_actual_pts = in_frame.buffer().get_pts();
            if state.gif_pts.is_none() {
                // First frame: use pts of first input frame as origin
                state.gif_pts = Some(in_frame.buffer().get_pts());
            }
            let frame_delay = in_frame.buffer().get_pts() - state.gif_pts.unwrap();
            if frame_delay.is_none() {
                // Either operand of the subtraction above was CLOCK_TIME_NONE.
                gst_element_error!(
                    element,
                    gst::CoreError::Failed,
                    ["No PTS set on input frame. Unable to calculate proper frame timing."]
                );
                return Err(gst::FlowError::Error);
            }
            // Copy out of the (possibly stride-padded) mapped frame.
            let mut raw_frame = get_tightly_packed_framebuffer(&in_frame);
            let mut gif_frame = match in_frame.info().format() {
                gst_video::VideoFormat::Rgb => {
                    gif::Frame::from_rgb_speed(
                        frame_width as u16,
                        frame_height as u16,
                        &raw_frame,
                        10, // TODO: Export option for this quality/speed tradeoff
                    )
                }
                gst_video::VideoFormat::Rgba => {
                    // from_rgba_speed requires mutable access to the pixels.
                    gif::Frame::from_rgba_speed(
                        frame_width as u16,
                        frame_height as u16,
                        &mut raw_frame,
                        10, //TODO: Export option for this quality/speed tradeoff
                    )
                }
                // Caps negotiation guarantees RGB or RGBA here.
                _ => unreachable!(),
            };
            // apply encoding settings to frame (gif uses multiples of 10ms as frame_delay)
            // use float arithmetic with rounding for this calculation, since small stuttering
            // is probably less visible than the large stuttering when a complete 10ms have to
            // "catch up".
            // unwrap() is safe: frame_delay was checked to be not-None above.
            gif_frame.delay = (frame_delay.mseconds().unwrap() as f32 / 10.0).round() as u16;
            // Advance the GIF timeline by the (quantized) delay actually used.
            state.gif_pts.replace(
                state.gif_pts.unwrap() + gst::ClockTime::from_mseconds(gif_frame.delay as u64 * 10),
            );
            // encode new frame
            let context = state.context.as_mut().unwrap();
            if let Err(e) = context.write_frame(&gif_frame) {
                gst_element_error!(element, gst::CoreError::Failed, [&e.to_string()]);
                return Err(gst::FlowError::Error);
            }
        }
        // The encoder directly outputs one frame for each input frame
        // Since the output is directly available, we can re-use the input frame
        // to push results to the pipeline
        let buffer = state.cache.consume();
        // Avoid keeping the state locked while calling finish_frame()
        drop(state_guard);
        let output_buffer = gst::Buffer::from_mut_slice(buffer);
        // Currently not using incremental frames -> every frame is a keyframe
        frame.set_flags(gst_video::VideoCodecFrameFlags::SYNC_POINT);
        frame.set_output_buffer(output_buffer);
        element.finish_frame(Some(frame))
    }
}
impl GifEnc {
    // Finalize the current GIF: drop the encoder so it writes the GIF trailer
    // into the cache, push that trailer downstream, and re-arm the encoder
    // for a potential next round without a format change.
    fn flush_encoder(
        &self,
        element: &gst_video::VideoEncoder,
    ) -> Result<gst::FlowSuccess, gst::FlowError> {
        gst_debug!(CAT, obj: element, "Flushing");
        let trailer_buffer = self.state.borrow_mut().as_mut().map(|state| {
            // Drop encoder to flush and take flushed data (gif trailer)
            state.context = None;
            let buffer = state.cache.consume();
            // reset internal state
            let settings = self.settings.lock().unwrap();
            // manually produce a buffer that carries the trailer bytes
            let mut trailer_buffer = gst::Buffer::from_mut_slice(buffer);
            {
                let trailer_buffer = trailer_buffer.get_mut().unwrap();
                trailer_buffer.set_pts(state.last_actual_pts);
                // Mark the buffer: it closes the stream and must not be
                // dropped downstream.
                trailer_buffer.set_flags(gst::BufferFlags::LAST);
                trailer_buffer.set_flags(gst::BufferFlags::NON_DROPPABLE);
            }
            // Initialize the encoder again, to be ready for a new round without format change
            state.reset(*settings);
            // return the constructed buffer containing the gif trailer
            trailer_buffer
        });
        if let Some(trailer_buffer) = trailer_buffer {
            // manually push GIF trailer to the encoder's src pad
            let srcpad = element.get_static_pad("src").unwrap();
            srcpad.push(trailer_buffer)?;
        }
        Ok(gst::FlowSuccess::Ok)
    }
}
/// Helper method that takes a gstreamer video-frame and copies it into a
/// tightly packed rgb(a) buffer, ready for consumption by the gif encoder.
fn get_tightly_packed_framebuffer(frame: &gst_video::VideoFrameRef<&gst::BufferRef>) -> Vec<u8> {
    assert_eq!(frame.n_planes(), 1); // RGB and RGBA are tightly packed
    let bytes_per_line = (frame.info().width() * frame.n_components()) as usize;
    let stride = frame.info().stride()[0] as usize;
    let height = frame.info().height() as usize;
    let mut packed: Vec<u8> = Vec::with_capacity(bytes_per_line * height);
    // Strip the per-line stride padding by copying only the visible bytes of
    // each line into the output buffer.
    for padded_line in frame.plane_data(0).unwrap().chunks_exact(stride) {
        packed.extend_from_slice(&padded_line[..bytes_per_line]);
    }
    packed
}
// Register the "gifenc" element with the given plugin; called by plugin_init.
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
    gst::Element::register(
        Some(plugin),
        "gifenc",
        gst::Rank::Primary,
        GifEnc::get_type(),
    )
}

30
gst-plugin-gif/src/lib.rs Normal file
View file

@ -0,0 +1,30 @@
// Copyright (C) 2020 Markus Ebner <info@ebner-markus.de>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[macro_use]
extern crate glib;
#[macro_use]
extern crate gstreamer as gst;
mod gifenc;
// Plugin entry point invoked by GStreamer when the plugin is loaded.
fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
    gifenc::register(plugin)
}

// Plugin metadata. The env!() values come from Cargo and from the
// gst-plugin-version-helper build script (COMMIT_ID, BUILD_REL_DATE).
gst_plugin_define!(
    gifenc,
    env!("CARGO_PKG_DESCRIPTION"),
    plugin_init,
    concat!(env!("CARGO_PKG_VERSION"), "-", env!("COMMIT_ID")),
    "MIT/X11",
    env!("CARGO_PKG_NAME"),
    env!("CARGO_PKG_NAME"),
    env!("CARGO_PKG_REPOSITORY"),
    env!("BUILD_REL_DATE")
);

View file

@ -0,0 +1,75 @@
// Copyright (C) 2020 Markus Ebner <info@ebner-markus.de>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate glib;
extern crate gstreamer as gst;
extern crate gstreamer_check as gst_check;
extern crate gstreamer_video as gst_video;
extern crate gstgif;
// One-time GStreamer + plugin initialization, shared by all tests.
fn init() {
    use std::sync::Once;
    static INIT: Once = Once::new();
    INIT.call_once(|| {
        gst::init().unwrap();
        gstgif::plugin_register_static().expect("gif test");
    });
}
#[test]
fn test_encode_rgba() {
    init();
    // 160x120 @ 30fps, RGBA input.
    test_encode(
        &gst_video::VideoInfo::new(gst_video::VideoFormat::Rgba, 160, 120)
            .fps((30, 1))
            .build()
            .unwrap(),
    );
}
#[test]
fn test_encode_rgb() {
    init();
    // 160x120 @ 30fps, RGB input.
    test_encode(
        &gst_video::VideoInfo::new(gst_video::VideoFormat::Rgb, 160, 120)
            .fps((30, 1))
            .build()
            .unwrap(),
    );
}
fn test_encode(video_info: &gst_video::VideoInfo) {
let mut h = gst_check::Harness::new("gifenc");
h.set_src_caps(video_info.to_caps().unwrap());
for pts in 0..5 {
let buffer = {
let mut buffer = gst::Buffer::with_size(video_info.size()).unwrap();
{
let buffer = buffer.get_mut().unwrap();
buffer.set_pts(gst::ClockTime::from_seconds(pts));
}
let mut vframe =
gst_video::VideoFrame::from_buffer_writable(buffer, &video_info).unwrap();
for v in vframe.plane_data_mut(0).unwrap() {
*v = 128;
}
vframe.into_buffer()
};
h.push(buffer.clone()).unwrap();
}
h.push_event(gst::Event::new_eos().build());
for _ in 0..6 {
// last frame is the GIF trailer
let buffer = h.pull().unwrap();
// Currently, every frame should be a full frame
assert!(!buffer.get_flags().contains(gst::BufferFlags::DELTA_UNIT))
}
}