diff --git a/ext/vp8/Makefile.am b/ext/vp8/Makefile.am
index 5985b0bbe6..09b8d5c0be 100644
--- a/ext/vp8/Makefile.am
+++ b/ext/vp8/Makefile.am
@@ -8,13 +8,11 @@ libgstvp8_la_SOURCES = \
 	plugin.c
 
 libgstvp8_la_CFLAGS = \
-	$(GST_PLUGINS_BAD_CFLAGS) \
 	$(GST_PLUGINS_BASE_CFLAGS) \
 	$(GST_BASE_CFLAGS) \
 	$(GST_CFLAGS) \
 	-DGST_USE_UNSTABLE_API
 libgstvp8_la_LIBADD = \
-	$(top_builddir)/gst-libs/gst/video/libgstbasevideo-@GST_API_VERSION@.la \
 	$(GST_PLUGINS_BASE_LIBS) -lgsttag-@GST_API_VERSION@ -lgstvideo-@GST_API_VERSION@ \
 	$(GST_BASE_LIBS) $(GST_LIBS) $(VPX_LIBS)
 libgstvp8_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
diff --git a/ext/vp8/gstvp8dec.c b/ext/vp8/gstvp8dec.c
index f00347a7bb..71683c1781 100644
--- a/ext/vp8/gstvp8dec.c
+++ b/ext/vp8/gstvp8dec.c
@@ -96,15 +96,13 @@ static void gst_vp8_dec_set_property (GObject * object, guint prop_id,
 static void gst_vp8_dec_get_property (GObject * object, guint prop_id,
     GValue * value, GParamSpec * pspec);
 
-static gboolean gst_vp8_dec_start (GstBaseVideoDecoder * decoder);
-static gboolean gst_vp8_dec_stop (GstBaseVideoDecoder * decoder);
-static gboolean gst_vp8_dec_set_format (GstBaseVideoDecoder * decoder,
-    GstVideoState * state);
-static gboolean gst_vp8_dec_reset (GstBaseVideoDecoder * decoder);
-static GstFlowReturn gst_vp8_dec_parse_data (GstBaseVideoDecoder * decoder,
-    gboolean at_eos);
-static GstFlowReturn gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder,
-    GstVideoFrameState * frame);
+static gboolean gst_vp8_dec_start (GstVideoDecoder * decoder);
+static gboolean gst_vp8_dec_stop (GstVideoDecoder * decoder);
+static gboolean gst_vp8_dec_set_format (GstVideoDecoder * decoder,
+    GstVideoCodecState * state);
+static gboolean gst_vp8_dec_reset (GstVideoDecoder * decoder, gboolean hard);
+static GstFlowReturn gst_vp8_dec_handle_frame (GstVideoDecoder * decoder,
+    GstVideoCodecFrame * frame);
 
 static GstStaticPadTemplate gst_vp8_dec_sink_template =
 GST_STATIC_PAD_TEMPLATE ("sink",
@@ -117,22 +115,36 @@ static GstStaticPadTemplate gst_vp8_dec_src_template =
 GST_STATIC_PAD_TEMPLATE ("src",
     GST_PAD_SRC,
     GST_PAD_ALWAYS,
-    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
     );
 
-#define gst_vp8_dec_parent_class parent_class
-G_DEFINE_TYPE (GstVP8Dec, gst_vp8_dec, GST_TYPE_BASE_VIDEO_DECODER);
+GST_BOILERPLATE (GstVP8Dec, gst_vp8_dec, GstVideoDecoder,
+    GST_TYPE_VIDEO_DECODER);
+
+static void
+gst_vp8_dec_base_init (gpointer g_class)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_vp8_dec_src_template);
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_vp8_dec_sink_template);
+
+  gst_element_class_set_details_simple (element_class,
+      "On2 VP8 Decoder",
+      "Codec/Decoder/Video",
+      "Decode VP8 video streams", "David Schleef ");
+}
 
 static void
 gst_vp8_dec_class_init (GstVP8DecClass * klass)
 {
   GObjectClass *gobject_class;
-  GstElementClass *element_class;
-  GstBaseVideoDecoderClass *base_video_decoder_class;
+  GstVideoDecoderClass *base_video_decoder_class;
 
   gobject_class = G_OBJECT_CLASS (klass);
-  base_video_decoder_class = GST_BASE_VIDEO_DECODER_CLASS (klass);
-  element_class = GST_ELEMENT_CLASS (klass);
+  base_video_decoder_class = GST_VIDEO_DECODER_CLASS (klass);
 
   gobject_class->set_property = gst_vp8_dec_set_property;
   gobject_class->get_property = gst_vp8_dec_get_property;
@@ -160,23 +172,11 @@ gst_vp8_dec_class_init (GstVP8DecClass * klass)
           0, 16, DEFAULT_NOISE_LEVEL, G_PARAM_READWRITE |
           G_PARAM_STATIC_STRINGS));
 
-  gst_element_class_add_pad_template (element_class,
-      gst_static_pad_template_get (&gst_vp8_dec_src_template));
-  gst_element_class_add_pad_template (element_class,
-      gst_static_pad_template_get (&gst_vp8_dec_sink_template));
-
-  gst_element_class_set_details_simple (element_class,
-      "On2 VP8 Decoder",
-      "Codec/Decoder/Video",
-      "Decode VP8 video streams", "David Schleef ");
-
   base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_vp8_dec_start);
   base_video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_vp8_dec_stop);
   base_video_decoder_class->reset = GST_DEBUG_FUNCPTR (gst_vp8_dec_reset);
   base_video_decoder_class->set_format =
       GST_DEBUG_FUNCPTR (gst_vp8_dec_set_format);
-  base_video_decoder_class->parse_data =
-      GST_DEBUG_FUNCPTR (gst_vp8_dec_parse_data);
   base_video_decoder_class->handle_frame =
       GST_DEBUG_FUNCPTR (gst_vp8_dec_handle_frame);
 
@@ -184,12 +184,12 @@ gst_vp8_dec_class_init (GstVP8DecClass * klass)
 }
 
 static void
-gst_vp8_dec_init (GstVP8Dec * gst_vp8_dec)
+gst_vp8_dec_init (GstVP8Dec * gst_vp8_dec, GstVP8DecClass * klass)
 {
-  GstBaseVideoDecoder *decoder = (GstBaseVideoDecoder *) gst_vp8_dec;
+  GstVideoDecoder *decoder = (GstVideoDecoder *) gst_vp8_dec;
 
   GST_DEBUG_OBJECT (gst_vp8_dec, "gst_vp8_dec_init");
-  decoder->packetized = TRUE;
+  gst_video_decoder_set_packetized (decoder, TRUE);
   gst_vp8_dec->post_processing = DEFAULT_POST_PROCESSING;
   gst_vp8_dec->post_processing_flags = DEFAULT_POST_PROCESSING_FLAGS;
   gst_vp8_dec->deblocking_level = DEFAULT_DEBLOCKING_LEVEL;
@@ -254,7 +254,7 @@ gst_vp8_dec_get_property (GObject * object, guint prop_id, GValue * value,
 }
 
 static gboolean
-gst_vp8_dec_start (GstBaseVideoDecoder * decoder)
+gst_vp8_dec_start (GstVideoDecoder * decoder)
 {
   GstVP8Dec *gst_vp8_dec = GST_VP8_DEC (decoder);
 
@@ -265,7 +265,7 @@ gst_vp8_dec_start (GstBaseVideoDecoder * decoder)
 }
 
 static gboolean
-gst_vp8_dec_stop (GstBaseVideoDecoder * base_video_decoder)
+gst_vp8_dec_stop (GstVideoDecoder * base_video_decoder)
 {
   GstVP8Dec *gst_vp8_dec = GST_VP8_DEC (base_video_decoder);
 
@@ -277,18 +277,22 @@ gst_vp8_dec_stop (GstBaseVideoDecoder * base_video_decoder)
 }
 
 static gboolean
-gst_vp8_dec_set_format (GstBaseVideoDecoder * decoder, GstVideoState * state)
+gst_vp8_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
 {
   GstVP8Dec *gst_vp8_dec = GST_VP8_DEC (decoder);
 
   GST_DEBUG_OBJECT (gst_vp8_dec, "set_format");
   gst_vp8_dec->decoder_inited = FALSE;
 
+  if (gst_vp8_dec->input_state)
+    gst_video_codec_state_unref (gst_vp8_dec->input_state);
+  gst_vp8_dec->input_state = gst_video_codec_state_ref (state);
+
   return TRUE;
 }
 
 static gboolean
-gst_vp8_dec_reset (GstBaseVideoDecoder * base_video_decoder)
+gst_vp8_dec_reset (GstVideoDecoder * base_video_decoder, gboolean hard)
 {
   GstVP8Dec *decoder;
 
@@ -303,61 +307,123 @@ gst_vp8_dec_reset (GstBaseVideoDecoder * base_video_decoder)
   return TRUE;
 }
 
-static GstFlowReturn
-gst_vp8_dec_parse_data (GstBaseVideoDecoder * decoder, gboolean at_eos)
-{
-  return GST_FLOW_OK;
-}
-
 static void
 gst_vp8_dec_send_tags (GstVP8Dec * dec)
 {
   GstTagList *list;
 
-  list = gst_tag_list_new_empty ();
+  list = gst_tag_list_new ();
   gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
       GST_TAG_VIDEO_CODEC, "VP8 video", NULL);
 
-  gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (dec),
-      gst_event_new_tag (list));
+  gst_element_found_tags_for_pad (GST_ELEMENT (dec),
+      GST_VIDEO_DECODER_SRC_PAD (dec), list);
 }
 
 static void
 gst_vp8_dec_image_to_buffer (GstVP8Dec * dec, const vpx_image_t * img,
     GstBuffer * buffer)
 {
-  int deststride, srcstride, height, width, line, comp;
-  guint8 *dest, *src;
-  GstVideoFrame frame;
-  GstVideoInfo *info = &GST_BASE_VIDEO_CODEC (dec)->info;
+  int stride, w, h, i;
+  guint8 *d;
+  GstVideoInfo *info = &dec->input_state->info;
 
-  if (!gst_video_frame_map (&frame, info, buffer, GST_MAP_WRITE)) {
-    GST_ERROR_OBJECT (dec, "Could not map video buffer");
-  }
+  d = GST_BUFFER_DATA (buffer) + GST_VIDEO_INFO_COMP_OFFSET (info, 0);
+  stride = GST_VIDEO_INFO_COMP_STRIDE (info, 0);
+  h = GST_VIDEO_INFO_COMP_HEIGHT (info, 0);
+  h = MIN (h, img->h);
+  w = GST_VIDEO_INFO_COMP_WIDTH (info, 0);
+  w = MIN (w, img->w);
 
-  for (comp = 0; comp < 3; comp++) {
-    dest = GST_VIDEO_FRAME_COMP_DATA (&frame, comp);
-    src = img->planes[comp];
-    width = GST_VIDEO_FRAME_COMP_WIDTH (&frame, comp);
-    height = GST_VIDEO_FRAME_COMP_HEIGHT (&frame, comp);
-    deststride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, comp);
-    srcstride = img->stride[comp];
+  for (i = 0; i < h; i++)
+    memcpy (d + i * stride,
+        img->planes[VPX_PLANE_Y] + i * img->stride[VPX_PLANE_Y], w);
 
-    /* FIXME (Edward) : Do a plane memcpy is srcstride == deststride instead
-     * of copying line by line */
-    for (line = 0; line < height; line++) {
-      memcpy (dest, src, width);
-      dest += deststride;
-      src += srcstride;
-    }
-  }
+  d = GST_BUFFER_DATA (buffer) + GST_VIDEO_INFO_COMP_OFFSET (info, 1);
+  stride = GST_VIDEO_INFO_COMP_STRIDE (info, 1);
+  h = GST_VIDEO_INFO_COMP_HEIGHT (info, 1);
+  h = MIN (h, img->h >> img->y_chroma_shift);
+  w = GST_VIDEO_INFO_COMP_WIDTH (info, 1);
+  w = MIN (w, img->w >> img->x_chroma_shift);
+  for (i = 0; i < h; i++)
+    memcpy (d + i * stride,
+        img->planes[VPX_PLANE_U] + i * img->stride[VPX_PLANE_U], w);
 
-  gst_video_frame_unmap (&frame);
+  d = GST_BUFFER_DATA (buffer) + GST_VIDEO_INFO_COMP_OFFSET (info, 2);
+  /* Same stride, height, width as above */
+  for (i = 0; i < h; i++)
+    memcpy (d + i * stride,
+        img->planes[VPX_PLANE_V] + i * img->stride[VPX_PLANE_V], w);
 }
 
 static GstFlowReturn
-gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder,
-    GstVideoFrameState * frame)
+open_codec (GstVP8Dec * dec, GstVideoCodecFrame * frame)
+{
+  int flags = 0;
+  vpx_codec_stream_info_t stream_info;
+  vpx_codec_caps_t caps;
+  GstVideoCodecState *state = dec->input_state;
+  vpx_codec_err_t status;
+  GstVideoCodecState *output_state;
+
+  memset (&stream_info, 0, sizeof (stream_info));
+  stream_info.sz = sizeof (stream_info);
+
+  status = vpx_codec_peek_stream_info (&vpx_codec_vp8_dx_algo,
+      GST_BUFFER_DATA (frame->input_buffer),
+      GST_BUFFER_SIZE (frame->input_buffer), &stream_info);
+
+  if (status != VPX_CODEC_OK || !stream_info.is_kf) {
+    GST_WARNING_OBJECT (dec, "No keyframe, skipping");
+    gst_video_decoder_finish_frame (GST_VIDEO_DECODER (dec), frame);
+    return GST_FLOW_OK;
+  }
+
+  output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec),
+      GST_VIDEO_FORMAT_I420, stream_info.w, stream_info.h, state);
+  gst_video_codec_state_unref (output_state);
+  gst_vp8_dec_send_tags (dec);
+
+  caps = vpx_codec_get_caps (&vpx_codec_vp8_dx_algo);
+
+  if (dec->post_processing) {
+    if (!(caps & VPX_CODEC_CAP_POSTPROC)) {
+      GST_WARNING_OBJECT (dec, "Decoder does not support post processing");
+    } else {
+      flags |= VPX_CODEC_USE_POSTPROC;
+    }
+  }
+
+  status =
+      vpx_codec_dec_init (&dec->decoder, &vpx_codec_vp8_dx_algo, NULL, flags);
+  if (status != VPX_CODEC_OK) {
+    GST_ELEMENT_ERROR (dec, LIBRARY, INIT,
+        ("Failed to initialize VP8 decoder"), ("%s",
+            gst_vpx_error_name (status)));
+    return GST_FLOW_ERROR;
+  }
+
+  if ((caps & VPX_CODEC_CAP_POSTPROC) && dec->post_processing) {
+    vp8_postproc_cfg_t pp_cfg = { 0, };
+
+    pp_cfg.post_proc_flag = dec->post_processing_flags;
+    pp_cfg.deblocking_level = dec->deblocking_level;
+    pp_cfg.noise_level = dec->noise_level;
+
+    status = vpx_codec_control (&dec->decoder, VP8_SET_POSTPROC, &pp_cfg);
+    if (status != VPX_CODEC_OK) {
+      GST_WARNING_OBJECT (dec, "Couldn't set postprocessing settings: %s",
+          gst_vpx_error_name (status));
+    }
+  }
+
+  dec->decoder_inited = TRUE;
+
+  return GST_FLOW_OK;
+}
+
+static GstFlowReturn
+gst_vp8_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
 {
   GstVP8Dec *dec;
   GstFlowReturn ret = GST_FLOW_OK;
@@ -366,87 +432,15 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder,
   vpx_image_t *img;
   long decoder_deadline = 0;
   GstClockTimeDiff deadline;
-  GstMapInfo map;
 
   GST_DEBUG_OBJECT (decoder, "handle_frame");
 
   dec = GST_VP8_DEC (decoder);
 
-  /* FIXME : Move this to a separate function for clarity */
-  if (!dec->decoder_inited) {
-    int flags = 0;
-    vpx_codec_stream_info_t stream_info;
-    vpx_codec_caps_t caps;
-    GstVideoState *state = &GST_BASE_VIDEO_CODEC (dec)->state;
+  if (!dec->decoder_inited)
+    ret = open_codec (dec, frame);
 
-    memset (&stream_info, 0, sizeof (stream_info));
-    stream_info.sz = sizeof (stream_info);
-
-    gst_buffer_map (frame->sink_buffer, &map, GST_MAP_READ);
-
-    status =
-        vpx_codec_peek_stream_info (&vpx_codec_vp8_dx_algo, map.data, map.size,
-        &stream_info);
-
-    gst_buffer_unmap (frame->sink_buffer, &map);
-
-    if (status != VPX_CODEC_OK || !stream_info.is_kf) {
-      GST_WARNING_OBJECT (decoder, "No keyframe, skipping");
-      gst_base_video_decoder_finish_frame (decoder, frame);
-      return GST_FLOW_OK;
-    }
-
-    state->width = stream_info.w;
-    state->height = stream_info.h;
-    state->format = GST_VIDEO_FORMAT_I420;
-    if (state->par_n == 0 || state->par_d == 0) {
-      state->par_n = 1;
-      state->par_d = 1;
-    }
-    gst_vp8_dec_send_tags (dec);
-    gst_base_video_decoder_set_src_caps (decoder);
-
-    caps = vpx_codec_get_caps (&vpx_codec_vp8_dx_algo);
-
-    if (dec->post_processing) {
-      if (!(caps & VPX_CODEC_CAP_POSTPROC)) {
-        GST_WARNING_OBJECT (decoder,
-            "Decoder does not support post processing");
-      } else {
-        flags |= VPX_CODEC_USE_POSTPROC;
-      }
-    }
-
-    status =
-        vpx_codec_dec_init (&dec->decoder, &vpx_codec_vp8_dx_algo, NULL, flags);
-    if (status != VPX_CODEC_OK) {
-      GST_ELEMENT_ERROR (dec, LIBRARY, INIT,
-          ("Failed to initialize VP8 decoder"), ("%s",
-              gst_vpx_error_name (status)));
-      return GST_FLOW_ERROR;
-    }
-
-    if ((caps & VPX_CODEC_CAP_POSTPROC) && dec->post_processing) {
-      vp8_postproc_cfg_t pp_cfg = { 0, };
-
-      pp_cfg.post_proc_flag = dec->post_processing_flags;
-      pp_cfg.deblocking_level = dec->deblocking_level;
-      pp_cfg.noise_level = dec->noise_level;
-
-      status = vpx_codec_control (&dec->decoder, VP8_SET_POSTPROC, &pp_cfg);
-      if (status != VPX_CODEC_OK) {
-        GST_WARNING_OBJECT (dec, "Couldn't set postprocessing settings: %s",
-            gst_vpx_error_name (status));
-      }
-    }
-
-    dec->decoder_inited = TRUE;
-  }
-
-  if (!GST_BUFFER_FLAG_IS_SET (frame->sink_buffer, GST_BUFFER_FLAG_DELTA_UNIT))
-    gst_base_video_decoder_set_sync_point (decoder);
-
-  deadline = gst_base_video_decoder_get_max_decode_time (decoder, frame);
+  deadline = gst_video_decoder_get_max_decode_time (decoder, frame);
   if (deadline < 0) {
     decoder_deadline = 1;
   } else if (deadline == G_MAXINT64) {
@@ -455,14 +449,9 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder,
     decoder_deadline = MAX (1, deadline / GST_MSECOND);
   }
 
-  gst_buffer_map (frame->sink_buffer, &map, GST_MAP_READ);
-
-  status =
-      vpx_codec_decode (&dec->decoder, map.data, map.size, NULL,
-      decoder_deadline);
-
-  gst_buffer_unmap (frame->sink_buffer, &map);
-
+  status = vpx_codec_decode (&dec->decoder,
+      GST_BUFFER_DATA (frame->input_buffer),
+      GST_BUFFER_SIZE (frame->input_buffer), NULL, decoder_deadline);
   if (status) {
     GST_ELEMENT_ERROR (decoder, LIBRARY, ENCODE,
         ("Failed to decode frame"), ("%s", gst_vpx_error_name (status)));
@@ -474,15 +463,15 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder,
     if (deadline < 0) {
       GST_LOG_OBJECT (dec, "Skipping late frame (%f s past deadline)",
           (double) -deadline / GST_SECOND);
-      gst_base_video_decoder_drop_frame (decoder, frame);
+      gst_video_decoder_drop_frame (decoder, frame);
     } else {
-      ret = gst_base_video_decoder_alloc_src_frame (decoder, frame);
+      ret = gst_video_decoder_alloc_output_frame (decoder, frame);
 
       if (ret == GST_FLOW_OK) {
-        gst_vp8_dec_image_to_buffer (dec, img, frame->src_buffer);
-        ret = gst_base_video_decoder_finish_frame (decoder, frame);
+        gst_vp8_dec_image_to_buffer (dec, img, frame->output_buffer);
+        ret = gst_video_decoder_finish_frame (decoder, frame);
       } else {
-        gst_base_video_decoder_finish_frame (decoder, frame);
+        gst_video_decoder_finish_frame (decoder, frame);
       }
     }
 
@@ -494,8 +483,8 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder,
     }
   } else {
     /* Invisible frame */
-    frame->decode_only = 1;
-    gst_base_video_decoder_finish_frame (decoder, frame);
+    GST_VIDEO_CODEC_FRAME_SET_DECODE_ONLY (frame);
+    gst_video_decoder_finish_frame (decoder, frame);
   }
 
   return ret;
diff --git a/ext/vp8/gstvp8dec.h b/ext/vp8/gstvp8dec.h
index f68c85f2e1..71e2cd97ca 100644
--- a/ext/vp8/gstvp8dec.h
+++ b/ext/vp8/gstvp8dec.h
@@ -24,7 +24,7 @@
 #define __GST_VP8_DEC_H__
 
 #include 
-#include 
+#include 
 
 /* FIXME: Undef HAVE_CONFIG_H because vpx_codec.h uses it,
  * which causes compilation failures */
@@ -53,7 +53,7 @@ typedef struct _GstVP8DecClass GstVP8DecClass;
 
 struct _GstVP8Dec
 {
-  GstBaseVideoDecoder base_video_decoder;
+  GstVideoDecoder base_video_decoder;
 
   /* < private > */
   vpx_codec_ctx_t decoder;
@@ -66,11 +66,13 @@ struct _GstVP8Dec
   enum vp8_postproc_level post_processing_flags;
   gint deblocking_level;
   gint noise_level;
+
+  GstVideoCodecState *input_state;
 };
 
 struct _GstVP8DecClass
 {
-  GstBaseVideoDecoderClass base_video_decoder_class;
+  GstVideoDecoderClass base_video_decoder_class;
 };
 
 GType gst_vp8_dec_get_type (void);
diff --git a/ext/vp8/gstvp8enc.c b/ext/vp8/gstvp8enc.c
index 7c8b2db136..e04aae80bb 100644
--- a/ext/vp8/gstvp8enc.c
+++ b/ext/vp8/gstvp8enc.c
@@ -219,23 +219,23 @@ static void gst_vp8_enc_set_property (GObject * object, guint prop_id,
 static void gst_vp8_enc_get_property (GObject * object, guint prop_id,
     GValue * value, GParamSpec * pspec);
 
-static gboolean gst_vp8_enc_start (GstBaseVideoEncoder * encoder);
-static gboolean gst_vp8_enc_stop (GstBaseVideoEncoder * encoder);
-static gboolean gst_vp8_enc_set_format (GstBaseVideoEncoder *
-    base_video_encoder, GstVideoInfo * info);
-static gboolean gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder);
-static GstFlowReturn gst_vp8_enc_handle_frame (GstBaseVideoEncoder *
-    base_video_encoder, GstVideoFrameState * frame);
-static GstFlowReturn gst_vp8_enc_shape_output (GstBaseVideoEncoder * encoder,
-    GstVideoFrameState * frame);
-static gboolean gst_vp8_enc_sink_event (GstBaseVideoEncoder *
-    base_video_encoder, GstEvent * event);
+static gboolean gst_vp8_enc_start (GstVideoEncoder * encoder);
+static gboolean gst_vp8_enc_stop (GstVideoEncoder * encoder);
+static gboolean gst_vp8_enc_set_format (GstVideoEncoder *
+    video_encoder, GstVideoCodecState * state);
+static gboolean gst_vp8_enc_finish (GstVideoEncoder * video_encoder);
+static GstFlowReturn gst_vp8_enc_handle_frame (GstVideoEncoder *
+    video_encoder, GstVideoCodecFrame * frame);
+static GstFlowReturn gst_vp8_enc_pre_push (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame);
+static gboolean gst_vp8_enc_sink_event (GstVideoEncoder *
+    video_encoder, GstEvent * event);
 
 static GstStaticPadTemplate gst_vp8_enc_sink_template =
 GST_STATIC_PAD_TEMPLATE ("sink",
     GST_PAD_SINK,
     GST_PAD_ALWAYS,
-    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
    );
 
 static GstStaticPadTemplate gst_vp8_enc_src_template =
@@ -245,45 +245,61 @@ GST_STATIC_PAD_TEMPLATE ("src",
     GST_STATIC_CAPS ("video/x-vp8")
     );
 
-#define gst_vp8_enc_parent_class parent_class
-G_DEFINE_TYPE_WITH_CODE (GstVP8Enc, gst_vp8_enc, GST_TYPE_BASE_VIDEO_ENCODER,
-    G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL);
-    G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL););
+static void
+do_init (GType vp8enc_type)
+{
+  static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL };
+  const GInterfaceInfo preset_interface_info = {
+    NULL,                       /* interface_init */
+    NULL,                       /* interface_finalize */
+    NULL                        /* interface_data */
+  };
+  g_type_add_interface_static (vp8enc_type, GST_TYPE_TAG_SETTER,
+      &tag_setter_info);
+  g_type_add_interface_static (vp8enc_type, GST_TYPE_PRESET,
+      &preset_interface_info);
+}
+
+GST_BOILERPLATE_FULL (GstVP8Enc, gst_vp8_enc, GstVideoEncoder,
+    GST_TYPE_VIDEO_ENCODER, do_init);
 
 static void
-gst_vp8_enc_class_init (GstVP8EncClass * klass)
+gst_vp8_enc_base_init (gpointer g_class)
 {
-  GObjectClass *gobject_class;
-  GstElementClass *element_class;
-  GstBaseVideoEncoderClass *base_video_encoder_class;
+  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
 
-
-  gobject_class = G_OBJECT_CLASS (klass);
-  element_class = GST_ELEMENT_CLASS (klass);
-  base_video_encoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass);
-
-  gobject_class->set_property = gst_vp8_enc_set_property;
-  gobject_class->get_property = gst_vp8_enc_get_property;
-  gobject_class->finalize = gst_vp8_enc_finalize;
-
-  gst_element_class_add_pad_template (element_class,
-      gst_static_pad_template_get (&gst_vp8_enc_src_template));
-  gst_element_class_add_pad_template (element_class,
-      gst_static_pad_template_get (&gst_vp8_enc_sink_template));
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_vp8_enc_src_template);
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_vp8_enc_sink_template);
 
   gst_element_class_set_details_simple (element_class,
       "On2 VP8 Encoder",
       "Codec/Encoder/Video",
       "Encode VP8 video streams", "David Schleef ");
+}
 
-  base_video_encoder_class->start = gst_vp8_enc_start;
-  base_video_encoder_class->stop = gst_vp8_enc_stop;
-  base_video_encoder_class->handle_frame = gst_vp8_enc_handle_frame;
-  base_video_encoder_class->set_format = gst_vp8_enc_set_format;
-  base_video_encoder_class->finish = gst_vp8_enc_finish;
-  base_video_encoder_class->shape_output = gst_vp8_enc_shape_output;
-  base_video_encoder_class->event = gst_vp8_enc_sink_event;
+static void
+gst_vp8_enc_class_init (GstVP8EncClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstVideoEncoderClass *video_encoder_class;
+
+  gobject_class = G_OBJECT_CLASS (klass);
+  video_encoder_class = GST_VIDEO_ENCODER_CLASS (klass);
+
+  gobject_class->set_property = gst_vp8_enc_set_property;
+  gobject_class->get_property = gst_vp8_enc_get_property;
+  gobject_class->finalize = gst_vp8_enc_finalize;
+
+  video_encoder_class->start = gst_vp8_enc_start;
+  video_encoder_class->stop = gst_vp8_enc_stop;
+  video_encoder_class->handle_frame = gst_vp8_enc_handle_frame;
+  video_encoder_class->set_format = gst_vp8_enc_set_format;
+  video_encoder_class->finish = gst_vp8_enc_finish;
+  video_encoder_class->pre_push = gst_vp8_enc_pre_push;
+  video_encoder_class->sink_event = gst_vp8_enc_sink_event;
 
   g_object_class_install_property (gobject_class, PROP_BITRATE,
       g_param_spec_int ("bitrate", "Bit rate",
@@ -430,7 +446,7 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass)
 }
 
 static void
-gst_vp8_enc_init (GstVP8Enc * gst_vp8_enc)
+gst_vp8_enc_init (GstVP8Enc * gst_vp8_enc, GstVP8EncClass * klass)
 {
   GST_DEBUG_OBJECT (gst_vp8_enc, "init");
 
@@ -651,21 +667,21 @@ gst_vp8_enc_get_property (GObject * object, guint prop_id, GValue * value,
 }
 
 static gboolean
-gst_vp8_enc_start (GstBaseVideoEncoder * base_video_encoder)
+gst_vp8_enc_start (GstVideoEncoder * video_encoder)
 {
-  GST_DEBUG_OBJECT (base_video_encoder, "start");
+  GST_DEBUG_OBJECT (video_encoder, "start");
 
   return TRUE;
 }
 
 static gboolean
-gst_vp8_enc_stop (GstBaseVideoEncoder * base_video_encoder)
+gst_vp8_enc_stop (GstVideoEncoder * video_encoder)
 {
   GstVP8Enc *encoder;
 
-  GST_DEBUG_OBJECT (base_video_encoder, "stop");
+  GST_DEBUG_OBJECT (video_encoder, "stop");
 
-  encoder = GST_VP8_ENC (base_video_encoder);
+  encoder = GST_VP8_ENC (video_encoder);
 
   if (encoder->inited) {
     vpx_codec_destroy (&encoder->encoder);
@@ -689,8 +705,8 @@ gst_vp8_enc_stop (GstBaseVideoEncoder * base_video_encoder)
 }
 
 static gboolean
-gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
-    GstVideoInfo * info)
+gst_vp8_enc_set_format (GstVideoEncoder * video_encoder,
+    GstVideoCodecState * state)
 {
   GstVP8Enc *encoder;
   vpx_codec_enc_cfg_t cfg;
@@ -698,13 +714,15 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
   vpx_image_t *image;
   guint8 *data = NULL;
   GstCaps *caps;
-  gboolean ret;
+  gboolean ret = TRUE;
+  GstVideoInfo *info = &state->info;
+  GstVideoCodecState *output_state;
 
-  encoder = GST_VP8_ENC (base_video_encoder);
-  GST_DEBUG_OBJECT (base_video_encoder, "set_format");
+  encoder = GST_VP8_ENC (video_encoder);
+  GST_DEBUG_OBJECT (video_encoder, "set_format");
 
   if (encoder->inited) {
-    GST_DEBUG_OBJECT (base_video_encoder, "refusing renegotiation");
+    GST_DEBUG_OBJECT (video_encoder, "refusing renegotiation");
     return FALSE;
   }
 
@@ -716,14 +734,10 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
     return FALSE;
   }
 
-  /* Scale default bitrate to our size */
-  cfg.rc_target_bitrate = gst_util_uint64_scale (cfg.rc_target_bitrate,
-      info->width * info->height, cfg.g_w * cfg.g_h);
-
-  cfg.g_w = info->width;
-  cfg.g_h = info->height;
-  cfg.g_timebase.num = info->fps_d;
-  cfg.g_timebase.den = info->fps_n;
+  cfg.g_w = GST_VIDEO_INFO_WIDTH (info);
+  cfg.g_h = GST_VIDEO_INFO_HEIGHT (info);
+  cfg.g_timebase.num = GST_VIDEO_INFO_FPS_D (info);
+  cfg.g_timebase.den = GST_VIDEO_INFO_FPS_N (info);
 
   cfg.g_error_resilient = encoder->error_resilient;
   cfg.g_lag_in_frames = encoder->max_latency;
@@ -743,6 +757,7 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
   } else {
     cfg.rc_min_quantizer = (gint) (63 - encoder->quality * 6.2);
     cfg.rc_max_quantizer = (gint) (63 - encoder->quality * 6.2);
+    cfg.rc_target_bitrate = encoder->bitrate;
   }
   cfg.rc_dropframe_thresh = encoder->drop_frame;
   cfg.rc_resize_allowed = encoder->resize_allowed;
@@ -823,11 +838,17 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
   cfg.g_lag_in_frames = encoder->lag_in_frames;
 
-  gst_base_video_encoder_set_latency (base_video_encoder, 0,
+  gst_video_encoder_set_latency (video_encoder, 0,
       gst_util_uint64_scale (encoder->max_latency,
-          info->fps_d * GST_SECOND, info->fps_n));
+          GST_VIDEO_INFO_FPS_D (info) * GST_SECOND,
+          GST_VIDEO_INFO_FPS_N (info)));
   encoder->inited = TRUE;
 
+  /* Store input state */
+  if (encoder->input_state)
+    gst_video_codec_state_unref (encoder->input_state);
+  encoder->input_state = gst_video_codec_state_ref (state);
+
   /* prepare cached image buffer setup */
   image = &encoder->image;
   memset (image, 0, sizeof (*image));
@@ -835,8 +856,8 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
   image->fmt = VPX_IMG_FMT_I420;
   image->bps = 12;
   image->x_chroma_shift = image->y_chroma_shift = 1;
-  image->w = image->d_w = info->width;
-  image->h = image->d_h = info->height;
+  image->w = image->d_w = GST_VIDEO_INFO_WIDTH (info);
+  image->h = image->d_h = GST_VIDEO_INFO_HEIGHT (info);
 
   image->stride[VPX_PLANE_Y] = GST_VIDEO_INFO_COMP_STRIDE (info, 0);
   image->stride[VPX_PLANE_U] = GST_VIDEO_INFO_COMP_STRIDE (info, 1);
@@ -845,19 +866,13 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
   image->planes[VPX_PLANE_U] = data + GST_VIDEO_INFO_COMP_OFFSET (info, 1);
   image->planes[VPX_PLANE_V] = data + GST_VIDEO_INFO_COMP_OFFSET (info, 2);
 
-  caps = gst_caps_new_simple ("video/x-vp8",
-      "width", G_TYPE_INT, info->width,
-      "height", G_TYPE_INT, info->height,
-      "framerate", GST_TYPE_FRACTION, info->fps_n,
-      info->fps_d,
-      "pixel-aspect-ratio", GST_TYPE_FRACTION, info->par_n, info->par_d, NULL);
+  caps = gst_caps_new_simple ("video/x-vp8", NULL);
   {
     GstStructure *s;
     GstBuffer *stream_hdr, *vorbiscomment;
     const GstTagList *iface_tags;
     GValue array = { 0, };
     GValue value = { 0, };
-    GstMapInfo map;
 
     s = gst_caps_get_structure (caps, 0);
 
     /* put buffers in a fixed list */
@@ -866,38 +881,34 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
 
     /* Create Ogg stream-info */
     stream_hdr = gst_buffer_new_and_alloc (26);
-    gst_buffer_map (stream_hdr, &map, GST_MAP_WRITE);
-    data = map.data;
+    data = GST_BUFFER_DATA (stream_hdr);
 
     GST_WRITE_UINT8 (data, 0x4F);
     GST_WRITE_UINT32_BE (data + 1, 0x56503830); /* "VP80" */
     GST_WRITE_UINT8 (data + 5, 0x01);   /* stream info header */
     GST_WRITE_UINT8 (data + 6, 1);      /* Major version 1 */
     GST_WRITE_UINT8 (data + 7, 0);      /* Minor version 0 */
-    GST_WRITE_UINT16_BE (data + 8, info->width);
-    GST_WRITE_UINT16_BE (data + 10, info->height);
-    GST_WRITE_UINT24_BE (data + 12, info->par_n);
-    GST_WRITE_UINT24_BE (data + 15, info->par_d);
-    GST_WRITE_UINT32_BE (data + 18, info->fps_n);
-    GST_WRITE_UINT32_BE (data + 22, info->fps_d);
+    GST_WRITE_UINT16_BE (data + 8, GST_VIDEO_INFO_WIDTH (info));
+    GST_WRITE_UINT16_BE (data + 10, GST_VIDEO_INFO_HEIGHT (info));
+    GST_WRITE_UINT24_BE (data + 12, GST_VIDEO_INFO_PAR_N (info));
+    GST_WRITE_UINT24_BE (data + 15, GST_VIDEO_INFO_PAR_D (info));
+    GST_WRITE_UINT32_BE (data + 18, GST_VIDEO_INFO_FPS_N (info));
+    GST_WRITE_UINT32_BE (data + 22, GST_VIDEO_INFO_FPS_D (info));
 
-    gst_buffer_unmap (stream_hdr, &map);
-
-    GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_HEADER);
+    GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
     gst_value_set_buffer (&value, stream_hdr);
     gst_value_array_append_value (&array, &value);
     g_value_unset (&value);
     gst_buffer_unref (stream_hdr);
 
-    iface_tags =
-        gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
+    iface_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (video_encoder));
     if (iface_tags) {
       vorbiscomment =
           gst_tag_list_to_vorbiscomment_buffer (iface_tags,
          (const guint8 *) "OVP80\2 ", 7,
          "Encoded with GStreamer vp8enc " PACKAGE_VERSION);
-      GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_HEADER);
+      GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);
 
       g_value_init (&value, GST_TYPE_BUFFER);
       gst_value_set_buffer (&value, vorbiscomment);
@@ -909,9 +920,9 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
     gst_structure_set_value (s, "streamheader", &array);
     g_value_unset (&array);
   }
-
-  ret = gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (encoder), caps);
-  gst_caps_unref (caps);
+  output_state =
+      gst_video_encoder_set_output_state (video_encoder, caps, state);
+  gst_video_codec_state_unref (output_state);
 
   return ret;
 }
@@ -921,12 +932,12 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
 {
   vpx_codec_iter_t iter = NULL;
   const vpx_codec_cx_pkt_t *pkt;
-  GstBaseVideoEncoder *base_video_encoder;
+  GstVideoEncoder *video_encoder;
   GstVP8EncCoderHook *hook;
-  GstVideoFrameState *frame;
+  GstVideoCodecFrame *frame;
   GstFlowReturn ret = GST_FLOW_OK;
 
-  base_video_encoder = GST_BASE_VIDEO_ENCODER (encoder);
+  video_encoder = GST_VIDEO_ENCODER (encoder);
 
   pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
   while (pkt != NULL) {
@@ -943,12 +954,12 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
       g_byte_array_append (encoder->first_pass_cache_content,
          pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz);
 
-      frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
+      frame = gst_video_encoder_get_oldest_frame (video_encoder);
       if (frame != NULL) {
         buffer = gst_buffer_new ();
-        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_LIVE);
-        frame->src_buffer = buffer;
-        gst_base_video_encoder_finish_frame (base_video_encoder, frame);
+        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_PREROLL);
+        frame->output_buffer = buffer;
+        gst_video_encoder_finish_frame (video_encoder, frame);
       }
 
       pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
@@ -960,15 +971,17 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
     }
 
     invisible = (pkt->data.frame.flags & VPX_FRAME_IS_INVISIBLE) != 0;
-    frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
+    frame = gst_video_encoder_get_oldest_frame (video_encoder);
     g_assert (frame != NULL);
-    frame->is_sync_point = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
+    if ((pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0)
+      GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
+    else
+      GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
     hook = frame->coder_hook;
 
-    /* FIXME : It would be nice to avoid the memory copy ... */
-    buffer =
-        gst_buffer_new_wrapped (g_memdup (pkt->data.frame.buf,
-            pkt->data.frame.sz), pkt->data.frame.sz);
+    buffer = gst_buffer_new_and_alloc (pkt->data.frame.sz);
+
+    memcpy (GST_BUFFER_DATA (buffer), pkt->data.frame.buf, pkt->data.frame.sz);
 
     if (hook->image)
       g_slice_free (vpx_image_t, hook->image);
@@ -977,8 +990,8 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
     if (invisible) {
       hook->invisible = g_list_append (hook->invisible, buffer);
     } else {
-      frame->src_buffer = buffer;
-      ret = gst_base_video_encoder_finish_frame (base_video_encoder, frame);
+      frame->output_buffer = buffer;
+      ret = gst_video_encoder_finish_frame (video_encoder, frame);
     }
 
     pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
@@ -988,15 +1001,15 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
 }
 
 static GstFlowReturn
-gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
+gst_vp8_enc_finish (GstVideoEncoder * video_encoder)
 {
   GstVP8Enc *encoder;
   int flags = 0;
   vpx_codec_err_t status;
 
-  GST_DEBUG_OBJECT (base_video_encoder, "finish");
+  GST_DEBUG_OBJECT (video_encoder, "finish");
 
-  encoder = GST_VP8_ENC (base_video_encoder);
+  encoder = GST_VP8_ENC (video_encoder);
 
   status =
       vpx_codec_encode (&encoder->encoder, NULL, encoder->n_frames, 1, flags,
@@ -1030,48 +1043,40 @@ static vpx_image_t *
 gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstBuffer * buffer)
 {
   vpx_image_t *image = g_slice_new (vpx_image_t);
-  GstVideoFrame frame;
+  guint8 *data = GST_BUFFER_DATA (buffer);
 
   memcpy (image, &enc->image, sizeof (*image));
 
-  gst_video_frame_map (&frame, &GST_BASE_VIDEO_CODEC (enc)->info,
-      buffer, GST_MAP_READ);
-
-  image->img_data = frame.data[0];
-  image->planes[VPX_PLANE_Y] = frame.data[0];
-  image->planes[VPX_PLANE_U] = frame.data[1];
-  image->planes[VPX_PLANE_V] = frame.data[2];
-
-  /* FIXME : We should only unmap when we're done with it */
-
-  gst_video_frame_unmap (&frame);
+  image->img_data = data;
+  image->planes[VPX_PLANE_Y] += (data - (guint8 *) NULL);
+  image->planes[VPX_PLANE_U] += (data - (guint8 *) NULL);
+  image->planes[VPX_PLANE_V] += (data - (guint8 *) NULL);
 
   return image;
 }
 
 static GstFlowReturn
-gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
-    GstVideoFrameState * frame)
+gst_vp8_enc_handle_frame (GstVideoEncoder * video_encoder,
+    GstVideoCodecFrame * frame)
 {
   GstVP8Enc *encoder;
-  const GstVideoState *state;
   vpx_codec_err_t status;
   int flags = 0;
   vpx_image_t *image;
   GstVP8EncCoderHook *hook;
   int quality;
 
-  GST_DEBUG_OBJECT (base_video_encoder, "handle_frame");
+  GST_DEBUG_OBJECT (video_encoder, "handle_frame");
 
-  encoder = GST_VP8_ENC (base_video_encoder);
+  encoder = GST_VP8_ENC (video_encoder);
 
-  state = gst_base_video_encoder_get_state (base_video_encoder);
   encoder->n_frames++;
 
-  GST_DEBUG_OBJECT (base_video_encoder, "size %d %d", state->width,
-      state->height);
+  GST_DEBUG_OBJECT (video_encoder, "size %d %d",
+      GST_VIDEO_INFO_WIDTH (&encoder->input_state->info),
+      GST_VIDEO_INFO_HEIGHT (&encoder->input_state->info));
 
-  image = gst_vp8_enc_buffer_to_image (encoder, frame->sink_buffer);
+  image = gst_vp8_enc_buffer_to_image (encoder, frame->input_buffer);
 
   hook = g_slice_new0 (GstVP8EncCoderHook);
   hook->image = image;
@@ -1079,7 +1084,7 @@ gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
   frame->coder_hook_destroy_notify =
       (GDestroyNotify) gst_vp8_enc_coder_hook_free;
 
-  if (frame->force_keyframe) {
+  if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame)) {
     flags |= VPX_EFLAG_FORCE_KF;
   }
 
@@ -1112,22 +1117,22 @@ _to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist)
 }
 
 static GstFlowReturn
-gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
-    GstVideoFrameState * frame)
+gst_vp8_enc_pre_push (GstVideoEncoder * video_encoder,
+    GstVideoCodecFrame * frame)
 {
   GstVP8Enc *encoder;
   GstBuffer *buf;
-  const GstVideoState *state;
-  GstFlowReturn ret;
+  GstFlowReturn ret = GST_FLOW_OK;
   GstVP8EncCoderHook *hook = frame->coder_hook;
   GList *l;
   gint inv_count;
+  GstVideoInfo *info;
 
-  GST_DEBUG_OBJECT (base_video_encoder, "shape_output");
+  GST_DEBUG_OBJECT (video_encoder, "pre_push");
 
-  encoder = GST_VP8_ENC (base_video_encoder);
+  encoder = GST_VP8_ENC (video_encoder);
 
-  state = gst_base_video_encoder_get_state (base_video_encoder);
+  info = &encoder->input_state->info;
 
   g_assert (hook != NULL);
 
@@ -1135,7 +1140,8 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
     buf = l->data;
     l->data = NULL;
 
-    if (l == hook->invisible && frame->is_sync_point) {
+    /* FIXME : All of this should have already been handled by base classes, no ? */
+    if (l == hook->invisible && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
       GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
       encoder->keyframe_distance = 0;
     } else {
@@ -1143,16 +1149,18 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
       encoder->keyframe_distance++;
     }
 
-    GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->src_buffer);
+    GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->output_buffer);
     GST_BUFFER_DURATION (buf) = 0;
     GST_BUFFER_OFFSET_END (buf) =
        _to_granulepos (frame->presentation_frame_number + 1,
        inv_count, encoder->keyframe_distance);
     GST_BUFFER_OFFSET (buf) =
        gst_util_uint64_scale (frame->presentation_frame_number + 1,
-        GST_SECOND * state->fps_d, state->fps_n);
+        GST_SECOND * GST_VIDEO_INFO_FPS_D (info), GST_VIDEO_INFO_FPS_N (info));
 
-    ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
+    gst_buffer_set_caps (buf,
+        GST_PAD_CAPS (GST_VIDEO_ENCODER_SRC_PAD (video_encoder)));
+    ret = gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (video_encoder), buf);
 
     if (ret != GST_FLOW_OK) {
       GST_WARNING_OBJECT (encoder, "flow error %d", ret);
@@ -1160,10 +1168,10 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
     }
   }
 
-  buf = frame->src_buffer;
-  frame->src_buffer = NULL;
+  buf = frame->output_buffer;
 
-  if (!hook->invisible && frame->is_sync_point) {
+  /* FIXME : All of this should have already been handled by base classes, no ? */
+  if (!hook->invisible && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
     GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
     encoder->keyframe_distance = 0;
   } else {
@@ -1176,25 +1184,22 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
       encoder->keyframe_distance);
   GST_BUFFER_OFFSET (buf) =
       gst_util_uint64_scale (frame->presentation_frame_number + 1,
-      GST_SECOND * state->fps_d, state->fps_n);
+      GST_SECOND * GST_VIDEO_INFO_FPS_D (info), GST_VIDEO_INFO_FPS_N (info));
 
-  GST_LOG_OBJECT (base_video_encoder, "src ts: %" GST_TIME_FORMAT,
+  GST_LOG_OBJECT (video_encoder, "src ts: %" GST_TIME_FORMAT,
       GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
 
-  ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
-  if (ret != GST_FLOW_OK) {
-    GST_WARNING_OBJECT (encoder, "flow error %d", ret);
-  }
-
 done:
   return ret;
 }
 
 static gboolean
-gst_vp8_enc_sink_event (GstBaseVideoEncoder * benc, GstEvent * event)
+gst_vp8_enc_sink_event (GstVideoEncoder * benc, GstEvent * event)
 {
   GstVP8Enc *enc = GST_VP8_ENC (benc);
 
+  /* FIXME : Move this to base encoder class */
+
   if (GST_EVENT_TYPE (event) == GST_EVENT_TAG) {
     GstTagList *list;
     GstTagSetter *setter = GST_TAG_SETTER (enc);
diff --git a/ext/vp8/gstvp8enc.h b/ext/vp8/gstvp8enc.h
index 3b01512978..22e5b1a14e 100644
--- a/ext/vp8/gstvp8enc.h
+++ b/ext/vp8/gstvp8enc.h
@@ -23,7 +23,7 @@
 #define __GST_VP8_ENC_H__
 
 #include 
-#include 
+#include 
 
 /* FIXME: Undef HAVE_CONFIG_H because vpx_codec.h uses it,
  * which causes compilation failures */
@@ -52,7 +52,7 @@ typedef struct _GstVP8EncClass GstVP8EncClass;
 
 struct _GstVP8Enc
 {
-  GstBaseVideoEncoder base_video_encoder;
+  GstVideoEncoder base_video_encoder;
 
   /* < private > */
   vpx_codec_ctx_t encoder;
@@ -93,11 +93,13 @@ struct _GstVP8Enc
 
   int n_frames;
   int keyframe_distance;
+
+  GstVideoCodecState *input_state;
 };
 
 struct _GstVP8EncClass
 {
-  GstBaseVideoEncoderClass base_video_encoder_class;
+  GstVideoEncoderClass base_video_encoder_class;
 };
 
 GType gst_vp8_enc_get_type (void);