From 1d3996325a5997abadd3ccd43f3dc45a2eb353ba Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?=
Date: Mon, 28 May 2012 16:05:21 +0200
Subject: [PATCH] vp8: Port to 0.11 again

---
 ext/vp8/gstvp8dec.c | 146 +++++++++++++++++++++++++++-----------------
 ext/vp8/gstvp8enc.c | 114 +++++++++++++++++-----------------
 2 files changed, 148 insertions(+), 112 deletions(-)

diff --git a/ext/vp8/gstvp8dec.c b/ext/vp8/gstvp8dec.c
index 7de1829d18..2ef81adf39 100644
--- a/ext/vp8/gstvp8dec.c
+++ b/ext/vp8/gstvp8dec.c
@@ -48,6 +48,9 @@
 #include "gstvp8dec.h"
 #include "gstvp8utils.h"
 
+#include <gst/video/gstvideometa.h>
+#include <gst/video/gstvideopool.h>
+
 GST_DEBUG_CATEGORY_STATIC (gst_vp8dec_debug);
 #define GST_CAT_DEFAULT gst_vp8dec_debug
 
@@ -103,6 +106,8 @@ static gboolean gst_vp8_dec_set_format (GstVideoDecoder * decoder,
 static gboolean gst_vp8_dec_reset (GstVideoDecoder * decoder, gboolean hard);
 static GstFlowReturn gst_vp8_dec_handle_frame (GstVideoDecoder * decoder,
     GstVideoCodecFrame * frame);
+static gboolean gst_vp8_dec_decide_allocation (GstVideoDecoder * decoder,
+    GstQuery * query);
 
 static GstStaticPadTemplate gst_vp8_dec_sink_template =
 GST_STATIC_PAD_TEMPLATE ("sink",
@@ -115,35 +120,21 @@ static GstStaticPadTemplate gst_vp8_dec_src_template =
 GST_STATIC_PAD_TEMPLATE ("src",
     GST_PAD_SRC,
     GST_PAD_ALWAYS,
-    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
     );
 
-GST_BOILERPLATE (GstVP8Dec, gst_vp8_dec, GstVideoDecoder,
-    GST_TYPE_VIDEO_DECODER);
-
-static void
-gst_vp8_dec_base_init (gpointer g_class)
-{
-  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
-  gst_element_class_add_static_pad_template (element_class,
-      &gst_vp8_dec_src_template);
-  gst_element_class_add_static_pad_template (element_class,
-      &gst_vp8_dec_sink_template);
-
-  gst_element_class_set_details_simple (element_class,
-      "On2 VP8 Decoder",
-      "Codec/Decoder/Video",
-      "Decode VP8 video streams", "David Schleef ");
-}
+#define parent_class gst_vp8_dec_parent_class
+G_DEFINE_TYPE (GstVP8Dec, gst_vp8_dec, GST_TYPE_VIDEO_DECODER);
 
 static void
 gst_vp8_dec_class_init (GstVP8DecClass * klass)
 {
   GObjectClass *gobject_class;
+  GstElementClass *element_class;
   GstVideoDecoderClass *base_video_decoder_class;
 
   gobject_class = G_OBJECT_CLASS (klass);
+  element_class = GST_ELEMENT_CLASS (klass);
   base_video_decoder_class = GST_VIDEO_DECODER_CLASS (klass);
 
   gobject_class->set_property = gst_vp8_dec_set_property;
@@ -172,6 +163,16 @@ gst_vp8_dec_class_init (GstVP8DecClass * klass)
           0, 16, DEFAULT_NOISE_LEVEL,
           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
 
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&gst_vp8_dec_src_template));
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&gst_vp8_dec_sink_template));
+
+  gst_element_class_set_details_simple (element_class,
+      "On2 VP8 Decoder",
+      "Codec/Decoder/Video",
+      "Decode VP8 video streams", "David Schleef ");
+
   base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_vp8_dec_start);
   base_video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_vp8_dec_stop);
   base_video_decoder_class->reset = GST_DEBUG_FUNCPTR (gst_vp8_dec_reset);
@@ -179,12 +180,13 @@
       GST_DEBUG_FUNCPTR (gst_vp8_dec_set_format);
   base_video_decoder_class->handle_frame =
       GST_DEBUG_FUNCPTR (gst_vp8_dec_handle_frame);
+  base_video_decoder_class->decide_allocation = gst_vp8_dec_decide_allocation;
 
   GST_DEBUG_CATEGORY_INIT (gst_vp8dec_debug, "vp8dec", 0, "VP8 Decoder");
 }
 
 static void
-gst_vp8_dec_init (GstVP8Dec * gst_vp8_dec, GstVP8DecClass * klass)
+gst_vp8_dec_init (GstVP8Dec * gst_vp8_dec)
 {
   GstVideoDecoder *decoder = (GstVideoDecoder *) gst_vp8_dec;
 
@@ -316,50 +318,45 @@ gst_vp8_dec_send_tags (GstVP8Dec * dec)
 {
   GstTagList *list;
 
-  list = gst_tag_list_new ();
+  list = gst_tag_list_new_empty ();
   gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
       GST_TAG_VIDEO_CODEC, "VP8 video", NULL);
 
-  gst_element_found_tags_for_pad (GST_ELEMENT (dec),
-      GST_VIDEO_DECODER_SRC_PAD (dec), list);
+  gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (dec),
+      gst_event_new_tag (list));
 }
 
 static void
 gst_vp8_dec_image_to_buffer (GstVP8Dec * dec, const vpx_image_t * img,
     GstBuffer * buffer)
 {
-  int stride, w, h, i;
-  guint8 *d;
-  GstVideoInfo *info;
+  int deststride, srcstride, height, width, line, comp;
+  guint8 *dest, *src;
+  GstVideoFrame frame;
+  GstVideoInfo *info = &dec->output_state->info;
 
-  info = &dec->output_state->info;
+  if (!gst_video_frame_map (&frame, info, buffer, GST_MAP_WRITE)) {
+    GST_ERROR_OBJECT (dec, "Could not map video buffer");
+  }
 
-  d = GST_BUFFER_DATA (buffer) + GST_VIDEO_INFO_COMP_OFFSET (info, 0);
-  stride = GST_VIDEO_INFO_COMP_STRIDE (info, 0);
-  h = GST_VIDEO_INFO_COMP_HEIGHT (info, 0);
-  h = MIN (h, img->h);
-  w = GST_VIDEO_INFO_COMP_WIDTH (info, 0);
-  w = MIN (w, img->w);
+  for (comp = 0; comp < 3; comp++) {
+    dest = GST_VIDEO_FRAME_COMP_DATA (&frame, comp);
+    src = img->planes[comp];
+    width = GST_VIDEO_FRAME_COMP_WIDTH (&frame, comp);
+    height = GST_VIDEO_FRAME_COMP_HEIGHT (&frame, comp);
+    deststride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, comp);
+    srcstride = img->stride[comp];
 
-  for (i = 0; i < h; i++)
-    memcpy (d + i * stride,
-        img->planes[VPX_PLANE_Y] + i * img->stride[VPX_PLANE_Y], w);
+    /* FIXME (Edward) : Do a plane memcpy if srcstride == deststride instead
+     * of copying line by line */
+    for (line = 0; line < height; line++) {
+      memcpy (dest, src, width);
+      dest += deststride;
+      src += srcstride;
+    }
+  }
 
-  d = GST_BUFFER_DATA (buffer) + GST_VIDEO_INFO_COMP_OFFSET (info, 1);
-  stride = GST_VIDEO_INFO_COMP_STRIDE (info, 1);
-  h = GST_VIDEO_INFO_COMP_HEIGHT (info, 1);
-  h = MIN (h, img->h >> img->y_chroma_shift);
-  w = GST_VIDEO_INFO_COMP_WIDTH (info, 1);
-  w = MIN (w, img->w >> img->x_chroma_shift);
-  for (i = 0; i < h; i++)
-    memcpy (d + i * stride,
-        img->planes[VPX_PLANE_U] + i * img->stride[VPX_PLANE_U], w);
-
-  d = GST_BUFFER_DATA (buffer) + GST_VIDEO_INFO_COMP_OFFSET (info, 2);
-  /* Same stride, height, width as above */
-  for (i = 0; i < h; i++)
-    memcpy (d + i * stride,
-        img->planes[VPX_PLANE_V] + i * img->stride[VPX_PLANE_V], w);
+  gst_video_frame_unmap (&frame);
 }
 
 static GstFlowReturn
@@ -370,13 +367,20 @@ open_codec (GstVP8Dec * dec, GstVideoCodecFrame * frame)
   vpx_codec_caps_t caps;
   GstVideoCodecState *state = dec->input_state;
   vpx_codec_err_t status;
+  GstMapInfo minfo;
 
   memset (&stream_info, 0, sizeof (stream_info));
   stream_info.sz = sizeof (stream_info);
 
+  if (!gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ)) {
+    GST_ERROR_OBJECT (dec, "Failed to map input buffer");
+    return GST_FLOW_ERROR;
+  }
+
   status = vpx_codec_peek_stream_info (&vpx_codec_vp8_dx_algo,
-      GST_BUFFER_DATA (frame->input_buffer),
-      GST_BUFFER_SIZE (frame->input_buffer), &stream_info);
+      minfo.data, minfo.size, &stream_info);
+
+  gst_buffer_unmap (frame->input_buffer, &minfo);
 
   if (status != VPX_CODEC_OK || !stream_info.is_kf) {
     GST_WARNING_OBJECT (dec, "No keyframe, skipping");
@@ -438,6 +442,7 @@ gst_vp8_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
   vpx_image_t *img;
   long decoder_deadline = 0;
   GstClockTimeDiff deadline;
+  GstMapInfo minfo;
 
   GST_DEBUG_OBJECT (decoder, "handle_frame");
 
@@ -455,9 +460,16 @@ gst_vp8_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
     decoder_deadline = MAX (1, deadline / GST_MSECOND);
   }
 
+  if (!gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ)) {
+    GST_ERROR_OBJECT (dec, "Failed to map input buffer");
+    return GST_FLOW_ERROR;
+  }
+
   status = vpx_codec_decode (&dec->decoder,
-      GST_BUFFER_DATA (frame->input_buffer),
-      GST_BUFFER_SIZE (frame->input_buffer), NULL, decoder_deadline);
+      minfo.data, minfo.size, NULL, decoder_deadline);
+
+  gst_buffer_unmap (frame->input_buffer, &minfo);
+
   if (status) {
     GST_ELEMENT_ERROR (decoder, LIBRARY, ENCODE,
         ("Failed to decode frame"), ("%s", gst_vpx_error_name (status)));
@@ -496,4 +508,28 @@ gst_vp8_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
   return ret;
 }
 
+static gboolean
+gst_vp8_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
+{
+  GstBufferPool *pool;
+  GstStructure *config;
+
+  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
+    return FALSE;
+
+  g_assert (gst_query_get_n_allocation_pools (query) > 0);
+  gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
+  g_assert (pool != NULL);
+
+  config = gst_buffer_pool_get_config (pool);
+  if (gst_query_has_allocation_meta (query, GST_VIDEO_META_API_TYPE)) {
+    gst_buffer_pool_config_add_option (config,
+        GST_BUFFER_POOL_OPTION_VIDEO_META);
+  }
+  gst_buffer_pool_set_config (pool, config);
+  gst_object_unref (pool);
+
+  return TRUE;
+}
+
 #endif /* HAVE_VP8_DECODER */
diff --git a/ext/vp8/gstvp8enc.c b/ext/vp8/gstvp8enc.c
index 6316021779..fc3919e63e 100644
--- a/ext/vp8/gstvp8enc.c
+++ b/ext/vp8/gstvp8enc.c
@@ -56,6 +56,8 @@
 #include "gstvp8utils.h"
 #include "gstvp8enc.h"
 
+#include <gst/video/gstvideometa.h>
+
 GST_DEBUG_CATEGORY_STATIC (gst_vp8enc_debug);
 #define GST_CAT_DEFAULT gst_vp8enc_debug
 
@@ -230,12 +232,14 @@ static GstFlowReturn gst_vp8_enc_pre_push (GstVideoEncoder * encoder,
     GstVideoCodecFrame * frame);
 static gboolean gst_vp8_enc_sink_event (GstVideoEncoder * video_encoder,
     GstEvent * event);
+static gboolean gst_vp8_enc_propose_allocation (GstVideoEncoder * encoder,
+    GstQuery * query);
 
 static GstStaticPadTemplate gst_vp8_enc_sink_template =
 GST_STATIC_PAD_TEMPLATE ("sink",
     GST_PAD_SINK,
     GST_PAD_ALWAYS,
-    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
     );
 
 static GstStaticPadTemplate gst_vp8_enc_src_template =
@@ -245,54 +249,36 @@ GST_STATIC_PAD_TEMPLATE ("src",
     GST_STATIC_CAPS ("video/x-vp8")
     );
 
-static void
-do_init (GType vp8enc_type)
-{
-  static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL };
-  const GInterfaceInfo preset_interface_info = {
-    NULL,                       /* interface_init */
-    NULL,                       /* interface_finalize */
-    NULL                        /* interface_data */
-  };
-
-  g_type_add_interface_static (vp8enc_type, GST_TYPE_TAG_SETTER,
-      &tag_setter_info);
-  g_type_add_interface_static (vp8enc_type, GST_TYPE_PRESET,
-      &preset_interface_info);
-}
-
-GST_BOILERPLATE_FULL (GstVP8Enc, gst_vp8_enc, GstVideoEncoder,
-    GST_TYPE_VIDEO_ENCODER, do_init);
-
-static void
-gst_vp8_enc_base_init (gpointer g_class)
-{
-  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
-  gst_element_class_add_static_pad_template (element_class,
-      &gst_vp8_enc_src_template);
-  gst_element_class_add_static_pad_template (element_class,
-      &gst_vp8_enc_sink_template);
-
-  gst_element_class_set_details_simple (element_class,
-      "On2 VP8 Encoder",
-      "Codec/Encoder/Video",
-      "Encode VP8 video streams", "David Schleef ");
-}
+#define parent_class gst_vp8_enc_parent_class
+G_DEFINE_TYPE_WITH_CODE (GstVP8Enc, gst_vp8_enc, GST_TYPE_VIDEO_ENCODER,
+    G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL);
+    G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL););
 
 static void
 gst_vp8_enc_class_init (GstVP8EncClass * klass)
 {
   GObjectClass *gobject_class;
+  GstElementClass *element_class;
   GstVideoEncoderClass *video_encoder_class;
 
   gobject_class = G_OBJECT_CLASS (klass);
+  element_class = GST_ELEMENT_CLASS (klass);
   video_encoder_class = GST_VIDEO_ENCODER_CLASS (klass);
 
   gobject_class->set_property = gst_vp8_enc_set_property;
   gobject_class->get_property = gst_vp8_enc_get_property;
   gobject_class->finalize = gst_vp8_enc_finalize;
 
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&gst_vp8_enc_src_template));
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&gst_vp8_enc_sink_template));
+
+  gst_element_class_set_details_simple (element_class,
+      "On2 VP8 Encoder",
+      "Codec/Encoder/Video",
+      "Encode VP8 video streams", "David Schleef ");
+
   video_encoder_class->start = gst_vp8_enc_start;
   video_encoder_class->stop = gst_vp8_enc_stop;
   video_encoder_class->handle_frame = gst_vp8_enc_handle_frame;
@@ -300,6 +286,7 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass)
   video_encoder_class->finish = gst_vp8_enc_finish;
   video_encoder_class->pre_push = gst_vp8_enc_pre_push;
   video_encoder_class->sink_event = gst_vp8_enc_sink_event;
+  video_encoder_class->propose_allocation = gst_vp8_enc_propose_allocation;
 
   g_object_class_install_property (gobject_class, PROP_BITRATE,
       g_param_spec_int ("bitrate", "Bit rate",
@@ -446,7 +433,7 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass)
 }
 
 static void
-gst_vp8_enc_init (GstVP8Enc * gst_vp8_enc, GstVP8EncClass * klass)
+gst_vp8_enc_init (GstVP8Enc * gst_vp8_enc)
 {
   GST_DEBUG_OBJECT (gst_vp8_enc, "init");
 
@@ -866,17 +853,16 @@ gst_vp8_enc_set_format (GstVideoEncoder * video_encoder,
   image->stride[VPX_PLANE_Y] = GST_VIDEO_INFO_COMP_STRIDE (info, 0);
   image->stride[VPX_PLANE_U] = GST_VIDEO_INFO_COMP_STRIDE (info, 1);
   image->stride[VPX_PLANE_V] = GST_VIDEO_INFO_COMP_STRIDE (info, 2);
-  image->planes[VPX_PLANE_Y] = data + GST_VIDEO_INFO_COMP_OFFSET (info, 0);
-  image->planes[VPX_PLANE_U] = data + GST_VIDEO_INFO_COMP_OFFSET (info, 1);
-  image->planes[VPX_PLANE_V] = data + GST_VIDEO_INFO_COMP_OFFSET (info, 2);
 
-  caps = gst_caps_new_simple ("video/x-vp8", NULL);
+  caps = gst_caps_new_empty_simple ("video/x-vp8");
   {
     GstStructure *s;
     GstBuffer *stream_hdr, *vorbiscomment;
    const GstTagList *iface_tags;
     GValue array = { 0, };
     GValue value = { 0, };
+    GstMapInfo map;
+
     s = gst_caps_get_structure (caps, 0);
 
     /* put buffers in a fixed list */
@@ -885,7 +871,8 @@ gst_vp8_enc_set_format (GstVideoEncoder * video_encoder,
 
     /* Create Ogg stream-info */
     stream_hdr = gst_buffer_new_and_alloc (26);
-    data = GST_BUFFER_DATA (stream_hdr);
+    gst_buffer_map (stream_hdr, &map, GST_MAP_WRITE);
+    data = map.data;
 
     GST_WRITE_UINT8 (data, 0x4F);
     GST_WRITE_UINT32_BE (data + 1, 0x56503830); /* "VP80" */
@@ -899,7 +886,9 @@ gst_vp8_enc_set_format (GstVideoEncoder * video_encoder,
     GST_WRITE_UINT32_BE (data + 18, GST_VIDEO_INFO_FPS_N (info));
     GST_WRITE_UINT32_BE (data + 22, GST_VIDEO_INFO_FPS_D (info));
 
-    GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
+    gst_buffer_unmap (stream_hdr, &map);
+
+    GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_HEADER);
     gst_value_set_buffer (&value, stream_hdr);
     gst_value_array_append_value (&array, &value);
     g_value_unset (&value);
@@ -912,7 +901,7 @@ gst_vp8_enc_set_format (GstVideoEncoder * video_encoder,
         (const guint8 *) "OVP80\2 ", 7,
         "Encoded with GStreamer vp8enc " PACKAGE_VERSION);
 
-    GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);
+    GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_HEADER);
 
     g_value_init (&value, GST_TYPE_BUFFER);
     gst_value_set_buffer (&value, vorbiscomment);
@@ -961,7 +950,7 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
     frame = gst_video_encoder_get_oldest_frame (video_encoder);
     if (frame != NULL) {
       buffer = gst_buffer_new ();
-      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_PREROLL);
+      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_LIVE);
       frame->output_buffer = buffer;
       gst_video_encoder_finish_frame (video_encoder, frame);
     }
@@ -984,9 +973,10 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
 
     user_data = gst_video_codec_frame_get_user_data (frame);
 
-    buffer = gst_buffer_new_and_alloc (pkt->data.frame.sz);
-
-    memcpy (GST_BUFFER_DATA (buffer), pkt->data.frame.buf, pkt->data.frame.sz);
+    /* FIXME : It would be nice to avoid the memory copy ... */
+    buffer =
+        gst_buffer_new_wrapped (g_memdup (pkt->data.frame.buf,
+            pkt->data.frame.sz), pkt->data.frame.sz);
 
     if (user_data->image)
       g_slice_free (vpx_image_t, user_data->image);
@@ -1045,17 +1035,15 @@ gst_vp8_enc_finish (GstVideoEncoder * video_encoder)
 }
 
 static vpx_image_t *
-gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstBuffer * buffer)
+gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstVideoFrame * frame)
 {
   vpx_image_t *image = g_slice_new (vpx_image_t);
-  guint8 *data = GST_BUFFER_DATA (buffer);
 
   memcpy (image, &enc->image, sizeof (*image));
 
-  image->img_data = data;
-  image->planes[VPX_PLANE_Y] += (data - (guint8 *) NULL);
-  image->planes[VPX_PLANE_U] += (data - (guint8 *) NULL);
-  image->planes[VPX_PLANE_V] += (data - (guint8 *) NULL);
+  image->planes[VPX_PLANE_Y] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+  image->planes[VPX_PLANE_U] = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
+  image->planes[VPX_PLANE_V] = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
 
   return image;
 }
@@ -1070,6 +1058,7 @@ gst_vp8_enc_handle_frame (GstVideoEncoder * video_encoder,
   vpx_image_t *image;
   GstVP8EncUserData *user_data;
   int quality;
+  GstVideoFrame vframe;
 
   GST_DEBUG_OBJECT (video_encoder, "handle_frame");
 
@@ -1081,7 +1070,9 @@ gst_vp8_enc_handle_frame (GstVideoEncoder * video_encoder,
       GST_VIDEO_INFO_WIDTH (&encoder->input_state->info),
      GST_VIDEO_INFO_HEIGHT (&encoder->input_state->info));
 
-  image = gst_vp8_enc_buffer_to_image (encoder, frame->input_buffer);
+  gst_video_frame_map (&vframe, &encoder->input_state->info,
+      frame->input_buffer, GST_MAP_READ);
+  image = gst_vp8_enc_buffer_to_image (encoder, &vframe);
 
   user_data = g_slice_new0 (GstVP8EncUserData);
   user_data->image = image;
@@ -1096,6 +1087,8 @@ gst_vp8_enc_handle_frame (GstVideoEncoder * video_encoder,
   status = vpx_codec_encode (&encoder->encoder, image,
       encoder->n_frames, 1, flags, quality);
 
+  gst_video_frame_unmap (&vframe);
+
   if (status != 0) {
     GST_ELEMENT_ERROR (encoder, LIBRARY, ENCODE,
         ("Failed to encode frame"), ("%s", gst_vpx_error_name (status)));
@@ -1161,8 +1154,6 @@ gst_vp8_enc_pre_push (GstVideoEncoder * video_encoder,
         gst_util_uint64_scale (frame->presentation_frame_number + 1,
         GST_SECOND * GST_VIDEO_INFO_FPS_D (info), GST_VIDEO_INFO_FPS_N (info));
 
-  gst_buffer_set_caps (buf,
-      GST_PAD_CAPS (GST_VIDEO_ENCODER_SRC_PAD (video_encoder)));
   ret = gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (video_encoder), buf);
 
   if (ret != GST_FLOW_OK) {
@@ -1213,7 +1204,16 @@ gst_vp8_enc_sink_event (GstVideoEncoder * benc, GstEvent * event)
   }
 
   /* just peeked, baseclass handles the rest */
-  return FALSE;
+  return GST_VIDEO_ENCODER_CLASS (parent_class)->sink_event (benc, event);
+}
+
+static gboolean
+gst_vp8_enc_propose_allocation (GstVideoEncoder * encoder, GstQuery * query)
+{
+  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE);
+
+  return GST_VIDEO_ENCODER_CLASS (parent_class)->propose_allocation (encoder,
+      query);
 }
 
 #endif /* HAVE_VP8_ENCODER */
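
For reference, the change this port applies most often in both files is the move from direct GST_BUFFER_DATA () / GST_BUFFER_SIZE () access to an explicit gst_buffer_map () / gst_buffer_unmap () cycle through a GstMapInfo. A minimal, self-contained sketch of that 0.11/1.0 pattern follows; it is not part of the patch, and the function name and the g_print () consumer are placeholders chosen for illustration only.

#include <gst/gst.h>

/* Sketch of the 0.11/1.0 buffer-access pattern used throughout this port:
 * map the buffer for reading, use the mapped data and size, then unmap.
 * In 0.10 this was a direct GST_BUFFER_DATA () / GST_BUFFER_SIZE () access;
 * in 0.11 the map can fail and must always be paired with an unmap. */
static gboolean
dump_buffer_size (GstBuffer * buffer)
{
  GstMapInfo minfo;

  if (!gst_buffer_map (buffer, &minfo, GST_MAP_READ))
    return FALSE;

  g_print ("mapped %" G_GSIZE_FORMAT " bytes at %p\n",
      minfo.size, (gpointer) minfo.data);

  gst_buffer_unmap (buffer, &minfo);
  return TRUE;
}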