diff --git a/ext/mpeg2dec/gstmpeg2dec.c b/ext/mpeg2dec/gstmpeg2dec.c index c1f93ee2fa..730b8b673f 100644 --- a/ext/mpeg2dec/gstmpeg2dec.c +++ b/ext/mpeg2dec/gstmpeg2dec.c @@ -42,7 +42,6 @@ typedef gint mpeg2_state_t; #define STATE_BUFFER 0 #endif -GST_DEBUG_CATEGORY_EXTERN (GST_CAT_PERFORMANCE); GST_DEBUG_CATEGORY_STATIC (mpeg2dec_debug); #define GST_CAT_DEFAULT (mpeg2dec_debug) @@ -51,15 +50,6 @@ GST_DEBUG_CATEGORY_STATIC (mpeg2dec_debug); */ #define WARN_THRESHOLD (5) -//#define enable_user_data -#ifdef enable_user_data -static GstStaticPadTemplate user_data_template_factory = -GST_STATIC_PAD_TEMPLATE ("user_data", - GST_PAD_SRC, - GST_PAD_ALWAYS, - GST_STATIC_CAPS_ANY); -#endif - static GstStaticPadTemplate sink_template_factory = GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, @@ -72,113 +62,82 @@ static GstStaticPadTemplate src_template_factory = GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS, - GST_STATIC_CAPS ("video/x-raw, " - "format = (string) { I420, Y42B, Y444 }, " + GST_STATIC_CAPS ("video/x-raw-yuv, " + "format = (fourcc) { YV12, I420, Y42B, Y444 }, " "width = (int) [ 16, 4096 ], " "height = (int) [ 16, 4096 ], " "framerate = (fraction) [ 0/1, 2147483647/1 ]") ); +GST_BOILERPLATE (GstMpeg2dec, gst_mpeg2dec, GstVideoDecoder, + GST_TYPE_VIDEO_DECODER); + static void gst_mpeg2dec_finalize (GObject * object); -static void gst_mpeg2dec_reset (GstMpeg2dec * mpeg2dec); -static gboolean gst_mpeg2dec_src_event (GstPad * pad, GstObject * parent, - GstEvent * event); -static GstStateChangeReturn gst_mpeg2dec_change_state (GstElement * element, - GstStateChange transition); +/* GstVideoDecoder base class method */ +static gboolean gst_mpeg2dec_open (GstVideoDecoder * decoder); +static gboolean gst_mpeg2dec_close (GstVideoDecoder * decoder); +static gboolean gst_mpeg2dec_start (GstVideoDecoder * decoder); +static gboolean gst_mpeg2dec_set_format (GstVideoDecoder * decoder, + GstVideoCodecState * state); +static gboolean gst_mpeg2dec_reset (GstVideoDecoder * decoder, gboolean hard); +static GstFlowReturn gst_mpeg2dec_finish (GstVideoDecoder * decoder); +static GstFlowReturn gst_mpeg2dec_handle_frame (GstVideoDecoder * decoder, + GstVideoCodecFrame * frame); -static gboolean gst_mpeg2dec_sink_event (GstPad * pad, GstObject * parent, - GstEvent * event); -static gboolean gst_mpeg2dec_setcaps (GstPad * pad, GstCaps * caps); -static GstFlowReturn gst_mpeg2dec_chain (GstPad * pad, GstObject * parent, - GstBuffer * buf); +/* GstElement overload */ +static void gst_mpeg2dec_set_index (GstElement * element, GstIndex * index); +static GstIndex *gst_mpeg2dec_get_index (GstElement * element); static void clear_buffers (GstMpeg2dec * mpeg2dec); +static gboolean gst_mpeg2dec_crop_buffer (GstMpeg2dec * dec, GstBuffer ** buf); -//static gboolean gst_mpeg2dec_sink_query (GstPad * pad, GstObject * parent, GstQuery * query); -#if 0 -static const GstFormat *gst_mpeg2dec_get_formats (GstPad * pad); -#endif +static void +gst_mpeg2dec_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); -#if 0 -static const GstEventMask *gst_mpeg2dec_get_event_masks (GstPad * pad); -#endif - -static gboolean gst_mpeg2dec_crop_buffer (GstMpeg2dec * dec, GstBuffer ** buf, - GstVideoFrame * frame); - -/*static guint gst_mpeg2dec_signals[LAST_SIGNAL] = { 0 };*/ - -#define gst_mpeg2dec_parent_class parent_class -G_DEFINE_TYPE (GstMpeg2dec, gst_mpeg2dec, GST_TYPE_ELEMENT); + gst_element_class_add_static_pad_template (element_class, + &src_template_factory); + 
gst_element_class_add_static_pad_template (element_class, + &sink_template_factory); + gst_element_class_set_details_simple (element_class, + "mpeg1 and mpeg2 video decoder", "Codec/Decoder/Video", + "Uses libmpeg2 to decode MPEG video streams", + "Wim Taymans "); +} static void gst_mpeg2dec_class_init (GstMpeg2decClass * klass) { - GObjectClass *gobject_class; - GstElementClass *gstelement_class; - - gobject_class = (GObjectClass *) klass; - gstelement_class = (GstElementClass *) klass; + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); + GstVideoDecoderClass *video_decoder_class = GST_VIDEO_DECODER_CLASS (klass); gobject_class->finalize = gst_mpeg2dec_finalize; - gst_element_class_add_pad_template (gstelement_class, - gst_static_pad_template_get (&src_template_factory)); - gst_element_class_add_pad_template (gstelement_class, - gst_static_pad_template_get (&sink_template_factory)); -#ifdef enable_user_data - gst_element_class_add_pad_template (gstelement_class, - gst_static_pad_template_get (&user_data_template_factory)); -#endif - gst_element_class_set_static_metadata (gstelement_class, - "mpeg1 and mpeg2 video decoder", "Codec/Decoder/Video", - "Uses libmpeg2 to decode MPEG video streams", - "Wim Taymans "); + video_decoder_class->open = GST_DEBUG_FUNCPTR (gst_mpeg2dec_open); + video_decoder_class->close = GST_DEBUG_FUNCPTR (gst_mpeg2dec_close); + video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_mpeg2dec_start); + video_decoder_class->reset = GST_DEBUG_FUNCPTR (gst_mpeg2dec_reset); + video_decoder_class->set_format = GST_DEBUG_FUNCPTR (gst_mpeg2dec_set_format); + video_decoder_class->handle_frame = + GST_DEBUG_FUNCPTR (gst_mpeg2dec_handle_frame); + video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_mpeg2dec_finish); - gstelement_class->change_state = gst_mpeg2dec_change_state; + element_class->set_index = gst_mpeg2dec_set_index; + element_class->get_index = gst_mpeg2dec_get_index; GST_DEBUG_CATEGORY_INIT (mpeg2dec_debug, "mpeg2dec", 0, - "MPEG2 decoder element"); + "MPEG-2 Video Decoder"); } static void -gst_mpeg2dec_init (GstMpeg2dec * mpeg2dec) +gst_mpeg2dec_init (GstMpeg2dec * mpeg2dec, GstMpeg2decClass * klass) { - /* create the sink and src pads */ - mpeg2dec->sinkpad = - gst_pad_new_from_static_template (&sink_template_factory, "sink"); - gst_pad_set_chain_function (mpeg2dec->sinkpad, - GST_DEBUG_FUNCPTR (gst_mpeg2dec_chain)); -#if 0 - gst_pad_set_query_function (mpeg2dec->sinkpad, - GST_DEBUG_FUNCPTR (gst_mpeg2dec_get_sink_query)); -#endif - gst_pad_set_event_function (mpeg2dec->sinkpad, - GST_DEBUG_FUNCPTR (gst_mpeg2dec_sink_event)); - gst_element_add_pad (GST_ELEMENT (mpeg2dec), mpeg2dec->sinkpad); - - mpeg2dec->srcpad = - gst_pad_new_from_static_template (&src_template_factory, "src"); - gst_pad_set_event_function (mpeg2dec->srcpad, - GST_DEBUG_FUNCPTR (gst_mpeg2dec_src_event)); -#if 0 - gst_pad_set_query_function (mpeg2dec->srcpad, - GST_DEBUG_FUNCPTR (gst_mpeg2dec_src_query)); -#endif - gst_pad_use_fixed_caps (mpeg2dec->srcpad); - gst_element_add_pad (GST_ELEMENT (mpeg2dec), mpeg2dec->srcpad); - -#ifdef enable_user_data - mpeg2dec->userdatapad = - gst_pad_new_from_static_template (&user_data_template_factory, - "user_data"); - gst_element_add_pad (GST_ELEMENT (mpeg2dec), mpeg2dec->userdatapad); -#endif - - mpeg2dec->error_count = 0; mpeg2dec->can_allocate_aligned = TRUE; + gst_video_decoder_set_packetized (GST_VIDEO_DECODER (mpeg2dec), TRUE); /* initialize the mpeg2dec acceleration */ } @@ -188,11 
+147,18 @@ gst_mpeg2dec_finalize (GObject * object) { GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (object); + if (mpeg2dec->index) { + gst_object_unref (mpeg2dec->index); + mpeg2dec->index = NULL; + mpeg2dec->index_id = 0; + } + if (mpeg2dec->decoder) { GST_DEBUG_OBJECT (mpeg2dec, "closing decoder"); mpeg2_close (mpeg2dec->decoder); mpeg2dec->decoder = NULL; } + clear_buffers (mpeg2dec); g_free (mpeg2dec->dummybuf[3]); mpeg2dec->dummybuf[3] = NULL; @@ -200,79 +166,183 @@ gst_mpeg2dec_finalize (GObject * object) G_OBJECT_CLASS (parent_class)->finalize (object); } -static void -gst_mpeg2dec_reset (GstMpeg2dec * mpeg2dec) +static gboolean +gst_mpeg2dec_open (GstVideoDecoder * decoder) { - /* reset the initial video state */ - gst_video_info_init (&mpeg2dec->vinfo); - gst_segment_init (&mpeg2dec->segment, GST_FORMAT_UNDEFINED); - mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_PICTURE; - mpeg2dec->frame_period = 0; - mpeg2dec->need_sequence = TRUE; - mpeg2dec->next_time = -1; - mpeg2dec->offset = 0; - mpeg2dec->error_count = 0; - mpeg2dec->can_allocate_aligned = TRUE; - mpeg2_reset (mpeg2dec->decoder, 1); + GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder); + + mpeg2_accel (MPEG2_ACCEL_DETECT); + if ((mpeg2dec->decoder = mpeg2_init ()) == NULL) + return FALSE; + mpeg2dec->info = mpeg2_info (mpeg2dec->decoder); + + return TRUE; } -static void -gst_mpeg2dec_qos_reset (GstMpeg2dec * mpeg2dec) +static gboolean +gst_mpeg2dec_close (GstVideoDecoder * decoder) { + GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder); + + if (mpeg2dec->decoder) { + mpeg2_close (mpeg2dec->decoder); + mpeg2dec->decoder = NULL; + mpeg2dec->info = NULL; + } + clear_buffers (mpeg2dec); + + return TRUE; +} + +static gboolean +gst_mpeg2dec_start (GstVideoDecoder * decoder) +{ + return gst_mpeg2dec_reset (decoder, TRUE); +} + +static gboolean +gst_mpeg2dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state) +{ + GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder); + GstStructure *s; + + /* Save input state to be used as reference for output state */ + if (mpeg2dec->input_state) + gst_video_codec_state_unref (mpeg2dec->input_state); + mpeg2dec->input_state = gst_video_codec_state_ref (state); + + s = gst_caps_get_structure (state->caps, 0); + + /* parse the par, this overrides the encoded par */ + mpeg2dec->have_par = gst_structure_get_fraction (s, "pixel-aspect-ratio", + &mpeg2dec->pixel_width, &mpeg2dec->pixel_height); + + return TRUE; +} + +static gboolean +gst_mpeg2dec_reset (GstVideoDecoder * decoder, gboolean hard) +{ + GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder); + + GST_DEBUG_OBJECT (mpeg2dec, "%s", hard ? 
"hard" : "soft"); + GST_OBJECT_LOCK (mpeg2dec); - mpeg2dec->proportion = 1.0; - mpeg2dec->earliest_time = -1; - mpeg2dec->dropped = 0; - mpeg2dec->processed = 0; + if (mpeg2dec->index) { + gst_object_unref (mpeg2dec->index); + mpeg2dec->index = NULL; + mpeg2dec->index_id = 0; + } GST_OBJECT_UNLOCK (mpeg2dec); + + /* reset the initial video state */ + mpeg2dec->width = -1; + mpeg2dec->height = -1; + mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_PICTURE; + mpeg2dec->frame_period = 0; + mpeg2dec->next_time = -1; + mpeg2dec->offset = 0; + mpeg2dec->can_allocate_aligned = TRUE; + mpeg2_reset (mpeg2dec->decoder, hard); + mpeg2_skip (mpeg2dec->decoder, 1); + + clear_buffers (mpeg2dec); + + return TRUE; } static GstFlowReturn -gst_mpeg2dec_crop_buffer (GstMpeg2dec * dec, GstBuffer ** buf, - GstVideoFrame * frame) +gst_mpeg2dec_finish (GstVideoDecoder * decoder) { - GstFlowReturn ret; + GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder); + + if (mpeg2dec->index && mpeg2dec->closed) { + gst_index_commit (mpeg2dec->index, mpeg2dec->index_id); + } + + return GST_FLOW_OK; +} + +static void +gst_mpeg2dec_set_index (GstElement * element, GstIndex * index) +{ + GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (element); + + GST_OBJECT_LOCK (mpeg2dec); + if (mpeg2dec->index) + gst_object_unref (mpeg2dec->index); + mpeg2dec->index = NULL; + mpeg2dec->index_id = 0; + if (index) { + mpeg2dec->index = gst_object_ref (index); + } + GST_OBJECT_UNLOCK (mpeg2dec); + /* object lock might be taken again */ + if (index) + gst_index_get_writer_id (index, GST_OBJECT (element), &mpeg2dec->index_id); +} + +static GstIndex * +gst_mpeg2dec_get_index (GstElement * element) +{ + GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (element); + + return (mpeg2dec->index) ? gst_object_ref (mpeg2dec->index) : NULL; +} + +static GstFlowReturn +gst_mpeg2dec_crop_buffer (GstMpeg2dec * dec, GstBuffer ** buf) +{ + GstVideoInfo *info; + GstVideoFormat format; + GstBuffer *inbuf = *buf; GstBuffer *outbuf; - GstVideoFrame outframe; - guint i, n_planes; + guint c; - ret = gst_buffer_pool_acquire_buffer (dec->pool, &outbuf, NULL); - if (G_UNLIKELY (ret != GST_FLOW_OK)) - return ret;; + info = &gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec))->info; + format = GST_VIDEO_INFO_FORMAT (info); - GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec, - "cropping input buffer to output buffer"); + GST_LOG_OBJECT (dec, "Copying input buffer %ux%u (%u) to output buffer " + "%ux%u (%u)", dec->decoded_width, dec->decoded_height, + GST_BUFFER_SIZE (inbuf), info->width, info->height, info->size); - gst_video_frame_map (&outframe, &dec->cinfo, outbuf, GST_MAP_WRITE); + outbuf = gst_video_decoder_alloc_output_buffer (GST_VIDEO_DECODER (dec)); - n_planes = GST_VIDEO_FRAME_N_PLANES (&outframe); + for (c = 0; c < 3; c++) { + const guint8 *src; + guint8 *dest; + guint stride_in, stride_out; + guint c_height, c_width, line; - for (i = 0; i < n_planes; i++) { - guint w, h, j; - guint8 *sp, *dp; - gint ss, ds; + src = + GST_BUFFER_DATA (inbuf) + + gst_video_format_get_component_offset (format, c, dec->decoded_width, + dec->decoded_height); + dest = + GST_BUFFER_DATA (outbuf) + + gst_video_format_get_component_offset (format, c, info->width, + dec->height); + stride_out = gst_video_format_get_row_stride (format, c, info->width); + stride_in = gst_video_format_get_row_stride (format, c, dec->decoded_width); + c_height = gst_video_format_get_component_height (format, c, info->height); + c_width = gst_video_format_get_component_width (format, c, info->width); - sp = GST_VIDEO_FRAME_PLANE_DATA 
(frame, i); - dp = GST_VIDEO_FRAME_PLANE_DATA (&outframe, i); + GST_DEBUG ("stride_in:%d _out:%d c_width:%d c_height:%d", + stride_in, stride_out, c_width, c_height); - ss = GST_VIDEO_FRAME_PLANE_STRIDE (frame, i); - ds = GST_VIDEO_FRAME_PLANE_STRIDE (&outframe, i); - - w = MIN (ABS (ss), ABS (ds)); - h = GST_VIDEO_FRAME_COMP_HEIGHT (&outframe, i); - - GST_CAT_DEBUG (GST_CAT_PERFORMANCE, "copy plane %u, w:%u h:%u ", i, w, h); - - for (j = 0; j < h; j++) { - memcpy (dp, sp, w); - dp += ds; - sp += ss; + if (stride_in == stride_out && stride_in == c_width) { + /* FAST PATH */ + memcpy (dest, src, c_height * stride_out); + dest += stride_out * c_height; + src += stride_out * c_height; + } else { + for (line = 0; line < c_height; line++) { + memcpy (dest, src, c_width); + dest += stride_out; + src += stride_in; + } } } - gst_video_frame_unmap (&outframe); - - gst_buffer_copy_into (outbuf, *buf, - GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS, 0, -1); gst_buffer_unref (*buf); *buf = outbuf; @@ -280,83 +350,137 @@ gst_mpeg2dec_crop_buffer (GstMpeg2dec * dec, GstBuffer ** buf, return GST_FLOW_OK; } -static GstFlowReturn -gst_mpeg2dec_negotiate_pool (GstMpeg2dec * dec, GstCaps * caps, - GstVideoInfo * vinfo, GstVideoInfo * cinfo) +static void +gst_mpeg2dec_alloc_sized_buf (GstMpeg2dec * mpeg2dec, guint size, + GstBuffer ** obuf) { - GstQuery *query; - GstBufferPool *pool; - guint size, min, max; - GstStructure *config; - GstCaps *pcaps; + if (mpeg2dec->can_allocate_aligned + && mpeg2dec->decoded_width == mpeg2dec->width + && mpeg2dec->decoded_height == mpeg2dec->height) { - /* find a pool for the negotiated caps now */ - query = gst_query_new_allocation (caps, TRUE); + *obuf = + gst_video_decoder_alloc_output_buffer (GST_VIDEO_DECODER (mpeg2dec)); - if (gst_pad_peer_query (dec->srcpad, query)) { - /* check if downstream supports cropping */ - dec->has_cropping = - gst_query_has_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE); + /* libmpeg2 needs 16 byte aligned buffers... 
test for this here + * and if it fails only a single time create our own buffers from + * there on below that are correctly aligned */ + if (((uintptr_t) GST_BUFFER_DATA (*obuf)) % 16 == 0) { + GST_LOG_OBJECT (mpeg2dec, "return 16 byte aligned buffer"); + return; + } + + GST_DEBUG_OBJECT (mpeg2dec, + "can't get 16 byte aligned buffers, creating our own ones"); + gst_buffer_unref (*obuf); + mpeg2dec->can_allocate_aligned = FALSE; + } + + /* can't use gst_pad_alloc_buffer() here because the output buffer will + * either be cropped later or be bigger than expected (for the alignment), + * and basetransform-based elements will complain about the wrong unit size + * when not operating in passthrough mode */ + *obuf = gst_buffer_new_and_alloc (size + 15); + GST_BUFFER_DATA (*obuf) = (guint8 *) ALIGN_16 (GST_BUFFER_DATA (*obuf)); + GST_BUFFER_SIZE (*obuf) = size; +} + +static void +gst_mpeg2dec_alloc_buffer (GstMpeg2dec * mpeg2dec, gint64 offset, + GstVideoCodecFrame * frame) +{ + guint8 *buf[3]; + + gst_mpeg2dec_alloc_sized_buf (mpeg2dec, mpeg2dec->size, + &frame->output_buffer); + + buf[0] = GST_BUFFER_DATA (frame->output_buffer); + buf[1] = buf[0] + mpeg2dec->u_offs; + buf[2] = buf[0] + mpeg2dec->v_offs; + + GST_DEBUG_OBJECT (mpeg2dec, "set_buf: %p %p %p, frame %i", + buf[0], buf[1], buf[2], frame->system_frame_number); + + mpeg2_set_buf (mpeg2dec->decoder, buf, + GINT_TO_POINTER (frame->system_frame_number)); + mpeg2dec->buffers = g_list_prepend (mpeg2dec->buffers, + gst_buffer_ref (frame->output_buffer)); + + /* we store the original byteoffset of this picture in the stream here + * because we need it for indexing */ + GST_BUFFER_OFFSET (frame->output_buffer) = offset; +} + +static gboolean +gst_mpeg2dec_negotiate_format (GstMpeg2dec * mpeg2dec) +{ + GstVideoCodecState *new_state; + GstVideoFormat format; + const mpeg2_info_t *info; + const mpeg2_sequence_t *sequence; + gboolean ret = FALSE; + + info = mpeg2_info (mpeg2dec->decoder); + sequence = info->sequence; + + if (sequence->width != sequence->chroma_width && + sequence->height != sequence->chroma_height) { + format = GST_VIDEO_FORMAT_I420; + } else if ((sequence->width == sequence->chroma_width && + sequence->height != sequence->chroma_height) || + (sequence->width != sequence->chroma_width && + sequence->height == sequence->chroma_height)) { + format = GST_VIDEO_FORMAT_Y42B; } else { - /* use the query default then */ - GST_DEBUG_OBJECT (dec, "didn't get downstream ALLOCATION hints"); - dec->has_cropping = FALSE; + format = GST_VIDEO_FORMAT_Y444; } - if (gst_query_get_n_allocation_pools (query) > 0) { - /* we got configuration from our peer, parse them */ - gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max); - } else { - pool = NULL; - size = 0; - min = max = 0; + new_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (mpeg2dec), + format, mpeg2dec->width, mpeg2dec->height, mpeg2dec->input_state); + + /* Ensure interlace caps are set, needed if not using mpegvideoparse */ + if (mpeg2dec->interlaced) + new_state->info.interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED; + + mpeg2dec->size = gst_video_format_get_size (format, + mpeg2dec->decoded_width, mpeg2dec->decoded_height); + mpeg2dec->u_offs = gst_video_format_get_component_offset (format, 1, + mpeg2dec->decoded_width, mpeg2dec->decoded_height); + mpeg2dec->v_offs = gst_video_format_get_component_offset (format, 2, + mpeg2dec->decoded_width, mpeg2dec->decoded_height); + + if (mpeg2dec->pixel_width == 0 || mpeg2dec->pixel_height == 0) { + GValue 
par = { 0, }; + GValue dar = { 0, }; + GValue dimensions = { 0, }; + + /* assume display aspect ratio (DAR) of 4:3 */ + g_value_init (&dar, GST_TYPE_FRACTION); + gst_value_set_fraction (&dar, 4, 3); + g_value_init (&dimensions, GST_TYPE_FRACTION); + gst_value_set_fraction (&dimensions, mpeg2dec->height, mpeg2dec->width); + + g_value_init (&par, GST_TYPE_FRACTION); + if (!gst_value_fraction_multiply (&par, &dar, &dimensions)) { + gst_value_set_fraction (&dimensions, 1, 1); + } + + mpeg2dec->pixel_width = gst_value_get_fraction_numerator (&par); + mpeg2dec->pixel_height = gst_value_get_fraction_denominator (&par); + + GST_WARNING_OBJECT (mpeg2dec, "Unknown pixel-aspect-ratio, assuming %d:%d", + mpeg2dec->pixel_width, mpeg2dec->pixel_height); + + g_value_unset (&par); + g_value_unset (&dar); + g_value_unset (&dimensions); } - GST_DEBUG_OBJECT (dec, - "size:%d, min:%d, max:%d,pool:%p", size, min, max, pool); - GST_DEBUG_OBJECT (dec, "downstream cropping %d", dec->has_cropping); - - if (pool == NULL) { - /* we did not get a pool, make one ourselves then */ - pool = gst_video_buffer_pool_new (); + if (new_state) { + gst_video_codec_state_unref (new_state); + ret = TRUE; } - if (dec->pool) { - gst_buffer_pool_set_active (dec->pool, FALSE); - gst_object_unref (dec->pool); - } - dec->pool = pool; - - if (dec->need_cropping && dec->has_cropping) { - /* we can crop, configure the pool with buffers of caps and size of the - * decoded picture size and then crop them with metadata */ - pcaps = gst_video_info_to_caps (vinfo); - size = MAX (size, GST_VIDEO_INFO_SIZE (vinfo)); - } else { - /* no cropping, use cropped videoinfo */ - pcaps = gst_caps_ref (caps); - size = MAX (size, GST_VIDEO_INFO_SIZE (cinfo)); - } - - config = gst_buffer_pool_get_config (pool); - gst_buffer_pool_config_set_params (config, pcaps, size, min, max); - gst_caps_unref (pcaps); - - if (gst_query_has_allocation_meta (query, GST_VIDEO_META_API_TYPE)) { - /* just set the option, if the pool can support it we will transparently use - * it through the video info API. We could also see if the pool support this - * option and only activate it then. */ - gst_buffer_pool_config_add_option (config, - GST_BUFFER_POOL_OPTION_VIDEO_META); - } - - gst_buffer_pool_set_config (pool, config); - /* and activate */ - gst_buffer_pool_set_active (pool, TRUE); - - gst_query_unref (query); - - return GST_FLOW_OK; + return ret; } static void @@ -375,208 +499,71 @@ static GstFlowReturn handle_sequence (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info) { GstFlowReturn ret = GST_FLOW_OK; - const mpeg2_sequence_t *sequence; - gint par_n, par_d; - GstVideoInfo vinfo, cinfo; - GstVideoFormat format; - GstCaps *caps; - gint y_size, uv_size; + GstClockTime latency; - sequence = info->sequence; + if (info->sequence->frame_period == 0) { + GST_WARNING_OBJECT (mpeg2dec, "Frame period is 0!"); + ret = GST_FLOW_ERROR; + goto done; + } - if (sequence->frame_period == 0) - goto invalid_frame_period; + mpeg2dec->width = info->sequence->picture_width; + mpeg2dec->height = info->sequence->picture_height; + mpeg2dec->decoded_width = info->sequence->width; + mpeg2dec->decoded_height = info->sequence->height; + + /* don't take the sequence PAR if we already have one from the sink caps */ + if (!mpeg2dec->have_par) { + mpeg2dec->pixel_width = info->sequence->pixel_width; + mpeg2dec->pixel_height = info->sequence->pixel_height; + } /* mpeg2 video can only be from 16x16 to 4096x4096. 
Everything - * else is a corrupted file */ - if (sequence->width > 4096 || sequence->width < 16 || - sequence->height > 4096 || sequence->height < 16) - goto invalid_size; - - GST_DEBUG_OBJECT (mpeg2dec, - "widthxheight: %dx%d , decoded_widthxheight: %dx%d", - sequence->picture_width, sequence->picture_height, sequence->width, - sequence->height); - - if (sequence->picture_width != sequence->width || - sequence->picture_height != sequence->height) { - GST_DEBUG_OBJECT (mpeg2dec, "we need to crop"); - mpeg2dec->need_cropping = TRUE; - } else { - GST_DEBUG_OBJECT (mpeg2dec, "no cropping needed"); - mpeg2dec->need_cropping = FALSE; + * else is a corrupted files */ + if (mpeg2dec->width > 4096 || mpeg2dec->width < 16 || + mpeg2dec->height > 4096 || mpeg2dec->height < 16) { + GST_ERROR_OBJECT (mpeg2dec, "Invalid frame dimensions: %d x %d", + mpeg2dec->width, mpeg2dec->height); + return GST_FLOW_ERROR; } - y_size = sequence->width * sequence->height; - /* get subsampling */ - if (sequence->chroma_width < sequence->width) { - /* horizontally subsampled */ - if (sequence->chroma_height < sequence->height) { - /* and vertically subsamples */ - format = GST_VIDEO_FORMAT_I420; - uv_size = y_size >> 2; - } else { - format = GST_VIDEO_FORMAT_Y42B; - uv_size = y_size >> 1; - } - } else { - /* not subsampled */ - format = GST_VIDEO_FORMAT_Y444; - uv_size = y_size; - } - - /* calculate size and offsets of the decoded frames */ - mpeg2dec->size = y_size + 2 * (uv_size); - mpeg2dec->u_offs = y_size; - mpeg2dec->v_offs = y_size + uv_size; - - /* we store the codec size before cropping */ - gst_video_info_init (&vinfo); - gst_video_info_set_format (&vinfo, format, sequence->width, sequence->height); - - /* sink caps par overrides sequence PAR */ - if (mpeg2dec->have_par) { - par_n = mpeg2dec->in_par_n; - par_d = mpeg2dec->in_par_d; - GST_DEBUG_OBJECT (mpeg2dec, "using sink par %d:%d", par_n, par_d); - } else { - par_n = sequence->pixel_width; - par_d = sequence->pixel_height; - GST_DEBUG_OBJECT (mpeg2dec, "using encoded par %d:%d", par_n, par_d); - } - - if (par_n == 0 || par_d == 0) { - if (!gst_util_fraction_multiply (4, 3, sequence->picture_height, - sequence->picture_width, &par_n, &par_d)) - par_n = par_d = 1; - - GST_WARNING_OBJECT (mpeg2dec, "Unknown par, assuming %d:%d", par_n, par_d); - } - vinfo.par_n = par_n; - vinfo.par_d = par_d; - /* set framerate */ - vinfo.fps_n = 27000000; - vinfo.fps_d = sequence->frame_period; - mpeg2dec->frame_period = sequence->frame_period * GST_USECOND / 27; + mpeg2dec->fps_n = 27000000; + mpeg2dec->fps_d = info->sequence->frame_period; + mpeg2dec->frame_period = info->sequence->frame_period * GST_USECOND / 27; - if (!(sequence->flags & SEQ_FLAG_PROGRESSIVE_SEQUENCE)) - vinfo.interlace_mode = GST_VIDEO_INTERLACE_MODE_MIXED; - else - vinfo.interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE; + /* Mpeg2dec has 1 frame latency to produce a picture and 1 frame latency in + * it's parser */ + latency = 2 * mpeg2dec->frame_period; + gst_video_decoder_set_latency (GST_VIDEO_DECODER (mpeg2dec), latency, + latency); - vinfo.chroma_site = GST_VIDEO_CHROMA_SITE_MPEG2; - vinfo.colorimetry.range = GST_VIDEO_COLOR_RANGE_16_235; - - if (sequence->flags & SEQ_FLAG_COLOUR_DESCRIPTION) { - /* do color description */ - switch (sequence->colour_primaries) { - case 1: - vinfo.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT709; - break; - case 4: - vinfo.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT470M; - break; - case 5: - vinfo.colorimetry.primaries = 
GST_VIDEO_COLOR_PRIMARIES_BT470BG; - break; - case 6: - vinfo.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M; - break; - case 7: - vinfo.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE240M; - break; - /* 0 forbidden */ - /* 2 unspecified */ - /* 3 reserved */ - /* 8-255 reseved */ - default: - vinfo.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN; - break; - } - /* matrix coefficients */ - switch (sequence->matrix_coefficients) { - case 1: - vinfo.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT709; - break; - case 4: - vinfo.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_FCC; - break; - case 5: - case 6: - vinfo.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601; - break; - case 7: - vinfo.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M; - break; - /* 0 forbidden */ - /* 2 unspecified */ - /* 3 reserved */ - /* 8-255 reseved */ - default: - vinfo.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN; - break; - } - /* transfer characteristics */ - switch (sequence->transfer_characteristics) { - case 1: - vinfo.colorimetry.transfer = GST_VIDEO_TRANSFER_BT709; - break; - case 4: - vinfo.colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA22; - break; - case 5: - vinfo.colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA28; - break; - case 6: - vinfo.colorimetry.transfer = GST_VIDEO_TRANSFER_BT709; - break; - case 7: - vinfo.colorimetry.transfer = GST_VIDEO_TRANSFER_SMPTE240M; - break; - case 8: - vinfo.colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA10; - break; - /* 0 forbidden */ - /* 2 unspecified */ - /* 3 reserved */ - /* 9-255 reseved */ - default: - vinfo.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN; - break; - } - } + mpeg2dec->interlaced = + !(info->sequence->flags & SEQ_FLAG_PROGRESSIVE_SEQUENCE); GST_DEBUG_OBJECT (mpeg2dec, "sequence flags: %d, frame period: %d (%g), frame rate: %d/%d", - sequence->flags, sequence->frame_period, - (double) (mpeg2dec->frame_period) / GST_SECOND, vinfo.fps_n, vinfo.fps_d); + info->sequence->flags, info->sequence->frame_period, + (double) (mpeg2dec->frame_period) / GST_SECOND, mpeg2dec->fps_n, + mpeg2dec->fps_d); GST_DEBUG_OBJECT (mpeg2dec, "profile: %02x, colour_primaries: %d", - sequence->profile_level_id, sequence->colour_primaries); + info->sequence->profile_level_id, info->sequence->colour_primaries); GST_DEBUG_OBJECT (mpeg2dec, "transfer chars: %d, matrix coef: %d", - sequence->transfer_characteristics, sequence->matrix_coefficients); + info->sequence->transfer_characteristics, + info->sequence->matrix_coefficients); GST_DEBUG_OBJECT (mpeg2dec, "FLAGS: CONSTRAINED_PARAMETERS:%d, PROGRESSIVE_SEQUENCE:%d", - sequence->flags & SEQ_FLAG_CONSTRAINED_PARAMETERS, - sequence->flags & SEQ_FLAG_PROGRESSIVE_SEQUENCE); + info->sequence->flags & SEQ_FLAG_CONSTRAINED_PARAMETERS, + info->sequence->flags & SEQ_FLAG_PROGRESSIVE_SEQUENCE); GST_DEBUG_OBJECT (mpeg2dec, "FLAGS: LOW_DELAY:%d, COLOUR_DESCRIPTION:%d", - sequence->flags & SEQ_FLAG_LOW_DELAY, - sequence->flags & SEQ_FLAG_COLOUR_DESCRIPTION); + info->sequence->flags & SEQ_FLAG_LOW_DELAY, + info->sequence->flags & SEQ_FLAG_COLOUR_DESCRIPTION); - /* for the output caps we always take the cropped dimensions */ - cinfo = vinfo; - gst_video_info_set_format (&cinfo, GST_VIDEO_INFO_FORMAT (&vinfo), - sequence->picture_width, sequence->picture_height); - caps = gst_video_info_to_caps (&cinfo); - gst_pad_set_caps (mpeg2dec->srcpad, caps); - - gst_mpeg2dec_negotiate_pool (mpeg2dec, caps, &vinfo, &cinfo); - gst_caps_unref (caps); - - mpeg2dec->vinfo = vinfo; - mpeg2dec->cinfo = cinfo; + if 
(!gst_mpeg2dec_negotiate_format (mpeg2dec)) + goto negotiate_failed; mpeg2_custom_fbuf (mpeg2dec->decoder, 1); + init_dummybuf (mpeg2dec); /* Pump in some null buffers, because otherwise libmpeg2 doesn't @@ -585,316 +572,78 @@ handle_sequence (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info) mpeg2_set_buf (mpeg2dec->decoder, mpeg2dec->dummybuf, NULL); mpeg2_set_buf (mpeg2dec->decoder, mpeg2dec->dummybuf, NULL); - mpeg2dec->need_sequence = FALSE; - done: return ret; -invalid_frame_period: +negotiate_failed: { - GST_WARNING_OBJECT (mpeg2dec, "Frame period is 0!"); - ret = GST_FLOW_ERROR; + GST_ELEMENT_ERROR (mpeg2dec, CORE, NEGOTIATION, (NULL), (NULL)); + ret = GST_FLOW_NOT_NEGOTIATED; goto done; } -invalid_size: - { - GST_ERROR_OBJECT (mpeg2dec, "Invalid frame dimensions: %d x %d", - sequence->width, sequence->height); - return GST_FLOW_ERROR; - } } static void clear_buffers (GstMpeg2dec * mpeg2dec) { - gint i; - GstVideoFrame *frame; - - for (i = 0; i < 4; i++) { - frame = &mpeg2dec->ip_frame[i]; - if (frame->buffer) { - gst_video_frame_unmap (frame); - gst_buffer_unref (frame->buffer); - frame->buffer = NULL; - } + GList *l; + while ((l = g_list_first (mpeg2dec->buffers))) { + gst_buffer_unref (GST_BUFFER (l->data)); + mpeg2dec->buffers = g_list_delete_link (mpeg2dec->buffers, l); } - frame = &mpeg2dec->b_frame; - if (frame->buffer) { - gst_video_frame_unmap (frame); - gst_buffer_unref (frame->buffer); - frame->buffer = NULL; - } -} - -static void -clear_queued (GstMpeg2dec * mpeg2dec) -{ - g_list_foreach (mpeg2dec->queued, (GFunc) gst_mini_object_unref, NULL); - g_list_free (mpeg2dec->queued); - mpeg2dec->queued = NULL; } static GstFlowReturn -flush_queued (GstMpeg2dec * mpeg2dec) +handle_picture (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info, + GstVideoCodecFrame * frame) { - GstFlowReturn res = GST_FLOW_OK; - - while (mpeg2dec->queued) { - GstBuffer *buf = GST_BUFFER_CAST (mpeg2dec->queued->data); - - GST_LOG_OBJECT (mpeg2dec, "pushing buffer %p, timestamp %" - GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT, buf, - GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), - GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); - - /* iterate ouput queue an push downstream */ - res = gst_pad_push (mpeg2dec->srcpad, buf); - - mpeg2dec->queued = g_list_delete_link (mpeg2dec->queued, mpeg2dec->queued); - } - return res; -} - -static GstFlowReturn -handle_picture (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info) -{ - gboolean key_frame = FALSE; - GstBuffer *outbuf; - GstVideoFrame *frame; - GstFlowReturn ret; + GstFlowReturn ret = GST_FLOW_OK; gint type; - guint8 *buf[3]; + const gchar *type_str = NULL; + gboolean key_frame = FALSE; + const mpeg2_picture_t *picture = info->current_picture; - if (mpeg2dec->need_cropping && !mpeg2dec->has_cropping) { - GstAllocationParams params = { 0, }; - - /* we need to crop manually */ - params.align = 15; - outbuf = - gst_buffer_new_allocate (NULL, GST_VIDEO_INFO_SIZE (&mpeg2dec->vinfo), - ¶ms); - ret = GST_FLOW_OK; - } else { - ret = gst_buffer_pool_acquire_buffer (mpeg2dec->pool, &outbuf, NULL); - if (G_UNLIKELY (ret != GST_FLOW_OK)) - goto no_buffer; - } - - /* we store the original byteoffset of this picture in the stream here - * because we need it for indexing */ - GST_BUFFER_OFFSET (outbuf) = mpeg2dec->offset; - - if (info->current_picture) { - type = info->current_picture->flags & PIC_MASK_CODING_TYPE; - } else { - type = 0; - } - - GST_DEBUG_OBJECT (mpeg2dec, "handle picture type %d", type); - - key_frame = type == PIC_FLAG_CODING_TYPE_I; + 
gst_mpeg2dec_alloc_buffer (mpeg2dec, mpeg2dec->offset, frame); + type = picture->flags & PIC_MASK_CODING_TYPE; switch (type) { case PIC_FLAG_CODING_TYPE_I: + key_frame = TRUE; mpeg2_skip (mpeg2dec->decoder, 0); - if (mpeg2dec->segment.rate < 0.0) { - /* negative rate, flush the queued pictures in reverse */ - GST_DEBUG_OBJECT (mpeg2dec, "flushing queued buffers"); - flush_queued (mpeg2dec); - } - /* fallthrough */ + type_str = "I"; + break; case PIC_FLAG_CODING_TYPE_P: - frame = &mpeg2dec->ip_frame[mpeg2dec->ip_framepos]; - GST_DEBUG_OBJECT (mpeg2dec, "I/P unref %p, ref %p", frame, outbuf); - mpeg2dec->ip_framepos = (mpeg2dec->ip_framepos + 1) & 3; + type_str = "P"; break; case PIC_FLAG_CODING_TYPE_B: - frame = &mpeg2dec->b_frame; - GST_DEBUG_OBJECT (mpeg2dec, "B unref %p, ref %p", frame, outbuf); + type_str = "B"; break; default: - goto unknown_frame; + gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame); + GST_VIDEO_DECODER_ERROR (mpeg2dec, 1, STREAM, DECODE, + ("decoding error"), ("Invalid picture type"), ret); + return ret; } - if (frame->buffer) { - gst_video_frame_unmap (frame); - gst_buffer_unref (frame->buffer); - frame->buffer = NULL; + GST_DEBUG_OBJECT (mpeg2dec, "handle picture type %s", type_str); + GST_DEBUG_OBJECT (mpeg2dec, "picture %s, frame %i, offset %" G_GINT64_FORMAT, + key_frame ? ", kf," : " ", frame->system_frame_number, + GST_BUFFER_OFFSET (frame->output_buffer)); + + if (picture->flags & PIC_FLAG_TOP_FIELD_FIRST) { + GST_VIDEO_CODEC_FRAME_FLAG_SET (frame, GST_VIDEO_CODEC_FRAME_FLAG_TFF); } - - if (mpeg2dec->need_cropping && mpeg2dec->has_cropping) { - GstVideoCropMeta *crop; - - crop = gst_buffer_add_video_crop_meta (outbuf); - /* we can do things slightly more efficient when we know that - * downstream understands clipping */ - crop->x = 0; - crop->y = 0; - crop->width = info->sequence->picture_width; - crop->height = info->sequence->picture_height; - } - - gst_video_frame_map (frame, &mpeg2dec->vinfo, outbuf, GST_MAP_WRITE); - - buf[0] = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); - buf[1] = GST_VIDEO_FRAME_PLANE_DATA (frame, 1); - buf[2] = GST_VIDEO_FRAME_PLANE_DATA (frame, 2); - - GST_DEBUG_OBJECT (mpeg2dec, "set_buf: %p %p %p, outbuf %p", - buf[0], buf[1], buf[2], outbuf); - - mpeg2_set_buf (mpeg2dec->decoder, buf, frame); - - GST_DEBUG_OBJECT (mpeg2dec, "picture %s, outbuf %p, offset %" - G_GINT64_FORMAT, - key_frame ? ", kf," : " ", outbuf, GST_BUFFER_OFFSET (outbuf) - ); - - if (mpeg2dec->discont_state == MPEG2DEC_DISC_NEW_PICTURE && key_frame) - mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_KEYFRAME; - - return ret; - -no_buffer: - { - return ret; - } -unknown_frame: - { - return ret; - } -} - -/* try to clip the buffer to the segment boundaries */ -static gboolean -clip_buffer (GstMpeg2dec * dec, GstBuffer * buf) -{ - gboolean res = TRUE; - GstClockTime in_ts, in_dur, stop; - guint64 cstart, cstop; - - in_ts = GST_BUFFER_TIMESTAMP (buf); - in_dur = GST_BUFFER_DURATION (buf); - - GST_LOG_OBJECT (dec, - "timestamp:%" GST_TIME_FORMAT " , duration:%" GST_TIME_FORMAT, - GST_TIME_ARGS (in_ts), GST_TIME_ARGS (in_dur)); - - /* can't clip without TIME segment */ - if (dec->segment.format != GST_FORMAT_TIME) - goto beach; - - /* we need a start time */ - if (!GST_CLOCK_TIME_IS_VALID (in_ts)) - goto beach; - - /* generate valid stop, if duration unknown, we have unknown stop */ - stop = - GST_CLOCK_TIME_IS_VALID (in_dur) ? 
(in_ts + in_dur) : GST_CLOCK_TIME_NONE; - - /* now clip */ - if (!(res = gst_segment_clip (&dec->segment, GST_FORMAT_TIME, - in_ts, stop, &cstart, &cstop))) - goto beach; - - /* update timestamp and possibly duration if the clipped stop time is - * valid */ - GST_BUFFER_TIMESTAMP (buf) = cstart; - if (GST_CLOCK_TIME_IS_VALID (cstop)) - GST_BUFFER_DURATION (buf) = cstop - cstart; - -beach: - GST_LOG_OBJECT (dec, "%sdropping", (res ? "not " : "")); - return res; -} - -static GstFlowReturn -handle_slice (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info) -{ - GstBuffer *outbuf = NULL; - GstFlowReturn ret = GST_FLOW_OK; - const mpeg2_picture_t *picture; - gboolean key_frame = FALSE; - GstClockTime time; - GstVideoFrame *frame; - - GST_DEBUG_OBJECT (mpeg2dec, "picture slice/end %p %p %p %p", - info->display_fbuf, - info->display_picture, info->current_picture, - (info->display_fbuf ? info->display_fbuf->id : NULL)); - - if (!info->display_fbuf || !info->display_fbuf->id) - goto no_display; - - frame = (GstVideoFrame *) (info->display_fbuf->id); - outbuf = frame->buffer; - - picture = info->display_picture; - - key_frame = (picture->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_I; - - GST_DEBUG_OBJECT (mpeg2dec, "picture flags: %d, type: %d, keyframe: %d", - picture->flags, picture->flags & PIC_MASK_CODING_TYPE, key_frame); - - if (key_frame) { - GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT); - mpeg2_skip (mpeg2dec->decoder, 0); - } else { - GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT); - } - - if (mpeg2dec->discont_state == MPEG2DEC_DISC_NEW_KEYFRAME && key_frame) - mpeg2dec->discont_state = MPEG2DEC_DISC_NONE; - - time = GST_CLOCK_TIME_NONE; - -#if MPEG2_RELEASE < MPEG2_VERSION(0,4,0) - if (picture->flags & PIC_FLAG_PTS) { - time = MPEG_TIME_TO_GST_TIME (picture->pts); - GST_DEBUG_OBJECT (mpeg2dec, "picture pts %" G_GUINT64_FORMAT - ", time %" GST_TIME_FORMAT, picture->pts, GST_TIME_ARGS (time)); - } -#else - if (picture->flags & PIC_FLAG_TAGS) { - guint64 pts = (((guint64) picture->tag2) << 32) | picture->tag; - - time = MPEG_TIME_TO_GST_TIME (pts); - GST_DEBUG_OBJECT (mpeg2dec, "picture tags %" G_GUINT64_FORMAT - ", time %" GST_TIME_FORMAT, pts, GST_TIME_ARGS (time)); - } -#endif - - if (time == GST_CLOCK_TIME_NONE) { - time = mpeg2dec->next_time; - GST_DEBUG_OBJECT (mpeg2dec, "picture didn't have pts"); - } else { - GST_DEBUG_OBJECT (mpeg2dec, - "picture had pts %" GST_TIME_FORMAT ", we had %" - GST_TIME_FORMAT, GST_TIME_ARGS (time), - GST_TIME_ARGS (mpeg2dec->next_time)); - mpeg2dec->next_time = time; - } - GST_BUFFER_TIMESTAMP (outbuf) = time; - - /* TODO set correct offset here based on frame number */ - if (info->display_picture_2nd) { - GST_BUFFER_DURATION (outbuf) = (picture->nb_fields + - info->display_picture_2nd->nb_fields) * mpeg2dec->frame_period / 2; - } else { - GST_BUFFER_DURATION (outbuf) = - picture->nb_fields * mpeg2dec->frame_period / 2; - } - mpeg2dec->next_time += GST_BUFFER_DURATION (outbuf); - - if (picture->flags & PIC_FLAG_TOP_FIELD_FIRST) - GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF); - #if MPEG2_RELEASE >= MPEG2_VERSION(0,5,0) /* repeat field introduced in 0.5.0 */ - if (picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD) - GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_RFF); + if (picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD) { + GST_VIDEO_CODEC_FRAME_FLAG_SET (frame, GST_VIDEO_CODEC_FRAME_FLAG_RFF); + } #endif + if (mpeg2dec->discont_state == MPEG2DEC_DISC_NEW_PICTURE && key_frame) { + mpeg2dec->discont_state = 
MPEG2DEC_DISC_NEW_KEYFRAME; + } + GST_DEBUG_OBJECT (mpeg2dec, "picture: %s %s %s %s %s fields:%d off:%" G_GINT64_FORMAT " ts:%" GST_TIME_FORMAT, @@ -907,179 +656,111 @@ handle_slice (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info) #endif (picture->flags & PIC_FLAG_SKIP ? "skip" : " "), (picture->flags & PIC_FLAG_COMPOSITE_DISPLAY ? "composite" : " "), - picture->nb_fields, GST_BUFFER_OFFSET (outbuf), - GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf))); + picture->nb_fields, mpeg2dec->offset, GST_TIME_ARGS (frame->pts)); - if (picture->flags & PIC_FLAG_SKIP) - goto skip; - - if (mpeg2dec->discont_state != MPEG2DEC_DISC_NONE) - goto drop; - - /* check for clipping */ - if (!clip_buffer (mpeg2dec, outbuf)) - goto clipped; - - if (GST_CLOCK_TIME_IS_VALID (time)) { - gboolean need_skip; - GstClockTime qostime; - - /* qos needs to be done on running time */ - qostime = gst_segment_to_running_time (&mpeg2dec->segment, GST_FORMAT_TIME, - time); - - GST_OBJECT_LOCK (mpeg2dec); - /* check for QoS, don't perform the last steps of getting and - * pushing the buffers that are known to be late. */ - /* FIXME, we can also entirely skip decoding if the next valid buffer is - * known to be after a keyframe (using the granule_shift) */ - need_skip = mpeg2dec->earliest_time != -1 - && qostime <= mpeg2dec->earliest_time; - GST_OBJECT_UNLOCK (mpeg2dec); - - if (need_skip) { - GstMessage *qos_msg; - guint64 stream_time; - gint64 jitter; - - mpeg2dec->dropped++; - - stream_time = - gst_segment_to_stream_time (&mpeg2dec->segment, GST_FORMAT_TIME, - time); - jitter = GST_CLOCK_DIFF (qostime, mpeg2dec->earliest_time); - - qos_msg = - gst_message_new_qos (GST_OBJECT_CAST (mpeg2dec), FALSE, qostime, - stream_time, time, GST_BUFFER_DURATION (outbuf)); - gst_message_set_qos_values (qos_msg, jitter, mpeg2dec->proportion, - 1000000); - gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS, - mpeg2dec->processed, mpeg2dec->dropped); - gst_element_post_message (GST_ELEMENT_CAST (mpeg2dec), qos_msg); - - goto dropping_qos; - } - } - - mpeg2dec->processed++; - - /* ref before pushing it out, so we still have the ref in our - * array of buffers */ - gst_buffer_ref (outbuf); - - /* do cropping if the target region is smaller than the input one */ - if (mpeg2dec->need_cropping && !mpeg2dec->has_cropping) { - GST_DEBUG_OBJECT (mpeg2dec, "cropping buffer"); - ret = gst_mpeg2dec_crop_buffer (mpeg2dec, &outbuf, frame); - if (ret != GST_FLOW_OK) - goto done; - } - - if (mpeg2dec->segment.rate >= 0.0) { - /* forward: push right away */ - GST_LOG_OBJECT (mpeg2dec, "pushing buffer %p, timestamp %" - GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT, - outbuf, - GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)), - GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf))); - GST_LOG_OBJECT (mpeg2dec, "... with flags %x", GST_BUFFER_FLAGS (outbuf)); - - ret = gst_pad_push (mpeg2dec->srcpad, outbuf); - GST_DEBUG_OBJECT (mpeg2dec, "pushed with result %s", - gst_flow_get_name (ret)); - } else { - /* reverse: queue, we'll push in reverse when we receive the next (previous) - * keyframe. 
*/ - GST_DEBUG_OBJECT (mpeg2dec, "queued frame"); - mpeg2dec->queued = g_list_prepend (mpeg2dec->queued, outbuf); - ret = GST_FLOW_OK; - } -done: - return ret; - - /* special cases */ -no_display: - { - GST_DEBUG_OBJECT (mpeg2dec, "no picture to display"); - return GST_FLOW_OK; - } -skip: - { - GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer because of skip flag"); - return GST_FLOW_OK; - } -drop: - { - GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer, discont state %d", - mpeg2dec->discont_state); - return GST_FLOW_OK; - } -clipped: - { - GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer, clipped"); - return GST_FLOW_OK; - } -dropping_qos: - { - GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer because of QoS"); - return GST_FLOW_OK; - } + return GST_FLOW_OK; } static GstFlowReturn -gst_mpeg2dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) +handle_slice (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info) { - GstMpeg2dec *mpeg2dec; - GstMapInfo map; - GstClockTime pts; + GstFlowReturn ret = GST_FLOW_OK; + GstVideoCodecFrame *frame; + const mpeg2_picture_t *picture; + gboolean key_frame = FALSE; + + GST_DEBUG_OBJECT (mpeg2dec, "picture slice/end %p %p %p %p", + info->display_fbuf, info->display_picture, info->current_picture, + info->display_fbuf->id); + + frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (mpeg2dec), + GPOINTER_TO_INT (info->display_fbuf->id)); + picture = info->display_picture; + key_frame = (picture->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_I; + + if (G_UNLIKELY (!frame)) { + GST_WARNING ("display buffer does not have a valid frame"); + return GST_FLOW_ERROR; + } + + GST_DEBUG_OBJECT (mpeg2dec, "picture flags: %d, type: %d, keyframe: %d", + picture->flags, picture->flags & PIC_MASK_CODING_TYPE, key_frame); + + if (key_frame) { + mpeg2_skip (mpeg2dec->decoder, 0); + } + + if (mpeg2dec->discont_state == MPEG2DEC_DISC_NEW_KEYFRAME && key_frame) + mpeg2dec->discont_state = MPEG2DEC_DISC_NONE; + + if (mpeg2dec->index) { + gst_index_add_association (mpeg2dec->index, mpeg2dec->index_id, + (key_frame ? 
GST_ASSOCIATION_FLAG_KEY_UNIT : + GST_ASSOCIATION_FLAG_DELTA_UNIT), + GST_FORMAT_BYTES, GST_BUFFER_OFFSET (frame->output_buffer), + GST_FORMAT_TIME, frame->pts, 0); + } + + if (picture->flags & PIC_FLAG_SKIP) { + GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer because of skip flag"); + gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame); + mpeg2_skip (mpeg2dec->decoder, 1); + return GST_FLOW_OK; + } + + if (mpeg2dec->discont_state != MPEG2DEC_DISC_NONE) { + GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer, discont state %d", + mpeg2dec->discont_state); + gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame); + return GST_FLOW_OK; + } + + /* do cropping if the target region is smaller than the input one */ + if (mpeg2dec->decoded_width != mpeg2dec->width || + mpeg2dec->decoded_height != mpeg2dec->height) { + if (gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (mpeg2dec), + frame) < 0) { + GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer crop, too late"); + gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame); + return GST_FLOW_OK; + } + + ret = gst_mpeg2dec_crop_buffer (mpeg2dec, &frame->output_buffer); + } + + GST_DEBUG_OBJECT (mpeg2dec, "cropping buffer"); + gst_video_decoder_finish_frame (GST_VIDEO_DECODER (mpeg2dec), frame); + + return ret; +} + +static GstFlowReturn +gst_mpeg2dec_handle_frame (GstVideoDecoder * decoder, + GstVideoCodecFrame * frame) +{ + GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder); + GstBuffer *buf = frame->input_buffer; + guint32 size; + guint8 *data, *end; const mpeg2_info_t *info; mpeg2_state_t state; gboolean done = FALSE; GstFlowReturn ret = GST_FLOW_OK; - mpeg2dec = GST_MPEG2DEC (parent); - - gst_buffer_map (buf, &map, GST_MAP_READ); - pts = GST_BUFFER_TIMESTAMP (buf); - - if (GST_BUFFER_IS_DISCONT (buf)) { - GST_LOG_OBJECT (mpeg2dec, "DISCONT, reset decoder"); - /* when we receive a discont, reset our state as to not create too much - * distortion in the picture due to missing packets */ - mpeg2_reset (mpeg2dec->decoder, 0); - mpeg2_skip (mpeg2dec->decoder, 1); - mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_PICTURE; - } - GST_LOG_OBJECT (mpeg2dec, "received buffer, timestamp %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT, - GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), - GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); + GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->duration)); + + size = GST_BUFFER_SIZE (buf); + data = GST_BUFFER_DATA (buf); info = mpeg2dec->info; + end = data + size; mpeg2dec->offset = GST_BUFFER_OFFSET (buf); - if (pts != GST_CLOCK_TIME_NONE) { - gint64 mpeg_pts = GST_TIME_TO_MPEG_TIME (pts); - - GST_DEBUG_OBJECT (mpeg2dec, - "have pts: %" G_GINT64_FORMAT " (%" GST_TIME_FORMAT ")", - mpeg_pts, GST_TIME_ARGS (MPEG_TIME_TO_GST_TIME (mpeg_pts))); - -#if MPEG2_RELEASE >= MPEG2_VERSION(0,4,0) - mpeg2_tag_picture (mpeg2dec->decoder, mpeg_pts & 0xffffffff, - mpeg_pts >> 32); -#else - mpeg2_pts (mpeg2dec->decoder, mpeg_pts); -#endif - } else { - GST_LOG ("no pts"); - } - GST_LOG_OBJECT (mpeg2dec, "calling mpeg2_buffer"); - mpeg2_buffer (mpeg2dec->decoder, map.data, map.data + map.size); + mpeg2_buffer (mpeg2dec->decoder, data, end); GST_LOG_OBJECT (mpeg2dec, "calling mpeg2_buffer done"); while (!done) { @@ -1100,19 +781,10 @@ gst_mpeg2dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) * could be done. 
*/ if (ret == GST_FLOW_ERROR) { - mpeg2dec->error_count++; - GST_WARNING_OBJECT (mpeg2dec, "Decoding error #%d", - mpeg2dec->error_count); - if (mpeg2dec->error_count >= WARN_THRESHOLD && WARN_THRESHOLD > 0) { - GST_ELEMENT_WARNING (mpeg2dec, STREAM, DECODE, - ("%d consecutive decoding errors", mpeg2dec->error_count), - (NULL)); - } - mpeg2_reset (mpeg2dec->decoder, 0); - mpeg2_skip (mpeg2dec->decoder, 1); - mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_PICTURE; - - goto exit; + GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE, + ("decoding error"), ("Bad sequence header"), ret); + gst_mpeg2dec_reset (decoder, 0); + goto done; } break; case STATE_SEQUENCE_REPEATED: @@ -1122,7 +794,7 @@ gst_mpeg2dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) GST_DEBUG_OBJECT (mpeg2dec, "gop"); break; case STATE_PICTURE: - ret = handle_picture (mpeg2dec, info); + ret = handle_picture (mpeg2dec, info, frame); break; case STATE_SLICE_1ST: GST_LOG_OBJECT (mpeg2dec, "1st slice of frame encountered"); @@ -1137,265 +809,46 @@ gst_mpeg2dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) #endif case STATE_END: GST_DEBUG_OBJECT (mpeg2dec, "end"); - mpeg2dec->need_sequence = TRUE; case STATE_SLICE: - ret = handle_slice (mpeg2dec, info); + if (info->display_fbuf && info->display_fbuf->id) { + ret = handle_slice (mpeg2dec, info); + } else { + GST_DEBUG_OBJECT (mpeg2dec, "no picture to display"); + } + if (info->discard_fbuf && info->discard_fbuf->id) { + GList *l = g_list_find (mpeg2dec->buffers, info->discard_fbuf->id); + if (l) { + gst_buffer_unref (GST_BUFFER (l->data)); + mpeg2dec->buffers = g_list_delete_link (mpeg2dec->buffers, l); + } + } + if (state != STATE_SLICE) { + clear_buffers (mpeg2dec); + } break; case STATE_BUFFER: done = TRUE; break; /* error */ case STATE_INVALID: - /* FIXME: at some point we should probably send newsegment events to - * let downstream know that parts of the stream are missing */ - mpeg2dec->error_count++; - GST_WARNING_OBJECT (mpeg2dec, "Decoding error #%d", - mpeg2dec->error_count); - if (mpeg2dec->error_count >= WARN_THRESHOLD && WARN_THRESHOLD > 0) { - GST_ELEMENT_WARNING (mpeg2dec, STREAM, DECODE, - ("%d consecutive decoding errors", mpeg2dec->error_count), - (NULL)); - } + GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE, + ("decoding error"), ("Reached libmpeg2 invalid state"), ret); continue; default: GST_ERROR_OBJECT (mpeg2dec, "Unknown libmpeg2 state %d, FIXME", state); - goto exit; + ret = GST_FLOW_OK; + goto done; } - mpeg2dec->error_count = 0; - - /* - * FIXME: should pass more information such as state the user data is from - */ -#ifdef enable_user_data - if (info->user_data_len > 0) { - GstBuffer *udbuf = - gst_buffer_new_allocate (NULL, info->user_data_len, NULL); - - gst_buffer_fill (udbuf, 0, info->user_data, info->user_data_len); - - gst_pad_push (mpeg2dec->userdatapad, udbuf); - } -#endif - if (ret != GST_FLOW_OK) { GST_DEBUG_OBJECT (mpeg2dec, "exit loop, reason %s", gst_flow_get_name (ret)); break; } } -done: - gst_buffer_unmap (buf, &map); - gst_buffer_unref (buf); - return ret; - - /* errors */ -exit: - { - ret = GST_FLOW_OK; - goto done; - } -} - -static gboolean -gst_mpeg2dec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) -{ - GstMpeg2dec *mpeg2dec; - gboolean ret = TRUE; - - mpeg2dec = GST_MPEG2DEC (parent); - - GST_DEBUG_OBJECT (mpeg2dec, "Got %s event on sink pad", - GST_EVENT_TYPE_NAME (event)); - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_CAPS: - { - GstCaps *caps; - - gst_event_parse_caps (event, 
&caps); - ret = gst_mpeg2dec_setcaps (pad, caps); - gst_event_unref (event); - break; - } - case GST_EVENT_SEGMENT: - { - GstSegment seg; - - gst_event_copy_segment (event, &seg); - - /* we need TIME */ - if (seg.format != GST_FORMAT_TIME) - goto newseg_wrong_format; - - /* now configure the values */ - mpeg2dec->segment = seg; - - GST_DEBUG_OBJECT (mpeg2dec, "Pushing seg %" GST_SEGMENT_FORMAT, &seg); - - ret = gst_pad_push_event (mpeg2dec->srcpad, event); - break; - } - case GST_EVENT_FLUSH_START: - ret = gst_pad_push_event (mpeg2dec->srcpad, event); - break; - case GST_EVENT_FLUSH_STOP: - { - mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_PICTURE; - mpeg2dec->next_time = -1;; - gst_mpeg2dec_qos_reset (mpeg2dec); - mpeg2_reset (mpeg2dec->decoder, 0); - mpeg2_skip (mpeg2dec->decoder, 1); - clear_queued (mpeg2dec); - ret = gst_pad_push_event (mpeg2dec->srcpad, event); - break; - } - case GST_EVENT_EOS: - ret = gst_pad_push_event (mpeg2dec->srcpad, event); - break; - default: - ret = gst_pad_push_event (mpeg2dec->srcpad, event); - break; - } done: - return ret; - - /* ERRORS */ -newseg_wrong_format: - { - GST_DEBUG_OBJECT (mpeg2dec, "received non TIME newsegment"); - gst_event_unref (event); - goto done; - } -} - -static gboolean -gst_mpeg2dec_setcaps (GstPad * pad, GstCaps * caps) -{ - GstMpeg2dec *mpeg2dec; - GstStructure *s; - - mpeg2dec = GST_MPEG2DEC (gst_pad_get_parent (pad)); - - s = gst_caps_get_structure (caps, 0); - - /* parse the par, this overrides the encoded par */ - mpeg2dec->have_par = gst_structure_get_fraction (s, "pixel-aspect-ratio", - &mpeg2dec->in_par_n, &mpeg2dec->in_par_d); - - gst_object_unref (mpeg2dec); - - return TRUE; -} - -static gboolean -gst_mpeg2dec_src_event (GstPad * pad, GstObject * parent, GstEvent * event) -{ - gboolean res; - GstMpeg2dec *mpeg2dec; - - mpeg2dec = GST_MPEG2DEC (parent); - - if (mpeg2dec->decoder == NULL) - goto no_decoder; - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_QOS: - { - GstQOSType type; - gdouble proportion; - GstClockTimeDiff diff; - GstClockTime timestamp; - - gst_event_parse_qos (event, &type, &proportion, &diff, ×tamp); - - GST_OBJECT_LOCK (mpeg2dec); - mpeg2dec->proportion = proportion; - mpeg2dec->earliest_time = timestamp + diff; - GST_OBJECT_UNLOCK (mpeg2dec); - - GST_DEBUG_OBJECT (mpeg2dec, - "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT, - GST_TIME_ARGS (timestamp), diff); - - res = gst_pad_push_event (mpeg2dec->sinkpad, event); - break; - } - case GST_EVENT_SEEK: - case GST_EVENT_NAVIGATION: - /* Forward unchanged */ - default: - res = gst_pad_push_event (mpeg2dec->sinkpad, event); - break; - } - return res; - -no_decoder: - { - GST_DEBUG_OBJECT (mpeg2dec, "no decoder, cannot handle event"); - gst_event_unref (event); - return FALSE; - } -} - -static GstStateChangeReturn -gst_mpeg2dec_change_state (GstElement * element, GstStateChange transition) -{ - GstStateChangeReturn ret; - GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (element); - - switch (transition) { - case GST_STATE_CHANGE_NULL_TO_READY: - mpeg2_accel (MPEG2_ACCEL_DETECT); - if ((mpeg2dec->decoder = mpeg2_init ()) == NULL) - goto init_failed; - mpeg2dec->info = mpeg2_info (mpeg2dec->decoder); - break; - case GST_STATE_CHANGE_READY_TO_PAUSED: - gst_mpeg2dec_reset (mpeg2dec); - gst_mpeg2dec_qos_reset (mpeg2dec); - break; - case GST_STATE_CHANGE_PAUSED_TO_PLAYING: - default: - break; - } - - ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); - - switch (transition) { - case GST_STATE_CHANGE_PLAYING_TO_PAUSED: - break; - case 
GST_STATE_CHANGE_PAUSED_TO_READY: - gst_mpeg2dec_qos_reset (mpeg2dec); - clear_queued (mpeg2dec); - if (mpeg2dec->pool) { - gst_buffer_pool_set_active (mpeg2dec->pool, FALSE); - gst_object_unref (mpeg2dec->pool); - mpeg2dec->pool = NULL; - } - break; - case GST_STATE_CHANGE_READY_TO_NULL: - if (mpeg2dec->decoder) { - mpeg2_close (mpeg2dec->decoder); - mpeg2dec->decoder = NULL; - mpeg2dec->info = NULL; - } - clear_buffers (mpeg2dec); - break; - default: - break; - } - return ret; - - /* ERRORS */ -init_failed: - { - GST_ELEMENT_ERROR (mpeg2dec, LIBRARY, INIT, - (NULL), ("Failed to initialize libmpeg2 library")); - return GST_STATE_CHANGE_FAILURE; - } } static gboolean @@ -1410,6 +863,6 @@ plugin_init (GstPlugin * plugin) GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, GST_VERSION_MINOR, - mpeg2dec, + "mpeg2dec", "LibMpeg2 decoder", plugin_init, VERSION, "GPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN); diff --git a/ext/mpeg2dec/gstmpeg2dec.h b/ext/mpeg2dec/gstmpeg2dec.h index eaf6fa4658..edeba03758 100644 --- a/ext/mpeg2dec/gstmpeg2dec.h +++ b/ext/mpeg2dec/gstmpeg2dec.h @@ -24,8 +24,7 @@ #include #include -#include -#include +#include #include G_BEGIN_DECLS @@ -55,12 +54,7 @@ typedef enum } DiscontState; struct _GstMpeg2dec { - GstElement element; - - /* pads */ - GstPad *sinkpad, - *srcpad, - *userdatapad; + GstVideoDecoder element; mpeg2dec_t *decoder; const mpeg2_info_t *info; @@ -68,30 +62,28 @@ struct _GstMpeg2dec { gboolean closed; gboolean have_fbuf; - /* buffer management */ - guint ip_framepos; - GstVideoFrame ip_frame[4]; - GstVideoFrame b_frame; + /* Buffer lifetime management */ + GList *buffers; + /* FIXME This should not be necessary. It is used to prevent image + * corruption when the parser does not behave the way it should. + * See https://bugzilla.gnome.org/show_bug.cgi?id=674238 + */ DiscontState discont_state; /* the timestamp of the next frame */ GstClockTime next_time; - GstSegment segment; - - /* whether we have a pixel aspect ratio from the sink caps */ - gboolean have_par; - gint in_par_n; - gint in_par_d; /* video state */ - GstVideoInfo vinfo; - GstVideoInfo cinfo; - gboolean need_cropping; - gboolean has_cropping; + GstVideoCodecState *input_state; + gint width; + gint height; + gint decoded_width; + gint decoded_height; + gint pixel_width; + gint pixel_height; gint64 frame_period; gboolean interlaced; - GstBufferPool *pool; gint size; gint u_offs; @@ -99,25 +91,20 @@ struct _GstMpeg2dec { guint8 *dummybuf[4]; guint64 offset; - gboolean need_sequence; + gint fps_n; + gint fps_d; + + GstIndex *index; + gint index_id; - gint error_count; gboolean can_allocate_aligned; - /* QoS stuff */ /* with LOCK*/ - gdouble proportion; - GstClockTime earliest_time; - guint64 processed; - guint64 dropped; - - /* gather/decode queues for reverse playback */ - GList *gather; - GList *decode; - GList *queued; + /* whether we have a pixel aspect ratio from the sink caps */ + gboolean have_par; }; struct _GstMpeg2decClass { - GstElementClass parent_class; + GstVideoDecoderClass parent_class; }; GType gst_mpeg2dec_get_type(void);
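
A few stand-alone sketches of the techniques this port leans on follow. None of them use the real GStreamer or libmpeg2 API; they are reduced plain-C illustrations of the corresponding hunks above, so the type and function names in them are placeholders.

The core of the port is visible in gst_mpeg2dec_class_init(): instead of creating pads and installing chain/event handlers by hand, the element now only fills in GstVideoDecoder virtual methods (open, close, start, reset, set_format, handle_frame, finish) and lets the base class drive them. A reduced sketch of that wiring; the VideoDecoderClass type and the lifecycle order in main() are stand-ins for illustration, not the real GstVideoDecoderClass.

#include <stdio.h>
#include <stdbool.h>

/* Hypothetical stand-in for the base-class vtable; the real
 * GstVideoDecoderClass has more fields and lives in gst-plugins-base. */
typedef struct {
  bool (*open)   (void *dec);
  bool (*start)  (void *dec);
  bool (*reset)  (void *dec, bool hard);
  int  (*handle_frame) (void *dec, void *frame);
  int  (*finish) (void *dec);
  bool (*close)  (void *dec);
} VideoDecoderClass;

static bool my_open  (void *dec)            { puts ("open: mpeg2_init()");        return true; }
static bool my_start (void *dec)            { puts ("start: reset decoder state"); return true; }
static bool my_reset (void *dec, bool hard) { printf ("reset: hard=%d\n", hard);  return true; }
static int  my_handle_frame (void *dec, void *frame) { puts ("handle_frame: feed libmpeg2"); return 0; }
static int  my_finish (void *dec)           { puts ("finish: drain");             return 0; }
static bool my_close (void *dec)            { puts ("close: mpeg2_close()");      return true; }

/* Mirrors gst_mpeg2dec_class_init(): the subclass only fills in hooks,
 * the base class decides when to call them. */
static void
my_class_init (VideoDecoderClass * klass)
{
  klass->open = my_open;
  klass->start = my_start;
  klass->reset = my_reset;
  klass->handle_frame = my_handle_frame;
  klass->finish = my_finish;
  klass->close = my_close;
}

int
main (void)
{
  VideoDecoderClass klass;

  my_class_init (&klass);
  /* Lifecycle roughly as the base class would drive it. */
  klass.open (NULL);
  klass.start (NULL);
  klass.reset (NULL, true);
  klass.handle_frame (NULL, NULL);
  klass.finish (NULL);
  klass.close (NULL);
  return 0;
}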
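
gst_mpeg2dec_crop_buffer() copies each component plane from the padded decoded size (decoded_width/decoded_height) into the tightly packed display size, taking a single-memcpy fast path when input stride, output stride and component width all match. A self-contained version of just that copy, with the plane offsets turned into plain parameters instead of being derived from the video format:

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* Copy one plane from a (possibly wider/taller) decoded surface into a
 * tightly packed output plane, mirroring the fast/slow path split in
 * gst_mpeg2dec_crop_buffer(). */
static void
copy_plane (uint8_t * dest, size_t stride_out,
    const uint8_t * src, size_t stride_in, size_t width, size_t height)
{
  if (stride_in == stride_out && stride_in == width) {
    /* FAST PATH: both planes are contiguous, one memcpy is enough. */
    memcpy (dest, src, width * height);
  } else {
    size_t line;

    for (line = 0; line < height; line++) {
      memcpy (dest, src, width);
      dest += stride_out;
      src += stride_in;
    }
  }
}

int
main (void)
{
  /* e.g. 1920x1088 decoded, 1920x1080 displayed: same stride, fewer lines */
  size_t dec_w = 1920, dec_h = 1088, out_w = 1920, out_h = 1080;
  uint8_t *src = calloc (dec_w * dec_h, 1);
  uint8_t *dest = malloc (out_w * out_h);

  copy_plane (dest, out_w, src, dec_w, out_w, out_h);

  free (src);
  free (dest);
  return 0;
}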
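
gst_mpeg2dec_alloc_sized_buf() first asks the base class for an output buffer and only keeps it if its data pointer is 16-byte aligned, since libmpeg2 requires that; after the first failure it permanently falls back (can_allocate_aligned = FALSE) to allocating size + 15 bytes and rounding the data pointer up, which is what the ALIGN_16 macro used above amounts to, assuming the usual round-up-to-multiple definition. A minimal sketch of that alignment trick:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Round a pointer up to the next 16-byte boundary (the assumed intent of
 * the ALIGN_16 macro referenced by the patch). */
static uint8_t *
align_16 (uint8_t * p)
{
  return (uint8_t *) (((uintptr_t) p + 15) & ~(uintptr_t) 15);
}

int
main (void)
{
  size_t size = 4096;
  /* Over-allocate by 15 bytes so the aligned start still leaves 'size'
   * usable bytes, mirroring gst_buffer_new_and_alloc (size + 15). */
  uint8_t *raw = malloc (size + 15);
  uint8_t *data = align_16 (raw);

  printf ("raw=%p aligned=%p (mod 16 = %u)\n",
      (void *) raw, (void *) data, (unsigned) ((uintptr_t) data % 16));

  free (raw);   /* always free the original pointer, not the aligned one */
  return 0;
}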
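
The format choice in gst_mpeg2dec_negotiate_format() compares the sequence's luma and chroma dimensions: both axes subsampled means 4:2:0 (I420), exactly one axis means 4:2:2 (Y42B), neither means 4:4:4 (Y444). The same decision as a small standalone function:

#include <stdio.h>

typedef enum { FMT_I420, FMT_Y42B, FMT_Y444 } Format;

/* Classify the chroma layout the way gst_mpeg2dec_negotiate_format() does:
 * both axes subsampled -> 4:2:0, one axis -> 4:2:2, neither -> 4:4:4. */
static Format
pick_format (int width, int height, int chroma_width, int chroma_height)
{
  int sub_x = (chroma_width != width);
  int sub_y = (chroma_height != height);

  if (sub_x && sub_y)
    return FMT_I420;
  if (sub_x != sub_y)
    return FMT_Y42B;
  return FMT_Y444;
}

int
main (void)
{
  static const char *names[] = { "I420", "Y42B", "Y444" };

  printf ("%s\n", names[pick_format (704, 576, 352, 288)]);   /* 4:2:0 */
  printf ("%s\n", names[pick_format (704, 576, 352, 576)]);   /* 4:2:2 */
  printf ("%s\n", names[pick_format (704, 576, 704, 576)]);   /* 4:4:4 */
  return 0;
}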
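
When neither the sink caps nor the sequence header provide a usable pixel-aspect-ratio, gst_mpeg2dec_negotiate_format() assumes a 4:3 display aspect ratio and derives PAR = DAR * height / width with GValue fractions. The same arithmetic with plain integers and a gcd reduction; the function names here are only for the example:

#include <stdio.h>

static long long
gcd (long long a, long long b)
{
  while (b) {
    long long t = a % b;
    a = b;
    b = t;
  }
  return a;
}

/* PAR = DAR * (height / width), reduced to lowest terms.  This mirrors the
 * gst_value_fraction_multiply() fallback in the patch, which assumes a 4:3
 * display aspect ratio when nothing else is known. */
static void
par_from_dar (int dar_n, int dar_d, int width, int height,
    int *par_n, int *par_d)
{
  long long n = (long long) dar_n * height;
  long long d = (long long) dar_d * width;
  long long g = gcd (n, d);

  *par_n = (int) (n / g);
  *par_d = (int) (d / g);
}

int
main (void)
{
  int par_n, par_d;

  /* 720x576 PAL with an assumed 4:3 DAR gives a PAR of 16:15. */
  par_from_dar (4, 3, 720, 576, &par_n, &par_d);
  printf ("assumed PAR %d:%d\n", par_n, par_d);
  return 0;
}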
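
handle_sequence() converts libmpeg2's frame_period, which is expressed in 27 MHz ticks, to nanoseconds via frame_period * GST_USECOND / 27, and then reports two frame periods of latency (one frame to produce a picture, one in libmpeg2's parser, as the patch comment notes). A quick arithmetic check of that conversion:

#include <stdint.h>
#include <stdio.h>

#define TICKS_PER_SEC   27000000ULL   /* MPEG system clock, as used by libmpeg2 */
#define NSEC_PER_USEC   1000ULL       /* GST_USECOND */

int
main (void)
{
  /* A 25 fps sequence: frame_period = 27000000 / 25 ticks. */
  uint64_t frame_period_ticks = TICKS_PER_SEC / 25;

  /* Same conversion as the patch: ticks * GST_USECOND / 27 is nanoseconds. */
  uint64_t frame_period_ns = frame_period_ticks * NSEC_PER_USEC / 27;

  /* One frame of latency for reordering plus one in the parser. */
  uint64_t latency_ns = 2 * frame_period_ns;

  printf ("frame period: %llu ns, reported latency: %llu ns\n",
      (unsigned long long) frame_period_ns, (unsigned long long) latency_ns);
  return 0;
}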
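
Buffer lifetime management changes from the fixed ip_frame[4]/b_frame slots to a GList: every buffer handed to mpeg2_set_buf() is also stored in mpeg2dec->buffers, the entry matching info->discard_fbuf->id is dropped as soon as libmpeg2 is done with it, and clear_buffers() flushes whatever is left on reset or close. A reduced model of that bookkeeping, with a trivial payload standing in for GstBuffer refs:

#include <glib.h>
#include <stdio.h>

/* Stand-in for a decoded frame buffer; the patch keeps GstBuffer refs. */
typedef struct { int id; } Frame;

static GList *buffers = NULL;

static void
remember (Frame * f)
{
  buffers = g_list_prepend (buffers, f);
}

/* Like the discard_fbuf handling: drop exactly one tracked buffer. */
static void
discard (Frame * f)
{
  GList *l = g_list_find (buffers, f);

  if (l) {
    g_free (l->data);
    buffers = g_list_delete_link (buffers, l);
  }
}

/* Equivalent of clear_buffers(): flush everything still referenced. */
static void
clear_all (void)
{
  GList *l;

  while ((l = g_list_first (buffers))) {
    g_free (l->data);
    buffers = g_list_delete_link (buffers, l);
  }
}

int
main (void)
{
  Frame *a = g_new0 (Frame, 1), *b = g_new0 (Frame, 1);

  remember (a);
  remember (b);
  discard (a);                  /* decoder is done with this one */
  printf ("%u buffer(s) still held\n", g_list_length (buffers));
  clear_all ();                 /* reset/close path */
  return 0;
}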
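
Finally, gst_mpeg2dec_handle_frame() feeds the whole input buffer to mpeg2_buffer() and then loops on the parser until it reports STATE_BUFFER (needs more data), dispatching on the intermediate states; STATE_INVALID is treated as a recoverable decode error rather than a fatal one. A stubbed, decoder-free skeleton of that loop, with a canned state sequence standing in for mpeg2_parse():

#include <stdio.h>

/* Stand-ins for the libmpeg2 states the patch dispatches on. */
typedef enum {
  STATE_SEQUENCE, STATE_PICTURE, STATE_SLICE, STATE_END,
  STATE_BUFFER, STATE_INVALID
} State;

/* Pretend parser: returns a canned sequence of states, then STATE_BUFFER
 * to signal "feed me more data", which is the condition the patch uses to
 * leave its while (!done) loop. */
static State
fake_parse (void)
{
  static const State script[] =
      { STATE_SEQUENCE, STATE_PICTURE, STATE_SLICE, STATE_END, STATE_BUFFER };
  static unsigned i = 0;

  return script[i < 4 ? i++ : 4];
}

int
main (void)
{
  int done = 0;

  while (!done) {
    switch (fake_parse ()) {
      case STATE_SEQUENCE:
        puts ("sequence: (re)negotiate the output format");
        break;
      case STATE_PICTURE:
        puts ("picture: allocate an output buffer, mpeg2_set_buf()");
        break;
      case STATE_END:
        puts ("end: stream finished");
        /* fall through, like the patch, to push out the display frame */
      case STATE_SLICE:
        puts ("slice: finish the display frame, release discard_fbuf");
        break;
      case STATE_BUFFER:
        done = 1;               /* out of input, wait for the next frame */
        break;
      case STATE_INVALID:
        puts ("invalid: count a decode error and keep going");
        break;
    }
  }
  return 0;
}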