mpeg2: add new decoded picture buffer infrastructure.

Decoded pictures are now maintained in a DPB, similarly to H.264.
The same mechanism could be reused for the VC-1 and MPEG-4 Part 2 codecs.
Gwenole Beauchesne 2012-03-28 14:36:30 +02:00
parent b56ac16224
commit ac4fc0d36c
4 changed files with 557 additions and 53 deletions
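
For reference, a minimal sketch of how a decoder is expected to drive the new DPB API. Only the gst_vaapi_dpb_* calls come from this commit; the wrapper functions and the elided VA decode step are illustrative:

#include "gstvaapidecoder_dpb.h"

/* Illustrative helper: decode one picture and hand it over to the DPB */
static gboolean
decode_one_picture(GstVaapiDpb *dpb, GstVaapiPicture *picture)
{
    GstVaapiPicture *prev_picture, *next_picture;

    /* Look up the forward/backward reference pictures (MPEG-2 flavour) */
    gst_vaapi_dpb_mpeg2_get_references(dpb, picture, &prev_picture, &next_picture);

    /* ... fill VA picture parameters and submit the picture for decoding ... */

    /* The DPB decides when each picture is output and when it is released */
    return gst_vaapi_dpb_add(dpb, picture);
}

/* Illustrative lifecycle: one DPB per decoding session */
static void
run_session(void)
{
    GstVaapiDpb * const dpb = gst_vaapi_dpb_mpeg2_new();

    /* ... call decode_one_picture() for each coded picture ... */

    gst_vaapi_dpb_flush(dpb);   /* output whatever is still pending */
    gst_vaapi_dpb_unref(dpb);
}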

Makefile.am

@@ -148,6 +148,7 @@ endif
if USE_CODEC_PARSERS
libgstvaapi_source_c += \
gstvaapicodec_objects.c \
gstvaapidecoder_dpb.c \
gstvaapidecoder_h264.c \
gstvaapidecoder_mpeg2.c \
gstvaapidecoder_mpeg4.c \
@@ -162,6 +163,7 @@ libgstvaapi_source_h += \
$(NULL)
libgstvaapi_source_priv_h += \
gstvaapicodec_objects.h \
gstvaapidecoder_dpb.h \
gstvaapidecoder_objects.h \
$(NULL)
libgstvaapi_cflags += $(GST_CODEC_PARSERS_CFLAGS)

gstvaapidecoder_dpb.c

@@ -0,0 +1,328 @@
/*
* gstvaapidecoder_dpb.c - Decoded Picture Buffer
*
* Copyright (C) 2012 Intel Corporation
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1
* of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
#include "sysdeps.h"
#include "gstvaapidecoder_dpb.h"
#define DEBUG 1
#include "gstvaapidebug.h"
/* ------------------------------------------------------------------------- */
/* --- Common Decoded Picture Buffer utilities --- */
/* ------------------------------------------------------------------------- */
static GstVaapiDpb *
dpb_new(GType type, guint max_pictures)
{
GstMiniObject *obj;
GstVaapiDpb *dpb;
g_return_val_if_fail(max_pictures > 0, NULL);
obj = gst_mini_object_new(type);
if (!obj)
return NULL;
dpb = GST_VAAPI_DPB_CAST(obj);
dpb->pictures = g_new0(GstVaapiPicture *, max_pictures);
if (!dpb->pictures)
goto error;
dpb->max_pictures = max_pictures;
return dpb;
error:
gst_mini_object_unref(obj);
return NULL;
}
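/* Return the index of the picture with the lowest pts whose "already output"
 * state matches @output, or -1 if there is no such picture */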
static gint
dpb_get_oldest(GstVaapiDpb *dpb, gboolean output)
{
gint i, lowest_pts_index;
for (i = 0; i < dpb->num_pictures; i++) {
if ((GST_VAAPI_PICTURE_IS_OUTPUT(dpb->pictures[i]) ^ output) == 0)
break;
}
if (i == dpb->num_pictures)
return -1;
lowest_pts_index = i++;
for (; i < dpb->num_pictures; i++) {
GstVaapiPicture * const picture = dpb->pictures[i];
if ((GST_VAAPI_PICTURE_IS_OUTPUT(picture) ^ output) != 0)
continue;
if (picture->pts < dpb->pictures[lowest_pts_index]->pts)
lowest_pts_index = i;
}
return lowest_pts_index;
}
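/* Remove the picture at @index, moving the last DPB entry into its slot */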
static void
dpb_remove_index(GstVaapiDpb *dpb, guint index)
{
GstVaapiPicture ** const pictures = dpb->pictures;
guint num_pictures = --dpb->num_pictures;
if (index != num_pictures)
gst_vaapi_picture_replace(&pictures[index], pictures[num_pictures]);
gst_vaapi_picture_replace(&pictures[num_pictures], NULL);
}
static inline gboolean
dpb_output(GstVaapiDpb *dpb, GstVaapiPicture *picture)
{
return gst_vaapi_picture_output(picture);
}
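/* Output the oldest pending picture and drop it from the DPB unless it is
 * still needed as a reference */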
static gboolean
dpb_bump(GstVaapiDpb *dpb)
{
gint index;
gboolean success;
index = dpb_get_oldest(dpb, FALSE);
if (index < 0)
return FALSE;
success = dpb_output(dpb, dpb->pictures[index]);
if (!GST_VAAPI_PICTURE_IS_REFERENCE(dpb->pictures[index]))
dpb_remove_index(dpb, index);
return success;
}
static void
dpb_clear(GstVaapiDpb *dpb)
{
guint i;
for (i = 0; i < dpb->num_pictures; i++)
gst_vaapi_picture_replace(&dpb->pictures[i], NULL);
dpb->num_pictures = 0;
}
/* ------------------------------------------------------------------------- */
/* --- Base Decoded Picture Buffer --- */
/* ------------------------------------------------------------------------- */
G_DEFINE_TYPE(GstVaapiDpb, gst_vaapi_dpb, GST_TYPE_MINI_OBJECT)
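/* Default flush: output all pending pictures in pts order, then empty the DPB */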
static void
gst_vaapi_dpb_base_flush(GstVaapiDpb *dpb)
{
while (dpb_bump(dpb))
;
dpb_clear(dpb);
}
static gboolean
gst_vaapi_dpb_base_add(GstVaapiDpb *dpb, GstVaapiPicture *picture)
{
guint i;
// Remove all unused pictures
i = 0;
while (i < dpb->num_pictures) {
GstVaapiPicture * const picture = dpb->pictures[i];
if (GST_VAAPI_PICTURE_IS_OUTPUT(picture) &&
!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
dpb_remove_index(dpb, i);
else
i++;
}
// Store reference decoded picture into the DPB
if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
while (dpb->num_pictures == dpb->max_pictures) {
if (!dpb_bump(dpb))
return FALSE;
}
}
// Store non-reference decoded picture into the DPB
else {
if (GST_VAAPI_PICTURE_IS_SKIPPED(picture))
return TRUE;
while (dpb->num_pictures == dpb->max_pictures) {
for (i = 0; i < dpb->num_pictures; i++) {
if (!GST_VAAPI_PICTURE_IS_OUTPUT(picture) &&
dpb->pictures[i]->pts < picture->pts)
break;
}
if (i == dpb->num_pictures)
return dpb_output(dpb, picture);
if (!dpb_bump(dpb))
return FALSE;
}
}
gst_vaapi_picture_replace(&dpb->pictures[dpb->num_pictures++], picture);
return TRUE;
}
static void
gst_vaapi_dpb_finalize(GstMiniObject *object)
{
GstVaapiDpb * const dpb = GST_VAAPI_DPB_CAST(object);
GstMiniObjectClass *parent_class;
if (dpb->pictures) {
dpb_clear(dpb);
g_free(dpb->pictures);
}
parent_class = GST_MINI_OBJECT_CLASS(gst_vaapi_dpb_parent_class);
if (parent_class->finalize)
parent_class->finalize(object);
}
static void
gst_vaapi_dpb_init(GstVaapiDpb *dpb)
{
dpb->pictures = NULL;
dpb->num_pictures = 0;
dpb->max_pictures = 0;
}
static void
gst_vaapi_dpb_class_init(GstVaapiDpbClass *klass)
{
GstMiniObjectClass * const object_class = GST_MINI_OBJECT_CLASS(klass);
object_class->finalize = gst_vaapi_dpb_finalize;
klass->flush = gst_vaapi_dpb_base_flush;
klass->add = gst_vaapi_dpb_base_add;
}
void
gst_vaapi_dpb_flush(GstVaapiDpb *dpb)
{
GstVaapiDpbClass *klass;
g_return_if_fail(GST_VAAPI_IS_DPB(dpb));
klass = GST_VAAPI_DPB_GET_CLASS(dpb);
if (G_UNLIKELY(!klass || !klass->flush))
return;
klass->flush(dpb);
}
gboolean
gst_vaapi_dpb_add(GstVaapiDpb *dpb, GstVaapiPicture *picture)
{
GstVaapiDpbClass *klass;
g_return_val_if_fail(GST_VAAPI_IS_DPB(dpb), FALSE);
g_return_val_if_fail(GST_VAAPI_IS_PICTURE(picture), FALSE);
klass = GST_VAAPI_DPB_GET_CLASS(dpb);
if (G_UNLIKELY(!klass || !klass->add))
return FALSE;
return klass->add(dpb, picture);
}
/* ------------------------------------------------------------------------- */
/* --- MPEG-2 Decoded Picture Buffer --- */
/* ------------------------------------------------------------------------- */
G_DEFINE_TYPE(GstVaapiDpbMpeg2, gst_vaapi_dpb_mpeg2, GST_VAAPI_TYPE_DPB)
static gboolean
gst_vaapi_dpb_mpeg2_add(GstVaapiDpb *dpb, GstVaapiPicture *picture)
{
GstVaapiPicture *ref_picture;
gint index = -1;
g_return_val_if_fail(GST_VAAPI_IS_DPB_MPEG2(dpb), FALSE);
/*
* Purpose: only store reference decoded pictures into the DPB
*
* This means:
* - non-reference decoded pictures are output immediately
* - ... thus causing older reference pictures to be output, if not already
* - the oldest reference picture is replaced with the new reference picture
*/
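/*
 * Example (illustrative trace): with coded order I0 P3 B1 B2 P6 B4 B5, where
 * the subscript is the presentation order, pictures are output as
 * I0 B1 B2 P3 B4 B5, and P6 is output when the next reference picture
 * arrives or when the DPB is flushed at the end of the sequence.
 */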
if (G_LIKELY(dpb->num_pictures == 2)) {
index = (dpb->pictures[0]->pts > dpb->pictures[1]->pts);
ref_picture = dpb->pictures[index];
if (!GST_VAAPI_PICTURE_IS_OUTPUT(ref_picture)) {
if (!dpb_output(dpb, ref_picture))
return FALSE;
}
}
if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
return dpb_output(dpb, picture);
if (index < 0)
index = dpb->num_pictures++;
gst_vaapi_picture_replace(&dpb->pictures[index], picture);
return TRUE;
}
static void
gst_vaapi_dpb_mpeg2_init(GstVaapiDpbMpeg2 *dpb)
{
}
static void
gst_vaapi_dpb_mpeg2_class_init(GstVaapiDpbMpeg2Class *klass)
{
GstVaapiDpbClass * const dpb_class = GST_VAAPI_DPB_CLASS(klass);
dpb_class->add = gst_vaapi_dpb_mpeg2_add;
}
GstVaapiDpb *
gst_vaapi_dpb_mpeg2_new(void)
{
return dpb_new(GST_VAAPI_TYPE_DPB_MPEG2, 2);
}
void
gst_vaapi_dpb_mpeg2_get_references(
GstVaapiDpb *dpb,
GstVaapiPicture *picture,
GstVaapiPicture **prev_picture_ptr,
GstVaapiPicture **next_picture_ptr
)
{
GstVaapiPicture **picture_ptr, *ref_picture, *ref_pictures[2];
guint i, index;
g_return_if_fail(GST_VAAPI_IS_DPB_MPEG2(dpb));
g_return_if_fail(GST_VAAPI_IS_PICTURE(picture));
ref_pictures[0] = NULL;
ref_pictures[1] = NULL;
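/* Keep, for each side, the stored reference picture closest to @picture in
 * presentation order: ref_pictures[0] = nearest with pts <= picture->pts,
 * ref_pictures[1] = nearest with pts > picture->pts */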
for (i = 0; i < dpb->num_pictures; i++) {
ref_picture = dpb->pictures[i];
index = ref_picture->pts > picture->pts;
picture_ptr = &ref_pictures[index];
if (!*picture_ptr || ((*picture_ptr)->pts > ref_picture->pts) == index)
*picture_ptr = ref_picture;
}
if (prev_picture_ptr)
*prev_picture_ptr = ref_pictures[0];
if (next_picture_ptr)
*next_picture_ptr = ref_pictures[1];
}

gstvaapidecoder_dpb.h

@@ -0,0 +1,187 @@
/*
* gstvaapidecoder_dpb.h - Decoded Picture Buffer
*
* Copyright (C) 2012 Intel Corporation
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1
* of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
#ifndef GST_VAAPI_DECODER_DPB_H
#define GST_VAAPI_DECODER_DPB_H
#include <gst/vaapi/gstvaapidecoder_objects.h>
G_BEGIN_DECLS
typedef struct _GstVaapiDpb GstVaapiDpb;
typedef struct _GstVaapiDpbClass GstVaapiDpbClass;
typedef struct _GstVaapiDpbMpeg2 GstVaapiDpbMpeg2;
typedef struct _GstVaapiDpbMpeg2Class GstVaapiDpbMpeg2Class;
/* ------------------------------------------------------------------------- */
/* --- Base Decoded Picture Buffer --- */
/* ------------------------------------------------------------------------- */
#define GST_VAAPI_TYPE_DPB \
(gst_vaapi_dpb_get_type())
#define GST_VAAPI_DPB_CAST(obj) \
((GstVaapiDpb *)(obj))
#define GST_VAAPI_DPB(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), \
GST_VAAPI_TYPE_DPB, \
GstVaapiDpb))
#define GST_VAAPI_DPB_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), \
GST_VAAPI_TYPE_DPB, \
GstVaapiDpbClass))
#define GST_VAAPI_IS_DPB(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_VAAPI_TYPE_DPB))
#define GST_VAAPI_IS_DPB_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_VAAPI_TYPE_DPB))
#define GST_VAAPI_DPB_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj), \
GST_VAAPI_TYPE_DPB, \
GstVaapiDpbClass))
/**
* GstVaapiDpb:
*
* A decoded picture buffer (DPB) object.
*/
struct _GstVaapiDpb {
/*< private >*/
GstMiniObject parent_instance;
/*< protected >*/
GstVaapiPicture **pictures;
guint num_pictures;
guint max_pictures;
};
/**
* GstVaapiDpbClass:
*
* The #GstVaapiDpb base class.
*/
struct _GstVaapiDpbClass {
/*< private >*/
GstMiniObjectClass parent_class;
/*< protected >*/
void (*flush) (GstVaapiDpb *dpb);
gboolean (*add) (GstVaapiDpb *dpb, GstVaapiPicture *picture);
};
GType
gst_vaapi_dpb_get_type(void)
attribute_hidden;
void
gst_vaapi_dpb_flush(GstVaapiDpb *dpb)
attribute_hidden;
gboolean
gst_vaapi_dpb_add(GstVaapiDpb *dpb, GstVaapiPicture *picture)
attribute_hidden;
static inline gpointer
gst_vaapi_dpb_ref(gpointer ptr)
{
return gst_mini_object_ref(GST_MINI_OBJECT(ptr));
}
static inline void
gst_vaapi_dpb_unref(gpointer ptr)
{
gst_mini_object_unref(GST_MINI_OBJECT(ptr));
}
/* ------------------------------------------------------------------------- */
/* --- MPEG-2 Decoded Picture Buffer --- */
/* ------------------------------------------------------------------------- */
#define GST_VAAPI_TYPE_DPB_MPEG2 \
(gst_vaapi_dpb_mpeg2_get_type())
#define GST_VAAPI_DPB_MPEG2_CAST(obj) \
((GstVaapiDpbMpeg2 *)(obj))
#define GST_VAAPI_DPB_MPEG2(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), \
GST_VAAPI_TYPE_DPB_MPEG2, \
GstVaapiDpbMpeg2))
#define GST_VAAPI_DPB_MPEG2_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), \
GST_VAAPI_TYPE_DPB_MPEG2, \
GstVaapiDpbMpeg2Class))
#define GST_VAAPI_IS_DPB_MPEG2(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_VAAPI_TYPE_DPB_MPEG2))
#define GST_VAAPI_IS_DPB_MPEG2_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_VAAPI_TYPE_DPB_MPEG2))
#define GST_VAAPI_DPB_MPEG2_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj), \
GST_VAAPI_TYPE_DPB_MPEG2, \
GstVaapiDpbMpeg2Class))
/**
* GstVaapiDpbMpeg2:
*
* An MPEG-2 decoded picture buffer (DPB) object.
*/
struct _GstVaapiDpbMpeg2 {
/*< private >*/
GstVaapiDpb parent_instance;
};
/**
* GstVaapiDpbMpeg2Class:
*
* The #GstVaapiDpbMpeg2 class structure.
*/
struct _GstVaapiDpbMpeg2Class {
/*< private >*/
GstVaapiDpbClass parent_class;
};
GType
gst_vaapi_dpb_mpeg2_get_type(void)
attribute_hidden;
GstVaapiDpb *
gst_vaapi_dpb_mpeg2_new(void)
attribute_hidden;
void
gst_vaapi_dpb_mpeg2_get_references(
GstVaapiDpb *dpb,
GstVaapiPicture *picture,
GstVaapiPicture **prev_picture_ptr,
GstVaapiPicture **next_picture_ptr
) attribute_hidden;
G_END_DECLS
#endif /* GST_VAAPI_DECODER_DPB_H */
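
As noted above, the same infrastructure could be reused for VC-1 and MPEG-4 Part 2. A hypothetical codec-specific DPB would mirror GstVaapiDpbMpeg2: subclass GstVaapiDpb and override the add() vfunc (and flush() if needed). The GstVaapiDpbVC1 names below are illustrative and not part of this commit; a real subclass would live in gstvaapidecoder_dpb.c so it can reuse the file-local dpb_new() helper.

typedef struct _GstVaapiDpbVC1      GstVaapiDpbVC1;
typedef struct _GstVaapiDpbVC1Class GstVaapiDpbVC1Class;

struct _GstVaapiDpbVC1 {
    GstVaapiDpb parent_instance;
};

struct _GstVaapiDpbVC1Class {
    GstVaapiDpbClass parent_class;
};

G_DEFINE_TYPE(GstVaapiDpbVC1, gst_vaapi_dpb_vc1, GST_VAAPI_TYPE_DPB)

static gboolean
gst_vaapi_dpb_vc1_add(GstVaapiDpb *dpb, GstVaapiPicture *picture)
{
    /* Codec-specific reference management would go here; as a placeholder,
     * chain up to the base class behaviour */
    return GST_VAAPI_DPB_CLASS(gst_vaapi_dpb_vc1_parent_class)->add(dpb, picture);
}

static void
gst_vaapi_dpb_vc1_init(GstVaapiDpbVC1 *dpb)
{
}

static void
gst_vaapi_dpb_vc1_class_init(GstVaapiDpbVC1Class *klass)
{
    GST_VAAPI_DPB_CLASS(klass)->add = gst_vaapi_dpb_vc1_add;
}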

gstvaapidecoder_mpeg2.c

@@ -30,6 +30,7 @@
#include <gst/codecparsers/gstmpegvideoparser.h>
#include "gstvaapidecoder_mpeg2.h"
#include "gstvaapidecoder_objects.h"
#include "gstvaapidecoder_dpb.h"
#include "gstvaapidecoder_priv.h"
#include "gstvaapidisplay_priv.h"
#include "gstvaapiobject_priv.h"
@@ -72,8 +73,7 @@ struct _GstVaapiDecoderMpeg2Private {
GstMpegVideoPictureExt pic_ext;
GstMpegVideoQuantMatrixExt quant_matrix_ext;
GstVaapiPicture *current_picture;
GstVaapiPicture *next_picture;
GstVaapiPicture *prev_picture;
GstVaapiDpb *dpb;
GstAdapter *adapter;
GstClockTime seq_pts;
GstClockTime gop_pts;
@@ -177,8 +177,11 @@ gst_vaapi_decoder_mpeg2_close(GstVaapiDecoderMpeg2 *decoder)
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
gst_vaapi_picture_replace(&priv->current_picture, NULL);
gst_vaapi_picture_replace(&priv->next_picture, NULL);
gst_vaapi_picture_replace(&priv->prev_picture, NULL);
if (priv->dpb) {
gst_vaapi_dpb_unref(priv->dpb);
priv->dpb = NULL;
}
if (priv->adapter) {
gst_adapter_clear(priv->adapter);
@@ -197,6 +200,10 @@ gst_vaapi_decoder_mpeg2_open(GstVaapiDecoderMpeg2 *decoder, GstBuffer *buffer)
priv->adapter = gst_adapter_new();
if (!priv->adapter)
return FALSE;
priv->dpb = gst_vaapi_dpb_mpeg2_new();
if (!priv->dpb)
return FALSE;
return TRUE;
}
@@ -324,32 +331,20 @@ ensure_quant_matrix(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static inline GstVaapiDecoderStatus
render_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
{
if (!gst_vaapi_picture_output(picture))
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
static gboolean
decode_current_picture(GstVaapiDecoderMpeg2 *decoder)
{
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
GstVaapiPicture * const picture = priv->current_picture;
GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_SUCCESS;
if (picture) {
if (!gst_vaapi_picture_decode(picture))
status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
if ((priv->prev_picture && priv->next_picture) ||
(priv->closed_gop && priv->next_picture))
status = render_picture(decoder, picture);
}
return FALSE;
if (!gst_vaapi_dpb_add(priv->dpb, picture))
return FALSE;
gst_vaapi_picture_replace(&priv->current_picture, NULL);
}
return status;
return TRUE;
}
static GstVaapiDecoderStatus
@@ -437,19 +432,11 @@ static GstVaapiDecoderStatus
decode_sequence_end(GstVaapiDecoderMpeg2 *decoder)
{
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
GstVaapiDecoderStatus status;
if (priv->current_picture) {
status = decode_current_picture(decoder);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
}
if (priv->current_picture && !decode_current_picture(decoder))
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
if (priv->next_picture) {
status = render_picture(decoder, priv->next_picture);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
}
gst_vaapi_dpb_flush(priv->dpb);
return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
}
@@ -516,11 +503,8 @@ decode_picture(GstVaapiDecoderMpeg2 *decoder, guchar *buf, guint buf_size)
return status;
}
if (priv->current_picture) {
status = decode_current_picture(decoder);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
}
if (priv->current_picture && !decode_current_picture(decoder))
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
priv->current_picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
if (!priv->current_picture) {
@@ -543,9 +527,11 @@ decode_picture(GstVaapiDecoderMpeg2 *decoder, guchar *buf, guint buf_size)
switch (pic_hdr->pic_type) {
case GST_MPEG_VIDEO_PICTURE_TYPE_I:
GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
picture->type = GST_VAAPI_PICTURE_TYPE_I;
break;
case GST_MPEG_VIDEO_PICTURE_TYPE_P:
GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
picture->type = GST_VAAPI_PICTURE_TYPE_P;
break;
case GST_MPEG_VIDEO_PICTURE_TYPE_B:
@@ -560,15 +546,6 @@ decode_picture(GstVaapiDecoderMpeg2 *decoder, guchar *buf, guint buf_size)
pts = priv->gop_pts;
pts += gst_util_uint64_scale(pic_hdr->tsn, GST_SECOND * priv->fps_d, priv->fps_n);
picture->pts = pts + priv->pts_diff;
/* Update reference pictures */
if (pic_hdr->pic_type != GST_MPEG_VIDEO_PICTURE_TYPE_B) {
GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
if (priv->next_picture)
status = render_picture(decoder, priv->next_picture);
gst_vaapi_picture_replace(&priv->prev_picture, priv->next_picture);
gst_vaapi_picture_replace(&priv->next_picture, picture);
}
return status;
}
@@ -623,6 +600,7 @@ fill_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
VAPictureParameterBufferMPEG2 * const pic_param = picture->param;
GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr;
GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext;
GstVaapiPicture *prev_picture, *next_picture;
if (!priv->has_pic_ext)
return FALSE;
@@ -650,14 +628,25 @@ fill_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
COPY_FIELD(picture_coding_extension, bits, repeat_first_field);
COPY_FIELD(picture_coding_extension, bits, progressive_frame);
gst_vaapi_dpb_mpeg2_get_references(
priv->dpb,
picture,
&prev_picture,
&next_picture
);
switch (pic_hdr->pic_type) {
case GST_MPEG_VIDEO_PICTURE_TYPE_B:
if (priv->next_picture)
pic_param->backward_reference_picture = priv->next_picture->surface_id;
// fall-through
if (next_picture)
pic_param->backward_reference_picture = next_picture->surface_id;
if (prev_picture)
pic_param->forward_reference_picture = prev_picture->surface_id;
else if (!priv->closed_gop)
GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_SKIPPED);
break;
case GST_MPEG_VIDEO_PICTURE_TYPE_P:
if (priv->prev_picture)
pic_param->forward_reference_picture = priv->prev_picture->surface_id;
if (prev_picture)
pic_param->forward_reference_picture = prev_picture->surface_id;
break;
}
return TRUE;
@@ -932,8 +921,6 @@ gst_vaapi_decoder_mpeg2_init(GstVaapiDecoderMpeg2 *decoder)
priv->fps_d = 0;
priv->profile = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
priv->current_picture = NULL;
priv->next_picture = NULL;
priv->prev_picture = NULL;
priv->adapter = NULL;
priv->seq_pts = GST_CLOCK_TIME_NONE;
priv->gop_pts = GST_CLOCK_TIME_NONE;