gstreamer/gst-libs/gst/vaapi/gstvaapidecoder_mpeg2.c
Gwenole Beauchesne 2c449e42ca decoder: refactor decoder unit API.
Allocate decoder unit earlier in the main parse() function and don't
delegate this task to derived classes. The ultimate purpose is to get
rid of dynamic allocation of decoder units.
2013-01-07 14:10:13 +01:00

1494 lines
50 KiB
C

/*
* gstvaapidecoder_mpeg2.c - MPEG-2 decoder
*
* Copyright (C) 2011 Intel Corporation
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1
* of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
/**
* SECTION:gstvaapidecoder_mpeg2
* @short_description: MPEG-2 decoder
*/
#include "sysdeps.h"
#include <string.h>
#include <gst/base/gstbitreader.h>
#include <gst/codecparsers/gstmpegvideoparser.h>
#include "gstvaapidecoder_mpeg2.h"
#include "gstvaapidecoder_objects.h"
#include "gstvaapidecoder_dpb.h"
#include "gstvaapidecoder_priv.h"
#include "gstvaapidisplay_priv.h"
#include "gstvaapiobject_priv.h"
#define DEBUG 1
#include "gstvaapidebug.h"
/* ------------------------------------------------------------------------- */
/* --- VLC Reader --- */
/* ------------------------------------------------------------------------- */
/* Reads @nbits (at most 8) from the bit reader into @val.
 * On bitstream exhaustion, logs a warning and jumps to the caller's
 * local "failed" label, so it may only be used in functions providing one. */
#define READ_UINT8(br, val, nbits) G_STMT_START { \
  if (!gst_bit_reader_get_bits_uint8 (br, &val, nbits)) { \
    GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
    goto failed; \
  } \
} G_STMT_END

/* Skips @nbits in the bit reader; jumps to the caller's "failed" label
 * when not enough bits remain. */
#define SKIP(reader, nbits) G_STMT_START { \
  if (!gst_bit_reader_skip (reader, nbits)) { \
    GST_WARNING ("failed to skip nbits: %d", nbits); \
    goto failed; \
  } \
} G_STMT_END
/* VLC decoder from gst-plugins-bad */
/* One entry of a variable-length-code lookup table */
typedef struct _VLCTable VLCTable;
struct _VLCTable {
    gint value;   /* decoded value associated with this codeword */
    guint cword;  /* codeword bits, right-aligned */
    guint cbits;  /* number of bits in the codeword */
};
/* Decodes the next variable-length code from @br using @table, which holds
 * @length entries sorted by ascending code length (cbits). The candidate
 * bits are peeked once per distinct code length; on a match the code is
 * consumed and, if @res is non-NULL, the decoded value is stored there.
 *
 * Returns: TRUE on success, FALSE if no codeword matched or the bitstream
 * ran out of data.
 */
static gboolean
decode_vlc(GstBitReader *br, gint *res, const VLCTable *table, guint length)
{
    /* Fix: the index must be as wide as @length — the original guint8
     * counter would wrap and loop forever on tables with > 255 entries */
    guint i;
    guint cbits = 0;
    guint32 value = 0;

    for (i = 0; i < length; i++) {
        /* Re-peek only when the candidate code length changes */
        if (cbits != table[i].cbits) {
            cbits = table[i].cbits;
            if (!gst_bit_reader_peek_bits_uint32(br, &value, cbits))
                goto failed;
        }
        if (value == table[i].cword) {
            SKIP(br, cbits);
            if (res)
                *res = table[i].value;
            return TRUE;
        }
    }
    GST_DEBUG("failed to find VLC code");

failed:
    GST_WARNING("failed to decode VLC, returning");
    return FALSE;
}
enum {
    /* Sentinel value for the macroblock_escape codeword (adds 33 to the
     * running macroblock_address_increment) */
    GST_MPEG_VIDEO_MACROBLOCK_ESCAPE = -1,
};

/* Table B-1: Variable length codes for macroblock_address_increment.
 * Entries are ordered by ascending code length, as required by decode_vlc(). */
static const VLCTable mpeg2_mbaddr_vlc_table[] = {
    {  1, 0x01,  1 },
    {  2, 0x03,  3 },
    {  3, 0x02,  3 },
    {  4, 0x03,  4 },
    {  5, 0x02,  4 },
    {  6, 0x03,  5 },
    {  7, 0x02,  5 },
    {  8, 0x07,  7 },
    {  9, 0x06,  7 },
    { 10, 0x0b,  8 },
    { 11, 0x0a,  8 },
    { 12, 0x09,  8 },
    { 13, 0x08,  8 },
    { 14, 0x07,  8 },
    { 15, 0x06,  8 },
    { 16, 0x17, 10 },
    { 17, 0x16, 10 },
    { 18, 0x15, 10 },
    { 19, 0x14, 10 },
    { 20, 0x13, 10 },
    { 21, 0x12, 10 },
    { 22, 0x23, 11 },
    { 23, 0x22, 11 },
    { 24, 0x21, 11 },
    { 25, 0x20, 11 },
    { 26, 0x1f, 11 },
    { 27, 0x1e, 11 },
    { 28, 0x1d, 11 },
    { 29, 0x1c, 11 },
    { 30, 0x1b, 11 },
    { 31, 0x1a, 11 },
    { 32, 0x19, 11 },
    { 33, 0x18, 11 },
    { GST_MPEG_VIDEO_MACROBLOCK_ESCAPE, 0x08, 11 }
};
/* ------------------------------------------------------------------------- */
/* --- PTS Generator --- */
/* ------------------------------------------------------------------------- */
/* Reconstructs presentation timestamps from GOP time stamps and the
 * per-picture temporal sequence numbers (TSN) carried in picture headers. */
typedef struct _PTSGenerator PTSGenerator;
struct _PTSGenerator {
    GstClockTime gop_pts; // Current GOP PTS
    GstClockTime max_pts; // Max picture PTS
    guint gop_tsn;        // Absolute GOP TSN
    guint max_tsn;        // Max picture TSN, relative to last GOP TSN
    guint ovl_tsn;        // How many times TSN overflowed since GOP
    guint lst_tsn;        // Last picture TSN
    guint fps_n;          // Framerate numerator
    guint fps_d;          // Framerate denominator
};
/* Resets the PTS generator: no known GOP time, no recorded picture,
 * framerate undefined. */
static void
pts_init(PTSGenerator *tsg)
{
    /* All counters and the framerate are zeroed; only the two timestamps
     * need an explicit "invalid" value */
    *tsg = (PTSGenerator) {
        .gop_pts = GST_CLOCK_TIME_NONE,
        .max_pts = GST_CLOCK_TIME_NONE,
    };
}
/* Converts a number of frames into a duration using the current framerate. */
static inline GstClockTime
pts_get_duration(PTSGenerator *tsg, guint num_frames)
{
    const guint64 time_per_fps_n = GST_SECOND * tsg->fps_d;
    return gst_util_uint64_scale(num_frames, time_per_fps_n, tsg->fps_n);
}
/* Returns the absolute picture order count of the last evaluated picture:
 * GOP base + 1024 per TSN overflow + last in-GOP TSN. */
static inline guint
pts_get_poc(PTSGenerator *tsg)
{
    const guint overflow_offset = tsg->ovl_tsn * 1024;
    return tsg->gop_tsn + overflow_offset + tsg->lst_tsn;
}
/* Records the stream framerate used by all subsequent PTS interpolation. */
static void
pts_set_framerate(PTSGenerator *tsg, guint fps_n, guint fps_d)
{
    tsg->fps_d = fps_d;
    tsg->fps_n = fps_n;
}
/* Re-synchronizes the PTS generator at a GOP boundary. @gop_pts is the PTS
 * attached to the frame carrying the GOP header, or GST_CLOCK_TIME_NONE.
 * A missing or non-increasing GOP PTS is not trusted: it is interpolated
 * from the last observed picture PTS instead. */
static void
pts_sync(PTSGenerator *tsg, GstClockTime gop_pts)
{
    guint gop_tsn;

    if (!GST_CLOCK_TIME_IS_VALID(gop_pts) ||
        (GST_CLOCK_TIME_IS_VALID(tsg->max_pts) && tsg->max_pts >= gop_pts)) {
        /* Invalid GOP PTS, interpolate from the last known picture PTS */
        if (GST_CLOCK_TIME_IS_VALID(tsg->max_pts)) {
            gop_pts = tsg->max_pts + pts_get_duration(tsg, 1);
            gop_tsn = tsg->gop_tsn + tsg->ovl_tsn * 1024 + tsg->max_tsn + 1;
        }
        else {
            /* Nothing seen yet: start the timeline at zero */
            gop_pts = 0;
            gop_tsn = 0;
        }
    }
    else {
        /* Interpolate GOP TSN from this valid PTS */
        if (GST_CLOCK_TIME_IS_VALID(tsg->gop_pts))
            /* "+ duration(1) - 1" rounds the elapsed time up to a whole
             * number of frames */
            gop_tsn = tsg->gop_tsn + gst_util_uint64_scale(
                gop_pts - tsg->gop_pts + pts_get_duration(tsg, 1) - 1,
                tsg->fps_n, GST_SECOND * tsg->fps_d);
        else
            gop_tsn = 0;
    }

    tsg->gop_pts = gop_pts;
    tsg->gop_tsn = gop_tsn;
    /* TSNs are relative to the new GOP: restart per-GOP counters */
    tsg->max_tsn = 0;
    tsg->ovl_tsn = 0;
    tsg->lst_tsn = 0;
}
static GstClockTime
pts_eval(PTSGenerator *tsg, GstClockTime pic_pts, guint pic_tsn)
{
GstClockTime pts;
if (!GST_CLOCK_TIME_IS_VALID(tsg->gop_pts))
tsg->gop_pts = 0;
pts = tsg->gop_pts + pts_get_duration(tsg, tsg->ovl_tsn * 1024 + pic_tsn);
if (!GST_CLOCK_TIME_IS_VALID(tsg->max_pts) || tsg->max_pts < pts)
tsg->max_pts = pts;
if (tsg->max_tsn < pic_tsn)
tsg->max_tsn = pic_tsn;
else if (tsg->max_tsn == 1023 && pic_tsn < tsg->lst_tsn) { /* TSN wrapped */
tsg->max_tsn = pic_tsn;
tsg->ovl_tsn++;
}
tsg->lst_tsn = pic_tsn;
return pts;
}
/* ------------------------------------------------------------------------- */
/* --- MPEG-2 Parser Info --- */
/* ------------------------------------------------------------------------- */
/* Subset of the MPEG-2 slice() header needed to fill the VA slice
 * parameters; parsed locally by parse_slice(). */
typedef struct _GstMpegVideoSliceHdr GstMpegVideoSliceHdr;
struct _GstMpegVideoSliceHdr {
    guint16 slice_horizontal_position; /* first macroblock column */
    guint16 slice_vertical_position;   /* macroblock row, from the start code */
    guint8 quantiser_scale_code;
    guint8 intra_slice;
    /* Size of the slice() header in bits */
    guint header_size;
};
/* Parsed bitstream unit attached to a decoder unit as parsed_info: the raw
 * packet plus the decoded header matching its type. Ref-counted through the
 * mini-object machinery below. */
typedef struct _GstVaapiParserInfoMpeg2 GstVaapiParserInfoMpeg2;
struct _GstVaapiParserInfoMpeg2 {
    GstMpegVideoPacket packet;  /* raw packet location within the frame */
    guint8 extension_type; /* for Extension packets */
    union {
        GstMpegVideoSequenceHdr seq_hdr;
        GstMpegVideoSequenceExt seq_ext;
        GstMpegVideoSequenceDisplayExt seq_display_ext;
        GstMpegVideoGop gop;
        GstMpegVideoQuantMatrixExt quant_matrix;
        GstMpegVideoPictureHdr pic_hdr;
        GstMpegVideoPictureExt pic_ext;
        GstMpegVideoSliceHdr slice_hdr;
    } data; /* decoded header; the active member is keyed by packet.type */
};
static inline const GstVaapiMiniObjectClass *
gst_vaapi_parser_info_mpeg2_class(void)
{
static const GstVaapiMiniObjectClass GstVaapiParserInfoMpeg2Class = {
sizeof(GstVaapiParserInfoMpeg2),
NULL
};
return &GstVaapiParserInfoMpeg2Class;
}
static inline GstVaapiParserInfoMpeg2 *
gst_vaapi_parser_info_mpeg2_new(void)
{
return (GstVaapiParserInfoMpeg2 *)
gst_vaapi_mini_object_new(gst_vaapi_parser_info_mpeg2_class());
}
/* Ref-counting helpers for parser info objects: thin wrappers over the
 * generic mini-object API. _replace() unrefs *old_pi_ptr (if any), refs
 * new_pi (if any) and stores it back. */
#define gst_vaapi_parser_info_mpeg2_ref(pi) \
    gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))

#define gst_vaapi_parser_info_mpeg2_unref(pi) \
    gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))

#define gst_vaapi_parser_info_mpeg2_replace(old_pi_ptr, new_pi) \
    gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
        (GstVaapiMiniObject *)(new_pi))
/* ------------------------------------------------------------------------- */
/* --- MPEG-2 Decoder --- */
/* ------------------------------------------------------------------------- */
/* Registers GstVaapiDecoderMpeg2 as a GObject type derived from
 * GstVaapiDecoder */
G_DEFINE_TYPE(GstVaapiDecoderMpeg2,
    gst_vaapi_decoder_mpeg2,
    GST_VAAPI_TYPE_DECODER)

#define GST_VAAPI_DECODER_MPEG2_CAST(decoder) \
    ((GstVaapiDecoderMpeg2 *)(decoder))

/* Retrieves the private state structure attached to a decoder instance */
#define GST_VAAPI_DECODER_MPEG2_GET_PRIVATE(obj) \
    (G_TYPE_INSTANCE_GET_PRIVATE((obj), \
        GST_VAAPI_TYPE_DECODER_MPEG2, \
        GstVaapiDecoderMpeg2Private))
/* Per-instance decoder state. */
struct _GstVaapiDecoderMpeg2Private {
    GstVaapiProfile profile;     /* profile signalled by the stream */
    GstVaapiProfile hw_profile;  /* profile actually used for the VA context */
    guint width;                 /* coded width, incl. sequence ext bits */
    guint height;                /* coded height, incl. sequence ext bits */
    guint fps_n;                 /* framerate numerator */
    guint fps_d;                 /* framerate denominator */
    /* Last seen headers, kept alive as ref-counted parser infos */
    GstVaapiParserInfoMpeg2 *seq_hdr;
    GstVaapiParserInfoMpeg2 *seq_ext;
    GstVaapiParserInfoMpeg2 *seq_display_ext;
    GstVaapiParserInfoMpeg2 *seq_scalable_ext;
    GstVaapiParserInfoMpeg2 *pic_hdr;
    GstVaapiParserInfoMpeg2 *pic_ext;
    GstVaapiParserInfoMpeg2 *quant_matrix;
    GstVaapiPicture *current_picture; /* picture being accumulated */
    GstVaapiDpb *dpb;                 /* decoded picture buffer */
    PTSGenerator tsg;                 /* timestamp reconstruction state */
    guint is_constructed : 1;
    guint is_opened : 1;
    guint size_changed : 1;          /* VA context must be resized */
    guint profile_changed : 1;       /* VA context must switch profile */
    guint quant_matrix_changed : 1;  /* IQ matrix must be resubmitted */
    guint progressive_sequence : 1;  /* from sequence extension */
    guint closed_gop : 1;            /* from GOP header */
    guint broken_link : 1;           /* from GOP header */
};
/* Releases all per-stream state: the pending picture, every cached parsed
 * header, and the decoded picture buffer. Safe to call repeatedly. */
static void
gst_vaapi_decoder_mpeg2_close(GstVaapiDecoderMpeg2 *decoder)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstVaapiParserInfoMpeg2 **parser_infos[] = {
        &priv->seq_hdr,
        &priv->seq_ext,
        &priv->seq_display_ext,
        &priv->seq_scalable_ext,
        &priv->pic_hdr,
        &priv->pic_ext,
        &priv->quant_matrix,
    };
    guint i;

    gst_vaapi_picture_replace(&priv->current_picture, NULL);

    /* Drop every cached parsed header */
    for (i = 0; i < G_N_ELEMENTS(parser_infos); i++)
        gst_vaapi_parser_info_mpeg2_replace(parser_infos[i], NULL);

    if (priv->dpb) {
        gst_vaapi_dpb_unref(priv->dpb);
        priv->dpb = NULL;
    }
}
/* (Re)opens the decoder for a new stream: clears any leftover state,
 * resets timestamp generation and allocates a fresh DPB.
 *
 * Returns: TRUE on success, FALSE if the DPB could not be allocated.
 */
static gboolean
gst_vaapi_decoder_mpeg2_open(GstVaapiDecoderMpeg2 *decoder)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;

    gst_vaapi_decoder_mpeg2_close(decoder);
    pts_init(&priv->tsg);

    priv->dpb = gst_vaapi_dpb_mpeg2_new();
    return priv->dpb != NULL;
}
/* Destroy hook: releases every resource held by the decoder instance. */
static void
gst_vaapi_decoder_mpeg2_destroy(GstVaapiDecoderMpeg2 *decoder)
{
    gst_vaapi_decoder_mpeg2_close(decoder);
}
/* Create hook: succeeds only when the base decoder has a codec assigned. */
static gboolean
gst_vaapi_decoder_mpeg2_create(GstVaapiDecoderMpeg2 *decoder)
{
    return GST_VAAPI_DECODER_CODEC(decoder) ? TRUE : FALSE;
}
/* Copies a 64-entry, 8-bit quantizer matrix. */
static inline void
copy_quant_matrix(guint8 dst[64], const guint8 src[64])
{
    memcpy(dst, src, 64);
}
/* Returns a human-readable name for @profile, for logging purposes. */
static const char *
get_profile_str(GstVaapiProfile profile)
{
    switch (profile) {
    case GST_VAAPI_PROFILE_MPEG2_SIMPLE:
        return "simple";
    case GST_VAAPI_PROFILE_MPEG2_MAIN:
        return "main";
    case GST_VAAPI_PROFILE_MPEG2_HIGH:
        return "high";
    default:
        return "<unknown>";
    }
}
/* Maps the stream profile to a profile actually supported by the VA
 * display for @entrypoint. If the exact profile is unsupported, tries
 * successively higher profiles (simple -> main -> high); a high-profile
 * stream that uses no high-specific features (no scalable extension,
 * 4:2:0 chroma) may be demoted to main.
 *
 * Returns: the usable profile, or GST_VAAPI_PROFILE_UNKNOWN. */
static GstVaapiProfile
get_profile(GstVaapiDecoderMpeg2 *decoder, GstVaapiEntrypoint entrypoint)
{
    GstVaapiDisplay * const va_display = GST_VAAPI_DECODER_DISPLAY(decoder);
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstVaapiProfile profile = priv->profile;

    do {
        /* Return immediately if the exact same profile was found */
        if (gst_vaapi_display_has_decoder(va_display, profile, entrypoint))
            break;

        /* Otherwise, try to map to a higher profile */
        switch (profile) {
        case GST_VAAPI_PROFILE_MPEG2_SIMPLE:
            profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
            break;
        case GST_VAAPI_PROFILE_MPEG2_MAIN:
            profile = GST_VAAPI_PROFILE_MPEG2_HIGH;
            break;
        case GST_VAAPI_PROFILE_MPEG2_HIGH:
            // Try to map to main profile if no high profile specific bits used
            /* Only on the first pass (priv->profile == profile), so a
             * stream already promoted from main does not loop back */
            if (priv->profile == profile &&
                !priv->seq_scalable_ext &&
                (priv->seq_ext &&
                 priv->seq_ext->data.seq_ext.chroma_format == 1)) {
                profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
                break;
            }
            // fall-through
        default:
            profile = GST_VAAPI_PROFILE_UNKNOWN;
            break;
        }
    } while (profile != GST_VAAPI_PROFILE_UNKNOWN);

    if (profile != priv->profile)
        GST_INFO("forced %s profile to %s profile",
            get_profile_str(priv->profile), get_profile_str(profile));
    return profile;
}
/* Ensures the VA context matches the current stream parameters, recreating
 * it whenever the profile or the coded size changed since the last call. */
static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderMpeg2 *decoder)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
    gboolean reset_context = FALSE;

    if (priv->profile_changed) {
        GST_DEBUG("profile changed");
        priv->profile_changed = FALSE;
        reset_context = TRUE;
        priv->hw_profile = get_profile(decoder, entrypoint);
        if (priv->hw_profile == GST_VAAPI_PROFILE_UNKNOWN)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
    }
    if (priv->size_changed) {
        GST_DEBUG("size changed");
        priv->size_changed = FALSE;
        reset_context = TRUE;
    }
    if (reset_context) {
        GstVaapiContextInfo info;
        /* NOTE(review): info is stack-allocated and only these five fields
         * are set — assumes GstVaapiContextInfo has no other fields read by
         * gst_vaapi_decoder_ensure_context(); confirm against its header. */
        info.profile = priv->hw_profile;
        info.entrypoint = entrypoint;
        info.width = priv->width;
        info.height = priv->height;
        info.ref_frames = 2; /* MPEG-2 needs at most 2 reference frames */
        reset_context = gst_vaapi_decoder_ensure_context(
            GST_VAAPI_DECODER_CAST(decoder),
            &info
        );
        if (!reset_context)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Attaches an IQ matrix buffer to @picture when the quantization matrices
 * changed. Luma defaults come from the sequence header; a quant-matrix
 * extension may override them and is the only source for chroma matrices.
 * A NULL matrix pointer means "do not load" in the VA buffer. */
static GstVaapiDecoderStatus
ensure_quant_matrix(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoSequenceHdr * const seq_hdr = &priv->seq_hdr->data.seq_hdr;
    VAIQMatrixBufferMPEG2 *iq_matrix;
    guint8 *intra_quant_matrix = NULL;
    guint8 *non_intra_quant_matrix = NULL;
    guint8 *chroma_intra_quant_matrix = NULL;
    guint8 *chroma_non_intra_quant_matrix = NULL;

    /* Nothing to do until a sequence header or extension changes them */
    if (!priv->quant_matrix_changed)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    priv->quant_matrix_changed = FALSE;

    picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(MPEG2, decoder);
    if (!picture->iq_matrix) {
        GST_ERROR("failed to allocate IQ matrix");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    iq_matrix = picture->iq_matrix->param;

    /* Defaults from the sequence header... */
    intra_quant_matrix = seq_hdr->intra_quantizer_matrix;
    non_intra_quant_matrix = seq_hdr->non_intra_quantizer_matrix;

    /* ...selectively overridden by the quant-matrix extension */
    if (priv->quant_matrix) {
        GstMpegVideoQuantMatrixExt * const quant_matrix =
            &priv->quant_matrix->data.quant_matrix;
        if (quant_matrix->load_intra_quantiser_matrix)
            intra_quant_matrix = quant_matrix->intra_quantiser_matrix;
        if (quant_matrix->load_non_intra_quantiser_matrix)
            non_intra_quant_matrix = quant_matrix->non_intra_quantiser_matrix;
        if (quant_matrix->load_chroma_intra_quantiser_matrix)
            chroma_intra_quant_matrix = quant_matrix->chroma_intra_quantiser_matrix;
        if (quant_matrix->load_chroma_non_intra_quantiser_matrix)
            chroma_non_intra_quant_matrix = quant_matrix->chroma_non_intra_quantiser_matrix;
    }

    iq_matrix->load_intra_quantiser_matrix = intra_quant_matrix != NULL;
    if (intra_quant_matrix)
        copy_quant_matrix(iq_matrix->intra_quantiser_matrix,
            intra_quant_matrix);

    iq_matrix->load_non_intra_quantiser_matrix = non_intra_quant_matrix != NULL;
    if (non_intra_quant_matrix)
        copy_quant_matrix(iq_matrix->non_intra_quantiser_matrix,
            non_intra_quant_matrix);

    iq_matrix->load_chroma_intra_quantiser_matrix = chroma_intra_quant_matrix != NULL;
    if (chroma_intra_quant_matrix)
        copy_quant_matrix(iq_matrix->chroma_intra_quantiser_matrix,
            chroma_intra_quant_matrix);

    iq_matrix->load_chroma_non_intra_quantiser_matrix = chroma_non_intra_quant_matrix != NULL;
    if (chroma_non_intra_quant_matrix)
        copy_quant_matrix(iq_matrix->chroma_non_intra_quantiser_matrix,
            chroma_non_intra_quant_matrix);

    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Submits the pending picture for decoding and, once both fields (or the
 * full frame) are in, hands it over to the DPB. On any failure the pending
 * picture is dropped. A missing pending picture is not an error. */
static GstVaapiDecoderStatus
decode_current_picture(GstVaapiDecoderMpeg2 *decoder)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstVaapiPicture * const picture = priv->current_picture;

    if (!picture)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    if (!gst_vaapi_picture_decode(picture)) {
        /* XXX: fix for cases where first field failed to be decoded */
        gst_vaapi_picture_replace(&priv->current_picture, NULL);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    /* Keep an incomplete (first-field) picture pending */
    if (!GST_VAAPI_PICTURE_IS_COMPLETE(picture))
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    if (!gst_vaapi_dpb_add(priv->dpb, picture)) {
        gst_vaapi_picture_replace(&priv->current_picture, NULL);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Parses a sequence header packet into the unit's parser info. */
static GstVaapiDecoderStatus
parse_sequence(GstVaapiDecoderUnit *unit)
{
    GstVaapiParserInfoMpeg2 * const pi = unit->parsed_info;
    GstMpegVideoPacket * const packet = &pi->packet;
    const gboolean parsed = gst_mpeg_video_parse_sequence_header(
        &pi->data.seq_hdr, packet->data, packet->size, packet->offset);

    if (parsed)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    GST_ERROR("failed to parse sequence header");
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
/* Applies a parsed sequence header: updates framerate and coded size, and
 * invalidates state that a new sequence resets (extensions, IQ matrices). */
static GstVaapiDecoderStatus
decode_sequence(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoSequenceHdr *seq_hdr;

    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_hdr, unit->parsed_info);
    seq_hdr = &priv->seq_hdr->data.seq_hdr;

    /* Extensions belong to the previous sequence; drop them */
    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_ext, NULL);
    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_display_ext, NULL);

    priv->fps_n = seq_hdr->fps_n;
    priv->fps_d = seq_hdr->fps_d;
    pts_set_framerate(&priv->tsg, priv->fps_n, priv->fps_d);
    gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);

    priv->width = seq_hdr->width;
    priv->height = seq_hdr->height;
    priv->size_changed = TRUE;
    priv->quant_matrix_changed = TRUE;
    /* Assume progressive until a sequence extension states otherwise */
    priv->progressive_sequence = TRUE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Parses a sequence extension packet into the unit's parser info. */
static GstVaapiDecoderStatus
parse_sequence_ext(GstVaapiDecoderUnit *unit)
{
    GstVaapiParserInfoMpeg2 * const pi = unit->parsed_info;
    GstMpegVideoPacket * const packet = &pi->packet;
    const gboolean parsed = gst_mpeg_video_parse_sequence_extension(
        &pi->data.seq_ext, packet->data, packet->size, packet->offset);

    if (parsed)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    GST_ERROR("failed to parse sequence-extension");
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
/* Applies a parsed sequence extension: interlacing mode, 12-bit size and
 * framerate extension fields, and the stream profile. */
static GstVaapiDecoderStatus
decode_sequence_ext(GstVaapiDecoderMpeg2 *decoder,
    GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoSequenceExt *seq_ext;
    GstVaapiProfile profile;
    guint width, height;

    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_ext, unit->parsed_info);
    seq_ext = &priv->seq_ext->data.seq_ext;

    priv->progressive_sequence = seq_ext->progressive;
    gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);

    /* The extension carries the 2 most significant bits of the 14-bit
     * size; combine with the 12 bits from the sequence header */
    width = (priv->width & 0x0fff) | ((guint32)seq_ext->horiz_size_ext << 12);
    height = (priv->height & 0x0fff) | ((guint32)seq_ext->vert_size_ext << 12);
    GST_DEBUG("video resolution %ux%u", width, height);

    if (seq_ext->fps_n_ext && seq_ext->fps_d_ext) {
        priv->fps_n *= seq_ext->fps_n_ext + 1;
        priv->fps_d *= seq_ext->fps_d_ext + 1;
        pts_set_framerate(&priv->tsg, priv->fps_n, priv->fps_d);
        gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);
    }

    if (priv->width != width) {
        priv->width = width;
        priv->size_changed = TRUE;
    }

    if (priv->height != height) {
        priv->height = height;
        priv->size_changed = TRUE;
    }

    switch (seq_ext->profile) {
    case GST_MPEG_VIDEO_PROFILE_SIMPLE:
        profile = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
        break;
    case GST_MPEG_VIDEO_PROFILE_MAIN:
        profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
        break;
    case GST_MPEG_VIDEO_PROFILE_HIGH:
        profile = GST_VAAPI_PROFILE_MPEG2_HIGH;
        break;
    default:
        GST_ERROR("unsupported profile %d", seq_ext->profile);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
    }

    if (priv->profile != profile) {
        priv->profile = profile;
        priv->profile_changed = TRUE; /* triggers VA context re-creation */
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Parses a sequence display extension packet into the unit's parser info. */
static GstVaapiDecoderStatus
parse_sequence_display_ext(GstVaapiDecoderUnit *unit)
{
    GstVaapiParserInfoMpeg2 * const pi = unit->parsed_info;
    GstMpegVideoPacket * const packet = &pi->packet;
    const gboolean parsed = gst_mpeg_video_parse_sequence_display_extension(
        &pi->data.seq_display_ext,
        packet->data, packet->size, packet->offset);

    if (parsed)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    GST_ERROR("failed to parse sequence-display-extension");
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
/* Stores a parsed sequence display extension. Its contents are not acted
 * upon yet (see XXX below). */
static GstVaapiDecoderStatus
decode_sequence_display_ext(GstVaapiDecoderMpeg2 *decoder,
    GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;

    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_display_ext,
        unit->parsed_info);
    /* XXX: handle color primaries and cropping */
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Handles an end-of-sequence code: finishes the pending picture, then
 * flushes every frame still held in the DPB. */
static GstVaapiDecoderStatus
decode_sequence_end(GstVaapiDecoderMpeg2 *decoder)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    const GstVaapiDecoderStatus status = decode_current_picture(decoder);

    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    gst_vaapi_dpb_flush(priv->dpb);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Parses a quant matrix extension packet into the unit's parser info. */
static GstVaapiDecoderStatus
parse_quant_matrix_ext(GstVaapiDecoderUnit *unit)
{
    GstVaapiParserInfoMpeg2 * const pi = unit->parsed_info;
    GstMpegVideoPacket * const packet = &pi->packet;
    const gboolean parsed = gst_mpeg_video_parse_quant_matrix_extension(
        &pi->data.quant_matrix, packet->data, packet->size, packet->offset);

    if (parsed)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    GST_ERROR("failed to parse quant-matrix-extension");
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
/* Stores a parsed quant matrix extension and flags the IQ matrices for
 * resubmission with the next picture (see ensure_quant_matrix()). */
static GstVaapiDecoderStatus
decode_quant_matrix_ext(GstVaapiDecoderMpeg2 *decoder,
    GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;

    gst_vaapi_parser_info_mpeg2_replace(&priv->quant_matrix, unit->parsed_info);
    priv->quant_matrix_changed = TRUE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Parses a GOP header packet into the unit's parser info. */
static GstVaapiDecoderStatus
parse_gop(GstVaapiDecoderUnit *unit)
{
    GstVaapiParserInfoMpeg2 * const pi = unit->parsed_info;
    GstMpegVideoPacket * const packet = &pi->packet;
    const gboolean parsed = gst_mpeg_video_parse_gop(&pi->data.gop,
        packet->data, packet->size, packet->offset);

    if (parsed)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    GST_ERROR("failed to parse GOP");
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
/* Applies a parsed GOP header: records the closed-GOP/broken-link flags
 * and re-synchronizes the PTS generator with the frame's timestamp. */
static GstVaapiDecoderStatus
decode_gop(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstVaapiParserInfoMpeg2 * const pi = unit->parsed_info;
    GstMpegVideoGop * const gop = &pi->data.gop;

    priv->closed_gop = gop->closed_gop;
    priv->broken_link = gop->broken_link;

    GST_DEBUG("GOP %02u:%02u:%02u:%02u (closed_gop %d, broken_link %d)",
        gop->hour, gop->minute, gop->second, gop->frame,
        priv->closed_gop, priv->broken_link);

    pts_sync(&priv->tsg, GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Parses a picture header packet into the unit's parser info. */
static GstVaapiDecoderStatus
parse_picture(GstVaapiDecoderUnit *unit)
{
    GstVaapiParserInfoMpeg2 * const pi = unit->parsed_info;
    GstMpegVideoPacket * const packet = &pi->packet;
    const gboolean parsed = gst_mpeg_video_parse_picture_header(
        &pi->data.pic_hdr, packet->data, packet->size, packet->offset);

    if (parsed)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    GST_ERROR("failed to parse picture header");
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
/* Parses the slice() header, which the codecparsers library does not
 * provide: quantiser scale, intra flag and extra information bytes, then
 * the first macroblock_address_increment VLC to recover the horizontal
 * position. The vertical position derives from the slice start code. */
static GstVaapiDecoderStatus
parse_slice(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstVaapiParserInfoMpeg2 * const pi = unit->parsed_info;
    GstMpegVideoSliceHdr * const slice_hdr = &pi->data.slice_hdr;
    GstMpegVideoPacket * const packet = &pi->packet;
    GstBitReader br;
    gint mb_x, mb_y, mb_inc;
    guint8 slice_vertical_position_extension;
    guint8 extra_bit_slice, junk8;

    gst_bit_reader_init(&br, packet->data + packet->offset, packet->size);

    /* NOTE(review): for pictures taller than 2800 lines the extension is
     * read (to keep the reader in sync) but never folded into mb_y below —
     * confirm whether such sizes are expected to work. */
    if (priv->height > 2800)
        READ_UINT8(&br, slice_vertical_position_extension, 3);

    if (priv->seq_scalable_ext) {
        GST_ERROR("failed to parse slice with sequence_scalable_extension()");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    READ_UINT8(&br, slice_hdr->quantiser_scale_code, 5);
    READ_UINT8(&br, extra_bit_slice, 1);
    if (!extra_bit_slice)
        slice_hdr->intra_slice = 0;
    else {
        READ_UINT8(&br, slice_hdr->intra_slice, 1);
        READ_UINT8(&br, junk8, 7);
        /* Skip any extra_information_slice bytes */
        READ_UINT8(&br, extra_bit_slice, 1);
        while (extra_bit_slice) {
            READ_UINT8(&br, junk8, 8);
            READ_UINT8(&br, extra_bit_slice, 1);
        }
    }
    /* +32 bits account for the start code preceding the slice data;
     * presumably this matches what the VA macroblock_offset expects —
     * see decode_slice(). */
    slice_hdr->header_size = 32 + gst_bit_reader_get_pos(&br);

    /* Vertical position is encoded in the slice start code value */
    mb_y = packet->type - GST_MPEG_VIDEO_PACKET_SLICE_MIN;
    mb_x = -1;
    do {
        if (!decode_vlc(&br, &mb_inc, mpeg2_mbaddr_vlc_table,
                G_N_ELEMENTS(mpeg2_mbaddr_vlc_table))) {
            GST_WARNING("failed to decode first macroblock_address_increment");
            goto failed;
        }
        /* macroblock_escape adds 33 and a further increment follows */
        mb_x += mb_inc == GST_MPEG_VIDEO_MACROBLOCK_ESCAPE ? 33 : mb_inc;
    } while (mb_inc == GST_MPEG_VIDEO_MACROBLOCK_ESCAPE);

    slice_hdr->slice_horizontal_position = mb_x;
    slice_hdr->slice_vertical_position = mb_y;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;

failed:
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
/* Stores the parsed picture header as the pending picture state and drops
 * the previous picture coding extension, which is per-picture. */
static GstVaapiDecoderStatus
decode_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;

    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_hdr, unit->parsed_info);
    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_ext, NULL);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Parses a picture coding extension packet into the unit's parser info. */
static GstVaapiDecoderStatus
parse_picture_ext(GstVaapiDecoderUnit *unit)
{
    GstVaapiParserInfoMpeg2 * const pi = unit->parsed_info;
    GstMpegVideoPacket * const packet = &pi->packet;
    const gboolean parsed = gst_mpeg_video_parse_picture_extension(
        &pi->data.pic_ext, packet->data, packet->size, packet->offset);

    if (parsed)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    GST_ERROR("failed to parse picture-extension");
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
/* Stores a parsed picture coding extension and sanitizes inconsistent
 * combinations of progressive/interlaced signalling found in broken
 * streams before they reach the VA driver. */
static GstVaapiDecoderStatus
decode_picture_ext(GstVaapiDecoderMpeg2 *decoder,
    GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoPictureExt *pic_ext;

    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_ext, unit->parsed_info);
    pic_ext = &priv->pic_ext->data.pic_ext;

    if (priv->progressive_sequence && !pic_ext->progressive_frame) {
        GST_WARNING("invalid interlaced frame in progressive sequence, fixing");
        pic_ext->progressive_frame = 1;
    }

    if (pic_ext->picture_structure == 0 ||
        (pic_ext->progressive_frame &&
         pic_ext->picture_structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME)) {
        GST_WARNING("invalid picture_structure %d, replacing with \"frame\"",
            pic_ext->picture_structure);
        pic_ext->picture_structure = GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Packs the four 4-bit f_code values into one 16-bit field,
 * most significant nibble first: [0][0], [0][1], [1][0], [1][1]. */
static inline guint32
pack_f_code(guint8 f_code[2][2])
{
    guint32 packed = 0;
    guint i, j;

    for (i = 0; i < 2; i++)
        for (j = 0; j < 2; j++)
            packed = (packed << 4) | f_code[i][j];
    return packed;
}
/* Initializes a freshly allocated picture from the current picture header
 * and coding extension: picture type, reference flag, field structure and
 * presentation timestamp. If a stream starts with a lone I-field and the
 * DPB is empty, a skipped dummy reference picture is inserted so the
 * second field has something to predict from. */
static GstVaapiDecoderStatus
init_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr->data.pic_hdr;
    GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext->data.pic_ext;

    switch (pic_hdr->pic_type) {
    case GST_MPEG_VIDEO_PICTURE_TYPE_I:
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        picture->type = GST_VAAPI_PICTURE_TYPE_I;
        break;
    case GST_MPEG_VIDEO_PICTURE_TYPE_P:
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        picture->type = GST_VAAPI_PICTURE_TYPE_P;
        break;
    case GST_MPEG_VIDEO_PICTURE_TYPE_B:
        /* B pictures are never used as references */
        picture->type = GST_VAAPI_PICTURE_TYPE_B;
        break;
    default:
        GST_ERROR("unsupported picture type %d", pic_hdr->pic_type);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    if (!priv->progressive_sequence && !pic_ext->progressive_frame) {
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
        if (pic_ext->top_field_first)
            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_TFF);
    }

    switch (pic_ext->picture_structure) {
    case GST_MPEG_VIDEO_PICTURE_STRUCTURE_TOP_FIELD:
        picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
        break;
    case GST_MPEG_VIDEO_PICTURE_STRUCTURE_BOTTOM_FIELD:
        picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
        break;
    case GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME:
        picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
        break;
    }

    /* Allocate dummy picture for first field based I-frame */
    if (picture->type == GST_VAAPI_PICTURE_TYPE_I &&
        !GST_VAAPI_PICTURE_IS_FRAME(picture) &&
        gst_vaapi_dpb_size(priv->dpb) == 0) {
        GstVaapiPicture *dummy_picture;
        gboolean success;

        dummy_picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
        if (!dummy_picture) {
            GST_ERROR("failed to allocate dummy picture");
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        }

        /* Never displayed (SKIPPED) but usable as a reference */
        dummy_picture->type = GST_VAAPI_PICTURE_TYPE_I;
        dummy_picture->pts = GST_CLOCK_TIME_NONE;
        dummy_picture->poc = -1;
        dummy_picture->structure = GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;

        GST_VAAPI_PICTURE_FLAG_SET(
            dummy_picture,
            (GST_VAAPI_PICTURE_FLAG_SKIPPED |
             GST_VAAPI_PICTURE_FLAG_REFERENCE)
        );

        success = gst_vaapi_dpb_add(priv->dpb, dummy_picture);
        gst_vaapi_picture_unref(dummy_picture);
        if (!success) {
            GST_ERROR("failed to add dummy picture into DPB");
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
        }
        GST_INFO("allocated dummy picture for first field based I-frame");
    }

    /* Update presentation time */
    picture->pts = pts_eval(&priv->tsg,
        GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts, pic_hdr->tsn);
    picture->poc = pts_get_poc(&priv->tsg);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Fills the VA picture parameter buffer for @picture from the current
 * picture header/extension, and wires up the forward/backward reference
 * surfaces fetched from the DPB. A B picture with no forward reference in
 * an open GOP is marked SKIPPED (it cannot be reconstructed correctly). */
static void
fill_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    VAPictureParameterBufferMPEG2 * const pic_param = picture->param;
    GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr->data.pic_hdr;
    GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext->data.pic_ext;
    GstVaapiPicture *prev_picture, *next_picture;

    /* Fill in VAPictureParameterBufferMPEG2 */
    pic_param->horizontal_size = priv->width;
    pic_param->vertical_size = priv->height;
    pic_param->forward_reference_picture = VA_INVALID_ID;
    pic_param->backward_reference_picture = VA_INVALID_ID;
    pic_param->picture_coding_type = pic_hdr->pic_type;
    pic_param->f_code = pack_f_code(pic_ext->f_code);

/* Copies one picture_coding_extension bitfield from the parsed extension */
#define COPY_FIELD(a, b, f) \
    pic_param->a.b.f = pic_ext->f
    pic_param->picture_coding_extension.value = 0;
    pic_param->picture_coding_extension.bits.is_first_field =
        GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture);
    COPY_FIELD(picture_coding_extension, bits, intra_dc_precision);
    COPY_FIELD(picture_coding_extension, bits, picture_structure);
    COPY_FIELD(picture_coding_extension, bits, top_field_first);
    COPY_FIELD(picture_coding_extension, bits, frame_pred_frame_dct);
    COPY_FIELD(picture_coding_extension, bits, concealment_motion_vectors);
    COPY_FIELD(picture_coding_extension, bits, q_scale_type);
    COPY_FIELD(picture_coding_extension, bits, intra_vlc_format);
    COPY_FIELD(picture_coding_extension, bits, alternate_scan);
    COPY_FIELD(picture_coding_extension, bits, repeat_first_field);
    COPY_FIELD(picture_coding_extension, bits, progressive_frame);

    gst_vaapi_dpb_mpeg2_get_references(priv->dpb, picture,
        &prev_picture, &next_picture);

    switch (pic_hdr->pic_type) {
    case GST_MPEG_VIDEO_PICTURE_TYPE_B:
        if (next_picture)
            pic_param->backward_reference_picture = next_picture->surface_id;
        if (prev_picture)
            pic_param->forward_reference_picture = prev_picture->surface_id;
        else if (!priv->closed_gop)
            /* Open-GOP leading B picture without its forward reference */
            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_SKIPPED);
        break;
    case GST_MPEG_VIDEO_PICTURE_TYPE_P:
        if (prev_picture)
            pic_param->forward_reference_picture = prev_picture->surface_id;
        break;
    }
}
/* Wraps the slice data of @unit into a GstVaapiSlice attached to the
 * current picture, and fills the VA slice parameters from the header
 * parsed by parse_slice(). */
static GstVaapiDecoderStatus
decode_slice(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstVaapiParserInfoMpeg2 * const pi = unit->parsed_info;
    /* NOTE(review): current_picture is assumed non-NULL here, i.e. a
     * picture header was decoded before the first slice — confirm the
     * caller guarantees this ordering. */
    GstVaapiPicture * const picture = priv->current_picture;
    GstVaapiSlice *slice;
    VASliceParameterBufferMPEG2 *slice_param;
    GstMpegVideoSliceHdr * const slice_hdr = &pi->data.slice_hdr;

    GST_DEBUG("slice %d (%u bytes)", slice_hdr->slice_vertical_position,
        unit->size);

    /* The slice data lives in the frame's input buffer at unit->offset */
    slice = GST_VAAPI_SLICE_NEW(MPEG2, decoder,
        (GST_BUFFER_DATA(GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer) +
         unit->offset), unit->size);
    if (!slice) {
        GST_ERROR("failed to allocate slice");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_add_slice(picture, slice);

    /* Fill in VASliceParameterBufferMPEG2 */
    slice_param = slice->param;
    slice_param->macroblock_offset = slice_hdr->header_size;
    slice_param->slice_horizontal_position = slice_hdr->slice_horizontal_position;
    slice_param->slice_vertical_position = slice_hdr->slice_vertical_position;
    slice_param->quantiser_scale_code = slice_hdr->quantiser_scale_code;
    slice_param->intra_slice_flag = slice_hdr->intra_slice;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Scans the adapter for the next MPEG start code (00 00 01 xx) within
 * [ofs, ofs + size). Returns the byte offset of the start code, or -1
 * if none was found. When @scp is non-NULL, the matched 32-bit word
 * (including the start-code value byte) is stored there. */
static inline gint
scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
{
    const guint32 mask    = 0xffffff00;
    const guint32 pattern = 0x00000100;

    return (gint)gst_adapter_masked_scan_uint32_peek(adapter, mask, pattern,
        ofs, size, scp);
}
/* Dispatches one parsed unit to the matching decode_*() handler based
 * on its start-code (and, for extensions, sub-type). Picture-related
 * units are ignored until the picture size is known from a sequence
 * header. */
static GstVaapiDecoderStatus
decode_unit(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstVaapiParserInfoMpeg2 * const pi = unit->parsed_info;
    const guint type = pi->packet.type;

    /* Slice start codes form a contiguous range of their own */
    if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
        type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX)
        return decode_slice(decoder, unit);

    switch (type) {
    case GST_MPEG_VIDEO_PACKET_SEQUENCE:
        return decode_sequence(decoder, unit);
    case GST_MPEG_VIDEO_PACKET_GOP:
        return decode_gop(decoder, unit);
    case GST_MPEG_VIDEO_PACKET_SEQUENCE_END:
        return decode_sequence_end(decoder);
    case GST_MPEG_VIDEO_PACKET_PICTURE:
        if (!priv->width || !priv->height)
            goto unknown_picture_size;
        return decode_picture(decoder, unit);
    case GST_MPEG_VIDEO_PACKET_EXTENSION:
        break; /* handled by the extension dispatch below */
    default:
        GST_WARNING("unsupported packet type 0x%02x", pi->packet.type);
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    /* Extension start code: dispatch on the sub-type parsed earlier */
    switch (pi->extension_type) {
    case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
        return decode_sequence_ext(decoder, unit);
    case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_DISPLAY:
        return decode_sequence_display_ext(decoder, unit);
    case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
        return decode_quant_matrix_ext(decoder, unit);
    case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
        if (!priv->width || !priv->height)
            goto unknown_picture_size;
        return decode_picture_ext(decoder, unit);
    default:
        // Ignore unknown start-code extensions
        GST_WARNING("unsupported packet extension type 0x%02x",
                    pi->extension_type);
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
    }

unknown_picture_size:
    // Ignore packet while picture size is undefined
    // i.e. missing sequence headers, or not parsed correctly
    GST_WARNING("failed to parse picture of unknown size");
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Makes sure the decoder backend is ready for use: the object must be
 * fully constructed, and the codec is lazily opened on first call. */
static GstVaapiDecoderStatus
ensure_decoder(GstVaapiDecoderMpeg2 *decoder)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;

    g_return_val_if_fail(priv->is_constructed,
        GST_VAAPI_DECODER_STATUS_ERROR_INIT_FAILED);

    if (priv->is_opened)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    /* First use: open the underlying codec now */
    priv->is_opened = gst_vaapi_decoder_mpeg2_open(decoder);
    return priv->is_opened ? GST_VAAPI_DECODER_STATUS_SUCCESS :
        GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
}
/* GstVaapiDecoder::parse() implementation.
 *
 * Extracts exactly one start-code-delimited unit from @adapter: finds
 * the unit's start code, then the next start code (or end-of-data at
 * EOS) to bound it, pre-parses the unit's headers into a
 * GstVaapiParserInfoMpeg2 attached to @unit, and classifies the unit
 * with FRAME_START/FRAME_END/SLICE/SKIP flags for the frame splitter.
 *
 * ps->input_offset2 caches how far the previous call already scanned,
 * so repeated calls on a growing adapter do not rescan old bytes.
 *
 * Returns SUCCESS when one full unit was produced, ERROR_NO_DATA when
 * more input is needed, or a parser/allocation error status.
 */
static GstVaapiDecoderStatus
gst_vaapi_decoder_mpeg2_parse(GstVaapiDecoder *base_decoder,
    GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderMpeg2 * const decoder =
        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
    GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
    GstVaapiParserInfoMpeg2 *pi;
    GstVaapiDecoderStatus status;
    GstMpegVideoPacket *packet;
    const guchar *buf;
    guint32 start_code;
    guint size, buf_size, flags;
    gint ofs, ofs2;
    status = ensure_decoder(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;
    /* Need at least one 4-byte start code (00 00 01 xx) */
    size = gst_adapter_available(adapter);
    if (size < 4)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    ofs = scan_for_start_code(adapter, 0, size, &start_code);
    if (ofs < 0)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    /* Discard garbage bytes preceding the start code */
    if (ofs > 0) {
        gst_adapter_flush(adapter, ofs);
        size -= ofs;
    }
    /* Resume the search for the NEXT start code where the previous
       call left off (input_offset2), adjusted for the bytes flushed
       above; never look inside the current unit's own start code */
    ofs2 = ps->input_offset2 - ofs - 4;
    if (ofs2 < 4)
        ofs2 = 4;
    ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
        scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
    if (ofs < 0) {
        // Assume the whole packet is present if end-of-stream
        if (!at_eos) {
            /* Remember how far we scanned so the next call skips it */
            ps->input_offset2 = size;
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
        }
        ofs = size;
    }
    buf_size = ofs;
    ps->input_offset2 = 0;
    buf = gst_adapter_peek(adapter, buf_size);
    if (!buf)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    unit->size = buf_size;
    /* Attach per-unit parsed info; the unit owns the reference */
    pi = gst_vaapi_parser_info_mpeg2_new();
    if (!pi)
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    gst_vaapi_decoder_unit_set_parsed_info(unit,
        pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
    packet = &pi->packet;
    packet->data = buf;
    packet->size = buf_size;
    packet->offset = 4; /* payload begins right after the start code */
    packet->type = start_code & 0xff;
    /* Parse data */
    switch (packet->type) {
    case GST_MPEG_VIDEO_PACKET_SEQUENCE:
        status = parse_sequence(unit);
        break;
    case GST_MPEG_VIDEO_PACKET_GOP:
        status = parse_gop(unit);
        break;
    case GST_MPEG_VIDEO_PACKET_PICTURE:
        status = parse_picture(unit);
        break;
    case GST_MPEG_VIDEO_PACKET_EXTENSION:
        /* The extension sub-type lives in the high nibble of the byte
           following the start code; require it to be present */
        if (G_UNLIKELY(buf_size < 5)) {
            status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
            break;
        }
        pi->extension_type = buf[4] >> 4;
        switch (pi->extension_type) {
        case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
            status = parse_sequence_ext(unit);
            break;
        case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_DISPLAY:
            status = parse_sequence_display_ext(unit);
            break;
        case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
            status = parse_quant_matrix_ext(unit);
            break;
        case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
            status = parse_picture_ext(unit);
            break;
        default:
            /* Unknown extensions are passed through untouched */
            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
            break;
        }
        break;
    default:
        if (packet->type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
            packet->type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
            status = parse_slice(decoder, unit);
            break;
        }
        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
        break;
    }
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;
    /* Check for start of new picture */
    flags = 0;
    switch (packet->type) {
    case GST_MPEG_VIDEO_PACKET_SEQUENCE_END:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
        break;
    case GST_MPEG_VIDEO_PACKET_USER_DATA:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
        /* fall-through */
    case GST_MPEG_VIDEO_PACKET_SEQUENCE:
    case GST_MPEG_VIDEO_PACKET_GOP:
    case GST_MPEG_VIDEO_PACKET_PICTURE:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
        break;
    default:
        if (packet->type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
            packet->type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX)
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
        // Ignore system start codes (PES headers)
        else if (packet->type >= 0xb9 && packet->type <= 0xff)
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
        break;
    }
    GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
    /* The adapter bytes behind packet->data may move; drop the pointer
       so later stages cannot dereference stale data */
    pi->packet.data = NULL;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* GstVaapiDecoder::decode() implementation: checks that the decoder is
 * usable, then forwards the unit to the packet dispatcher. */
static GstVaapiDecoderStatus
gst_vaapi_decoder_mpeg2_decode(GstVaapiDecoder *base_decoder,
    GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderMpeg2 * const decoder =
        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
    const GstVaapiDecoderStatus status = ensure_decoder(decoder);

    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;
    return decode_unit(decoder, unit);
}
/* GstVaapiDecoder::start_frame() implementation.
 *
 * Prepares priv->current_picture for the upcoming slices: finalizes
 * sequence-level info (pixel aspect ratio), (re)creates the VA context
 * if needed, then either allocates a fresh picture or derives a second
 * field picture from the pending one, and seeds its quantizer matrix
 * and picture parameters.
 *
 * Returns SUCCESS, or an allocation/context error status.
 */
static GstVaapiDecoderStatus
gst_vaapi_decoder_mpeg2_start_frame(GstVaapiDecoder *base_decoder,
    GstVaapiDecoderUnit *base_unit)
{
    GstVaapiDecoderMpeg2 * const decoder =
        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoSequenceHdr *seq_hdr;
    GstMpegVideoSequenceExt *seq_ext;
    GstMpegVideoSequenceDisplayExt *seq_display_ext;
    GstVaapiPicture *picture;
    GstVaapiDecoderStatus status;
    if (!priv->width || !priv->height) {
        // Ignore packet while picture size is undefined
        // i.e. missing sequence headers, or not parsed correctly
        GST_WARNING("failed to decode picture of unknown size");
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
    }
    /* Sequence extension headers are optional; pass NULL when absent */
    seq_hdr = &priv->seq_hdr->data.seq_hdr;
    seq_ext = priv->seq_ext ? &priv->seq_ext->data.seq_ext : NULL;
    seq_display_ext = priv->seq_display_ext ?
        &priv->seq_display_ext->data.seq_display_ext : NULL;
    /* Derive the final pixel aspect ratio from the combined headers */
    if (gst_mpeg_video_finalise_mpeg2_sequence_header(seq_hdr, seq_ext,
            seq_display_ext))
        gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder,
            seq_hdr->par_w, seq_hdr->par_h);
    status = ensure_context(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_ERROR("failed to reset context");
        return status;
    }
    if (priv->current_picture) {
        /* Re-use current picture where the first field was decoded */
        picture = gst_vaapi_picture_new_field(priv->current_picture);
        if (!picture) {
            GST_ERROR("failed to allocate field picture");
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        }
    }
    else {
        /* Create new picture */
        picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
        if (!picture) {
            GST_ERROR("failed to allocate picture");
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        }
    }
    /* replace() takes its own reference (and releases the previous
       current picture); then drop the local creation reference so
       priv->current_picture holds the only one we keep */
    gst_vaapi_picture_replace(&priv->current_picture, picture);
    gst_vaapi_picture_unref(picture);
    status = ensure_quant_matrix(decoder, picture);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_ERROR("failed to reset quantizer matrix");
        return status;
    }
    status = init_picture(decoder, picture);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;
    fill_picture(decoder, picture);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* GstVaapiDecoder::end_frame() implementation: submits the picture
 * accumulated since the last start_frame() call. */
static GstVaapiDecoderStatus
gst_vaapi_decoder_mpeg2_end_frame(GstVaapiDecoder *base_decoder)
{
    return decode_current_picture(GST_VAAPI_DECODER_MPEG2_CAST(base_decoder));
}
/* GObject finalize: releases codec-specific resources, then chains up
 * to the parent class. */
static void
gst_vaapi_decoder_mpeg2_finalize(GObject *object)
{
    gst_vaapi_decoder_mpeg2_destroy(GST_VAAPI_DECODER_MPEG2_CAST(object));

    G_OBJECT_CLASS(gst_vaapi_decoder_mpeg2_parent_class)->finalize(object);
}
/* GObject constructed: chains up, then records whether codec-specific
 * construction succeeded so _new() can fail gracefully. */
static void
gst_vaapi_decoder_mpeg2_constructed(GObject *object)
{
    GstVaapiDecoderMpeg2 * const decoder = GST_VAAPI_DECODER_MPEG2_CAST(object);
    GObjectClass * const parent_class =
        G_OBJECT_CLASS(gst_vaapi_decoder_mpeg2_parent_class);

    if (parent_class->constructed)
        parent_class->constructed(object);

    decoder->priv->is_constructed = gst_vaapi_decoder_mpeg2_create(decoder);
}
/* Class initializer: registers the private struct and installs the
 * GObject and GstVaapiDecoder virtual functions. */
static void
gst_vaapi_decoder_mpeg2_class_init(GstVaapiDecoderMpeg2Class *klass)
{
    GObjectClass * const gobject_class = G_OBJECT_CLASS(klass);
    GstVaapiDecoderClass * const vdec_class = GST_VAAPI_DECODER_CLASS(klass);

    g_type_class_add_private(klass, sizeof(GstVaapiDecoderMpeg2Private));

    gobject_class->finalize    = gst_vaapi_decoder_mpeg2_finalize;
    gobject_class->constructed = gst_vaapi_decoder_mpeg2_constructed;

    vdec_class->parse       = gst_vaapi_decoder_mpeg2_parse;
    vdec_class->decode      = gst_vaapi_decoder_mpeg2_decode;
    vdec_class->start_frame = gst_vaapi_decoder_mpeg2_start_frame;
    vdec_class->end_frame   = gst_vaapi_decoder_mpeg2_end_frame;
}
/* Instance initializer: binds the private struct and seeds profile
 * state with permissive defaults. */
static void
gst_vaapi_decoder_mpeg2_init(GstVaapiDecoderMpeg2 *decoder)
{
    GstVaapiDecoderMpeg2Private * const priv =
        GST_VAAPI_DECODER_MPEG2_GET_PRIVATE(decoder);

    decoder->priv         = priv;
    priv->hw_profile      = GST_VAAPI_PROFILE_UNKNOWN;
    priv->profile         = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
    priv->profile_changed = TRUE; /* Allow fallbacks to work */
}
/**
 * gst_vaapi_decoder_mpeg2_new:
 * @display: a #GstVaapiDisplay
 * @caps: a #GstCaps holding codec information
 *
 * Creates a new #GstVaapiDecoder for MPEG-2 decoding. The @caps can
 * hold extra information like codec-data and picture coded size.
 *
 * Return value: the newly allocated #GstVaapiDecoder object, or %NULL
 *   if construction failed
 */
GstVaapiDecoder *
gst_vaapi_decoder_mpeg2_new(GstVaapiDisplay *display, GstCaps *caps)
{
    GstVaapiDecoderMpeg2 *decoder;

    g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
    g_return_val_if_fail(GST_IS_CAPS(caps), NULL);

    decoder = g_object_new(GST_VAAPI_TYPE_DECODER_MPEG2,
        "display", display,
        "caps",    caps,
        NULL);

    /* constructed() records whether codec-specific setup succeeded */
    if (!decoder->priv->is_constructed) {
        g_object_unref(decoder);
        return NULL;
    }
    return GST_VAAPI_DECODER_CAST(decoder);
}