mirror of
https://gitlab.freedesktop.org/gstreamer/gstreamer.git
synced 2024-11-10 03:19:40 +00:00
748a8dbdc6
Implement GstVaapiDecoder.start_frame() and end_frame() semantics so that to create new VA context earlier and submit VA pictures to the HW for decoding as soon as possible. i.e. don't wait for the next frame to start decoding the previous one.
1436 lines
48 KiB
C
1436 lines
48 KiB
C
/*
|
|
* gstvaapidecoder_mpeg2.c - MPEG-2 decoder
|
|
*
|
|
* Copyright (C) 2011 Intel Corporation
|
|
*
|
|
* This library is free software; you can redistribute it and/or
|
|
* modify it under the terms of the GNU Lesser General Public License
|
|
* as published by the Free Software Foundation; either version 2.1
|
|
* of the License, or (at your option) any later version.
|
|
*
|
|
* This library is distributed in the hope that it will be useful,
|
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
* Lesser General Public License for more details.
|
|
*
|
|
* You should have received a copy of the GNU Lesser General Public
|
|
* License along with this library; if not, write to the Free
|
|
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
|
* Boston, MA 02110-1301 USA
|
|
*/
|
|
|
|
/**
|
|
* SECTION:gstvaapidecoder_mpeg2
|
|
* @short_description: MPEG-2 decoder
|
|
*/
|
|
|
|
#include "sysdeps.h"
|
|
#include <string.h>
|
|
#include <gst/base/gstbitreader.h>
|
|
#include <gst/codecparsers/gstmpegvideoparser.h>
|
|
#include "gstvaapidecoder_mpeg2.h"
|
|
#include "gstvaapidecoder_objects.h"
|
|
#include "gstvaapidecoder_dpb.h"
|
|
#include "gstvaapidecoder_priv.h"
|
|
#include "gstvaapidisplay_priv.h"
|
|
#include "gstvaapiobject_priv.h"
|
|
|
|
#define DEBUG 1
|
|
#include "gstvaapidebug.h"
|
|
|
|
/* GObject boilerplate: registers GstVaapiDecoderMpeg2 as a subclass of
 * the base GstVaapiDecoder type. */
G_DEFINE_TYPE(GstVaapiDecoderMpeg2,
    gst_vaapi_decoder_mpeg2,
    GST_VAAPI_TYPE_DECODER)

/* Accessor for the instance-private data area of a decoder object. */
#define GST_VAAPI_DECODER_MPEG2_GET_PRIVATE(obj) \
    (G_TYPE_INSTANCE_GET_PRIVATE((obj), \
        GST_VAAPI_TYPE_DECODER_MPEG2, \
        GstVaapiDecoderMpeg2Private))

/* Read @nbits (at most 8) from the bit reader into @val.
 * NOTE: jumps to a "failed" label that must exist in the calling
 * function when the bitstream is exhausted. */
#define READ_UINT8(br, val, nbits) G_STMT_START { \
    if (!gst_bit_reader_get_bits_uint8 (br, &val, nbits)) { \
        GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
        goto failed; \
    } \
} G_STMT_END

/* Skip @nbits in the bit reader; jumps to the caller's "failed" label
 * when the bitstream is exhausted. */
#define SKIP(reader, nbits) G_STMT_START { \
    if (!gst_bit_reader_skip (reader, nbits)) { \
        GST_WARNING ("failed to skip nbits: %d", nbits); \
        goto failed; \
    } \
} G_STMT_END
|
|
|
|
/* ------------------------------------------------------------------------- */
|
|
/* --- PTS Generator --- */
|
|
/* ------------------------------------------------------------------------- */
|
|
|
|
typedef struct _PTSGenerator PTSGenerator;
/* State used to derive per-picture presentation timestamps from GOP
 * timestamps and temporal sequence numbers (TSN).  TSNs are 10-bit in
 * MPEG-2 (0..1023), hence the 1024-based overflow accounting below. */
struct _PTSGenerator {
    GstClockTime gop_pts; // Current GOP PTS
    GstClockTime max_pts; // Max picture PTS
    guint gop_tsn;        // Absolute GOP TSN
    guint max_tsn;        // Max picture TSN, relative to last GOP TSN
    guint ovl_tsn;        // How many times TSN overflowed since GOP
    guint lst_tsn;        // Last picture TSN
    guint fps_n;          // Framerate numerator
    guint fps_d;          // Framerate denominator
};
|
|
|
|
static void
|
|
pts_init(PTSGenerator *tsg)
|
|
{
|
|
tsg->gop_pts = GST_CLOCK_TIME_NONE;
|
|
tsg->max_pts = GST_CLOCK_TIME_NONE;
|
|
tsg->gop_tsn = 0;
|
|
tsg->max_tsn = 0;
|
|
tsg->ovl_tsn = 0;
|
|
tsg->lst_tsn = 0;
|
|
tsg->fps_n = 0;
|
|
tsg->fps_d = 0;
|
|
}
|
|
|
|
/* Returns the duration of @num_frames frames at the generator's current
 * framerate, in nanoseconds (GstClockTime). */
static inline GstClockTime
pts_get_duration(PTSGenerator *tsg, guint num_frames)
{
    return gst_util_uint64_scale(num_frames,
        GST_SECOND * tsg->fps_d, tsg->fps_n);
}
|
|
|
|
/* Returns the absolute picture order count of the last evaluated
 * picture: GOP base TSN plus TSN overflows plus the last picture TSN. */
static inline guint
pts_get_poc(PTSGenerator *tsg)
{
    return tsg->gop_tsn + tsg->ovl_tsn * 1024 + tsg->lst_tsn;
}
|
|
|
|
/* Records the stream framerate used for all duration interpolation. */
static void
pts_set_framerate(PTSGenerator *tsg, guint fps_n, guint fps_d)
{
    tsg->fps_n = fps_n;
    tsg->fps_d = fps_d;
}
|
|
|
|
/* Re-anchors the generator at a GOP boundary.  @gop_pts is the PTS
 * attached to the GOP header frame; when it is missing or not strictly
 * greater than the highest picture PTS seen so far, a replacement is
 * interpolated from previous state.  Resets the per-GOP TSN counters. */
static void
pts_sync(PTSGenerator *tsg, GstClockTime gop_pts)
{
    guint gop_tsn;

    if (!GST_CLOCK_TIME_IS_VALID(gop_pts) ||
        (GST_CLOCK_TIME_IS_VALID(tsg->max_pts) && tsg->max_pts >= gop_pts)) {
        /* Invalid GOP PTS, interpolate from the last known picture PTS */
        if (GST_CLOCK_TIME_IS_VALID(tsg->max_pts)) {
            gop_pts = tsg->max_pts + pts_get_duration(tsg, 1);
            gop_tsn = tsg->gop_tsn + tsg->ovl_tsn * 1024 + tsg->max_tsn + 1;
        }
        else {
            /* No usable history at all: start from zero */
            gop_pts = 0;
            gop_tsn = 0;
        }
    }
    else {
        /* Interpolate GOP TSN from this valid PTS.  The "+ duration - 1"
         * term rounds the frame count up before scaling by fps. */
        if (GST_CLOCK_TIME_IS_VALID(tsg->gop_pts))
            gop_tsn = tsg->gop_tsn + gst_util_uint64_scale(
                gop_pts - tsg->gop_pts + pts_get_duration(tsg, 1) - 1,
                tsg->fps_n, GST_SECOND * tsg->fps_d);
        else
            gop_tsn = 0;
    }

    tsg->gop_pts = gop_pts;
    tsg->gop_tsn = gop_tsn;
    tsg->max_tsn = 0;
    tsg->ovl_tsn = 0;
    tsg->lst_tsn = 0;
}
|
|
|
|
/* Computes the PTS for one picture from its 10-bit temporal sequence
 * number @pic_tsn, tracking TSN wrap-around (1023 -> 0) within the
 * current GOP.  @pic_pts is currently unused; the PTS is derived purely
 * from the GOP anchor and the TSN.  Updates max/last TSN bookkeeping. */
static GstClockTime
pts_eval(PTSGenerator *tsg, GstClockTime pic_pts, guint pic_tsn)
{
    GstClockTime pts;

    /* Without a GOP anchor yet, assume the stream starts at time 0 */
    if (!GST_CLOCK_TIME_IS_VALID(tsg->gop_pts))
        tsg->gop_pts = 0;

    pts = tsg->gop_pts + pts_get_duration(tsg, tsg->ovl_tsn * 1024 + pic_tsn);

    if (!GST_CLOCK_TIME_IS_VALID(tsg->max_pts) || tsg->max_pts < pts)
        tsg->max_pts = pts;

    if (tsg->max_tsn < pic_tsn)
        tsg->max_tsn = pic_tsn;
    else if (tsg->max_tsn == 1023 && pic_tsn < tsg->lst_tsn) { /* TSN wrapped */
        tsg->max_tsn = pic_tsn;
        tsg->ovl_tsn++;
    }
    tsg->lst_tsn = pic_tsn;
    return pts;
}
|
|
|
|
/* ------------------------------------------------------------------------- */
|
|
/* --- MPEG-2 Decoder Units --- */
|
|
/* ------------------------------------------------------------------------- */
|
|
|
|
typedef struct _GstMpegVideoSliceHdr GstMpegVideoSliceHdr;
/* Parsed slice() header fields needed to build the VA slice parameters.
 * Positions are in macroblock units. */
struct _GstMpegVideoSliceHdr {
    guint16 slice_horizontal_position; // first MB column of the slice
    guint16 slice_vertical_position;   // MB row, from the slice start code
    guint8 quantiser_scale_code;
    guint8 intra_slice;

    /* Size of the slice() header in bits */
    guint header_size;
};
|
|
|
|
typedef struct _GstVaapiDecoderUnitMpeg2 GstVaapiDecoderUnitMpeg2;
/* One parsed bitstream unit (start-code delimited packet) plus the
 * decoded header that corresponds to its packet type.  Only the union
 * member matching packet.type (and extension_type) is valid. */
struct _GstVaapiDecoderUnitMpeg2 {
    GstVaapiDecoderUnit base;      // refcounted base decoder unit
    GstMpegVideoPacket packet;     // raw packet (data/offset/size/type)
    guint8 extension_type;         /* for Extension packets */
    union {
        GstMpegVideoSequenceHdr seq_hdr;
        GstMpegVideoSequenceExt seq_ext;
        GstMpegVideoGop gop;
        GstMpegVideoQuantMatrixExt quant_matrix;
        GstMpegVideoPictureHdr pic_hdr;
        GstMpegVideoPictureExt pic_ext;
        GstMpegVideoSliceHdr slice_hdr;
    } data;
};
|
|
|
|
static GstVaapiDecoderUnitMpeg2 *
|
|
gst_vaapi_decoder_unit_mpeg2_new(guint size)
|
|
{
|
|
GstVaapiDecoderUnitMpeg2 *unit;
|
|
|
|
static const GstVaapiMiniObjectClass GstVaapiDecoderUnitMpeg2Class = {
|
|
sizeof(GstVaapiDecoderUnitMpeg2),
|
|
(GDestroyNotify)gst_vaapi_decoder_unit_finalize
|
|
};
|
|
|
|
unit = (GstVaapiDecoderUnitMpeg2 *)
|
|
gst_vaapi_mini_object_new(&GstVaapiDecoderUnitMpeg2Class);
|
|
if (!unit)
|
|
return NULL;
|
|
|
|
gst_vaapi_decoder_unit_init(&unit->base, size);
|
|
return unit;
|
|
}
|
|
|
|
/* ------------------------------------------------------------------------- */
|
|
/* --- MPEG-2 Decoder --- */
|
|
/* ------------------------------------------------------------------------- */
|
|
|
|
/* Per-instance decoder state.  The *_unit pointers hold the most
 * recently decoded header of each kind; they are refcounted and
 * released in gst_vaapi_decoder_mpeg2_close(). */
struct _GstVaapiDecoderMpeg2Private {
    GstVaapiProfile profile;              // profile signalled by the stream
    GstVaapiProfile hw_profile;           // profile selected for the VA context
    guint width;                          // coded width, possibly extended
    guint height;                         // coded height, possibly extended
    guint fps_n;                          // framerate numerator
    guint fps_d;                          // framerate denominator
    GstVaapiDecoderUnitMpeg2 *seq_hdr_unit;
    GstVaapiDecoderUnitMpeg2 *seq_ext_unit;
    GstVaapiDecoderUnitMpeg2 *seq_scalable_ext_unit;
    GstVaapiDecoderUnitMpeg2 *pic_hdr_unit;
    GstVaapiDecoderUnitMpeg2 *pic_ext_unit;
    GstVaapiDecoderUnitMpeg2 *quant_matrix_unit;
    GstVaapiPicture *current_picture;     // picture being accumulated/decoded
    GstVaapiDpb *dpb;                     // decoded picture buffer (2 refs)
    PTSGenerator tsg;                     // timestamp generator state
    guint is_constructed : 1;
    guint is_opened : 1;
    guint size_changed : 1;               // context needs reset for new size
    guint profile_changed : 1;            // context needs reset for new profile
    guint quant_matrix_changed : 1;       // IQ matrix must be re-submitted
    guint progressive_sequence : 1;
    guint closed_gop : 1;                 // from the last GOP header
    guint broken_link : 1;                // from the last GOP header
};
|
|
|
|
/* VLC decoder from gst-plugins-bad */
typedef struct _VLCTable VLCTable;
/* One variable-length-code table entry.  Entries must be ordered by
 * non-decreasing code length for decode_vlc() to work. */
struct _VLCTable {
    gint value;  // decoded value for this codeword
    guint cword; // the codeword bits, right-aligned
    guint cbits; // number of bits in the codeword
};
|
|
|
|
/* Matches the next bits of @br against @table (length @length entries,
 * sorted by code length) and stores the decoded value in @res (if
 * non-NULL).  Consumes the matched bits on success.  Returns FALSE if
 * no codeword matches or the bitstream is exhausted. */
static gboolean
decode_vlc(GstBitReader *br, gint *res, const VLCTable *table, guint length)
{
    guint8 i;
    guint cbits = 0;
    guint32 value = 0;

    for (i = 0; i < length; i++) {
        /* Only re-peek when the candidate code length changes */
        if (cbits != table[i].cbits) {
            cbits = table[i].cbits;
            if (!gst_bit_reader_peek_bits_uint32(br, &value, cbits)) {
                goto failed;
            }
        }

        if (value == table[i].cword) {
            SKIP(br, cbits); /* may jump to "failed" */
            if (res)
                *res = table[i].value;
            return TRUE;
        }
    }
    GST_DEBUG("failed to find VLC code");
    /* intentional fallthrough into the failed label */

failed:
    GST_WARNING("failed to decode VLC, returning");
    return FALSE;
}
|
|
|
|
/* Sentinel decoded value for the macroblock_escape codeword, which adds
 * 33 to the running macroblock_address_increment (ISO/IEC 13818-2). */
enum {
    GST_MPEG_VIDEO_MACROBLOCK_ESCAPE = -1,
};
|
|
|
|
/* Table B-1: Variable length codes for macroblock_address_increment */
/* Entries are { decoded value, codeword, codeword length in bits },
 * ordered by non-decreasing length as required by decode_vlc(). */
static const VLCTable mpeg2_mbaddr_vlc_table[] = {
    { 1, 0x01, 1 },
    { 2, 0x03, 3 },
    { 3, 0x02, 3 },
    { 4, 0x03, 4 },
    { 5, 0x02, 4 },
    { 6, 0x03, 5 },
    { 7, 0x02, 5 },
    { 8, 0x07, 7 },
    { 9, 0x06, 7 },
    { 10, 0x0b, 8 },
    { 11, 0x0a, 8 },
    { 12, 0x09, 8 },
    { 13, 0x08, 8 },
    { 14, 0x07, 8 },
    { 15, 0x06, 8 },
    { 16, 0x17, 10 },
    { 17, 0x16, 10 },
    { 18, 0x15, 10 },
    { 19, 0x14, 10 },
    { 20, 0x13, 10 },
    { 21, 0x12, 10 },
    { 22, 0x23, 11 },
    { 23, 0x22, 11 },
    { 24, 0x21, 11 },
    { 25, 0x20, 11 },
    { 26, 0x1f, 11 },
    { 27, 0x1e, 11 },
    { 28, 0x1d, 11 },
    { 29, 0x1c, 11 },
    { 30, 0x1b, 11 },
    { 31, 0x1a, 11 },
    { 32, 0x19, 11 },
    { 33, 0x18, 11 },
    { GST_MPEG_VIDEO_MACROBLOCK_ESCAPE, 0x08, 11 }
};
|
|
|
|
static void
|
|
gst_vaapi_decoder_mpeg2_close(GstVaapiDecoderMpeg2 *decoder)
|
|
{
|
|
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
|
|
|
|
gst_vaapi_picture_replace(&priv->current_picture, NULL);
|
|
|
|
gst_vaapi_decoder_unit_replace(&priv->seq_hdr_unit, NULL);
|
|
gst_vaapi_decoder_unit_replace(&priv->seq_ext_unit, NULL);
|
|
gst_vaapi_decoder_unit_replace(&priv->seq_scalable_ext_unit, NULL);
|
|
gst_vaapi_decoder_unit_replace(&priv->pic_hdr_unit, NULL);
|
|
gst_vaapi_decoder_unit_replace(&priv->pic_ext_unit, NULL);
|
|
gst_vaapi_decoder_unit_replace(&priv->quant_matrix_unit, NULL);
|
|
|
|
if (priv->dpb) {
|
|
gst_vaapi_dpb_unref(priv->dpb);
|
|
priv->dpb = NULL;
|
|
}
|
|
}
|
|
|
|
static gboolean
|
|
gst_vaapi_decoder_mpeg2_open(GstVaapiDecoderMpeg2 *decoder)
|
|
{
|
|
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
|
|
|
|
gst_vaapi_decoder_mpeg2_close(decoder);
|
|
|
|
priv->dpb = gst_vaapi_dpb_mpeg2_new();
|
|
if (!priv->dpb)
|
|
return FALSE;
|
|
|
|
pts_init(&priv->tsg);
|
|
return TRUE;
|
|
}
|
|
|
|
/* Final teardown hook: identical to close(), releasing all stream state. */
static void
gst_vaapi_decoder_mpeg2_destroy(GstVaapiDecoderMpeg2 *decoder)
{
    gst_vaapi_decoder_mpeg2_close(decoder);
}
|
|
|
|
static gboolean
|
|
gst_vaapi_decoder_mpeg2_create(GstVaapiDecoderMpeg2 *decoder)
|
|
{
|
|
if (!GST_VAAPI_DECODER_CODEC(decoder))
|
|
return FALSE;
|
|
return TRUE;
|
|
}
|
|
|
|
/* Copies one 64-entry (8x8) quantizer matrix. */
static inline void
copy_quant_matrix(guint8 dst[64], const guint8 src[64])
{
    memcpy(dst, src, 64);
}
|
|
|
|
static const char *
|
|
get_profile_str(GstVaapiProfile profile)
|
|
{
|
|
char *str;
|
|
|
|
switch (profile) {
|
|
case GST_VAAPI_PROFILE_MPEG2_SIMPLE: str = "simple"; break;
|
|
case GST_VAAPI_PROFILE_MPEG2_MAIN: str = "main"; break;
|
|
case GST_VAAPI_PROFILE_MPEG2_HIGH: str = "high"; break;
|
|
default: str = "<unknown>"; break;
|
|
}
|
|
return str;
|
|
}
|
|
|
|
/* Maps the bitstream profile to a profile the VA display can actually
 * decode, walking upwards (simple -> main -> high) until a supported
 * one is found.  As a special case, a signalled high profile that uses
 * no high-profile-only features (no scalable extension, 4:2:0 chroma)
 * may be mapped back down to main.  Returns GST_VAAPI_PROFILE_UNKNOWN
 * when nothing is supported. */
static GstVaapiProfile
get_profile(GstVaapiDecoderMpeg2 *decoder, GstVaapiEntrypoint entrypoint)
{
    GstVaapiDisplay * const va_display = GST_VAAPI_DECODER_DISPLAY(decoder);
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstVaapiProfile profile = priv->profile;

    do {
        /* Return immediately if the exact same profile was found */
        if (gst_vaapi_display_has_decoder(va_display, profile, entrypoint))
            break;

        /* Otherwise, try to map to a higher profile */
        switch (profile) {
        case GST_VAAPI_PROFILE_MPEG2_SIMPLE:
            profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
            break;
        case GST_VAAPI_PROFILE_MPEG2_MAIN:
            profile = GST_VAAPI_PROFILE_MPEG2_HIGH;
            break;
        case GST_VAAPI_PROFILE_MPEG2_HIGH:
            // Try to map to main profile if no high profile specific bits used
            /* Only on the first pass (priv->profile == profile), so the
             * downgrade cannot loop back after main was already tried. */
            if (priv->profile == profile &&
                !priv->seq_scalable_ext_unit &&
                (priv->seq_ext_unit &&
                 priv->seq_ext_unit->data.seq_ext.chroma_format == 1)) {
                profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
                break;
            }
            // fall-through
        default:
            profile = GST_VAAPI_PROFILE_UNKNOWN;
            break;
        }
    } while (profile != GST_VAAPI_PROFILE_UNKNOWN);

    if (profile != priv->profile)
        GST_INFO("forced %s profile to %s profile",
            get_profile_str(priv->profile), get_profile_str(profile));
    return profile;
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
ensure_context(GstVaapiDecoderMpeg2 *decoder)
|
|
{
|
|
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
|
|
GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
|
|
gboolean reset_context = FALSE;
|
|
|
|
if (priv->profile_changed) {
|
|
GST_DEBUG("profile changed");
|
|
priv->profile_changed = FALSE;
|
|
reset_context = TRUE;
|
|
|
|
priv->hw_profile = get_profile(decoder, entrypoint);
|
|
if (priv->hw_profile == GST_VAAPI_PROFILE_UNKNOWN)
|
|
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
|
|
}
|
|
|
|
if (priv->size_changed) {
|
|
GST_DEBUG("size changed");
|
|
priv->size_changed = FALSE;
|
|
reset_context = TRUE;
|
|
}
|
|
|
|
if (reset_context) {
|
|
GstVaapiContextInfo info;
|
|
|
|
info.profile = priv->hw_profile;
|
|
info.entrypoint = entrypoint;
|
|
info.width = priv->width;
|
|
info.height = priv->height;
|
|
info.ref_frames = 2;
|
|
reset_context = gst_vaapi_decoder_ensure_context(
|
|
GST_VAAPI_DECODER(decoder),
|
|
&info
|
|
);
|
|
if (!reset_context)
|
|
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
|
|
}
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
/* Attaches an IQ matrix buffer to @picture when the quantization
 * matrices changed since the last picture.  Defaults come from the
 * sequence header; a quant-matrix extension, when present, overrides
 * individual matrices (chroma matrices exist only via the extension). */
static GstVaapiDecoderStatus
ensure_quant_matrix(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoSequenceHdr * const seq_hdr = &priv->seq_hdr_unit->data.seq_hdr;
    VAIQMatrixBufferMPEG2 *iq_matrix;
    guint8 *intra_quant_matrix = NULL;
    guint8 *non_intra_quant_matrix = NULL;
    guint8 *chroma_intra_quant_matrix = NULL;
    guint8 *chroma_non_intra_quant_matrix = NULL;

    /* Nothing to do if the matrices did not change */
    if (!priv->quant_matrix_changed)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    priv->quant_matrix_changed = FALSE;

    picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(MPEG2, decoder);
    if (!picture->iq_matrix) {
        GST_ERROR("failed to allocate IQ matrix");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    iq_matrix = picture->iq_matrix->param;

    /* Sequence-header matrices are the defaults... */
    intra_quant_matrix = seq_hdr->intra_quantizer_matrix;
    non_intra_quant_matrix = seq_hdr->non_intra_quantizer_matrix;

    /* ...overridden per-matrix by a quant-matrix extension, if any */
    if (priv->quant_matrix_unit) {
        GstMpegVideoQuantMatrixExt * const quant_matrix =
            &priv->quant_matrix_unit->data.quant_matrix;
        if (quant_matrix->load_intra_quantiser_matrix)
            intra_quant_matrix = quant_matrix->intra_quantiser_matrix;
        if (quant_matrix->load_non_intra_quantiser_matrix)
            non_intra_quant_matrix = quant_matrix->non_intra_quantiser_matrix;
        if (quant_matrix->load_chroma_intra_quantiser_matrix)
            chroma_intra_quant_matrix = quant_matrix->chroma_intra_quantiser_matrix;
        if (quant_matrix->load_chroma_non_intra_quantiser_matrix)
            chroma_non_intra_quant_matrix = quant_matrix->chroma_non_intra_quantiser_matrix;
    }

    /* Copy whichever matrices are present into the VA buffer */
    iq_matrix->load_intra_quantiser_matrix = intra_quant_matrix != NULL;
    if (intra_quant_matrix)
        copy_quant_matrix(iq_matrix->intra_quantiser_matrix,
            intra_quant_matrix);

    iq_matrix->load_non_intra_quantiser_matrix = non_intra_quant_matrix != NULL;
    if (non_intra_quant_matrix)
        copy_quant_matrix(iq_matrix->non_intra_quantiser_matrix,
            non_intra_quant_matrix);

    iq_matrix->load_chroma_intra_quantiser_matrix = chroma_intra_quant_matrix != NULL;
    if (chroma_intra_quant_matrix)
        copy_quant_matrix(iq_matrix->chroma_intra_quantiser_matrix,
            chroma_intra_quant_matrix);

    iq_matrix->load_chroma_non_intra_quantiser_matrix = chroma_non_intra_quant_matrix != NULL;
    if (chroma_non_intra_quant_matrix)
        copy_quant_matrix(iq_matrix->chroma_non_intra_quantiser_matrix,
            chroma_non_intra_quant_matrix);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
|
|
|
|
/* Submits the current picture to the hardware and, once both fields (or
 * the full frame) are complete, pushes it into the DPB and drops our
 * reference.  A no-op when there is no picture in flight. */
static GstVaapiDecoderStatus
decode_current_picture(GstVaapiDecoderMpeg2 *decoder)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstVaapiPicture * const picture = priv->current_picture;

    if (!picture)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    if (!gst_vaapi_picture_decode(picture))
        goto error;
    if (GST_VAAPI_PICTURE_IS_COMPLETE(picture)) {
        if (!gst_vaapi_dpb_add(priv->dpb, picture))
            goto error;
        gst_vaapi_picture_replace(&priv->current_picture, NULL);
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;

error:
    /* XXX: fix for cases where first field failed to be decoded */
    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
parse_sequence(GstVaapiDecoderUnitMpeg2 *unit)
|
|
{
|
|
GstMpegVideoPacket * const packet = &unit->packet;
|
|
|
|
if (!gst_mpeg_video_parse_sequence_header(&unit->data.seq_hdr,
|
|
packet->data, packet->size, packet->offset)) {
|
|
GST_ERROR("failed to parse sequence header");
|
|
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
|
|
}
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
/* Applies a freshly parsed sequence header: caches the unit, updates
 * framerate, pixel aspect ratio and coded size, and marks size / quant
 * matrices as changed.  Any previous sequence extension is invalidated;
 * progressive is assumed until an extension says otherwise. */
static GstVaapiDecoderStatus
decode_sequence(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnitMpeg2 *unit)
{
    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoSequenceHdr *seq_hdr;

    gst_vaapi_decoder_unit_replace(&priv->seq_hdr_unit, unit);
    seq_hdr = &priv->seq_hdr_unit->data.seq_hdr;
    /* A new sequence header invalidates the previous extension */
    gst_vaapi_decoder_unit_replace(&priv->seq_ext_unit, NULL);

    priv->fps_n = seq_hdr->fps_n;
    priv->fps_d = seq_hdr->fps_d;
    pts_set_framerate(&priv->tsg, priv->fps_n, priv->fps_d);
    gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);

    if (gst_mpeg_video_finalise_mpeg2_sequence_header(seq_hdr, NULL, NULL))
        gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder,
            seq_hdr->par_w, seq_hdr->par_h);

    priv->width = seq_hdr->width;
    priv->height = seq_hdr->height;
    priv->size_changed = TRUE;
    priv->quant_matrix_changed = TRUE;
    /* MPEG-1 style default; a sequence extension may override this */
    priv->progressive_sequence = TRUE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
parse_sequence_ext(GstVaapiDecoderUnitMpeg2 *unit)
|
|
{
|
|
GstMpegVideoPacket * const packet = &unit->packet;
|
|
|
|
if (!gst_mpeg_video_parse_sequence_extension(&unit->data.seq_ext,
|
|
packet->data, packet->size, packet->offset)) {
|
|
GST_ERROR("failed to parse sequence-extension");
|
|
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
|
|
}
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
/* Applies a parsed sequence extension: interlacing mode, 12 -> 14 bit
 * extended size and framerate, and the MPEG-2 profile.  Flags profile /
 * size changes so the VA context is rebuilt before the next picture. */
static GstVaapiDecoderStatus
decode_sequence_ext(GstVaapiDecoderMpeg2 *decoder,
    GstVaapiDecoderUnitMpeg2 *unit)
{
    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoSequenceHdr * const seq_hdr = &priv->seq_hdr_unit->data.seq_hdr;
    GstMpegVideoSequenceExt *seq_ext;
    GstVaapiProfile profile;
    guint width, height;

    gst_vaapi_decoder_unit_replace(&priv->seq_ext_unit, unit);
    seq_ext = &priv->seq_ext_unit->data.seq_ext;

    priv->progressive_sequence = seq_ext->progressive;
    gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);

    /* The extension supplies the upper 2 bits of the 14-bit dimensions */
    width = (priv->width & 0x0fff) | ((guint32)seq_ext->horiz_size_ext << 12);
    height = (priv->height & 0x0fff) | ((guint32)seq_ext->vert_size_ext << 12);
    GST_DEBUG("video resolution %ux%u", width, height);

    /* Extension values scale the sequence-header framerate */
    if (seq_ext->fps_n_ext && seq_ext->fps_d_ext) {
        priv->fps_n *= seq_ext->fps_n_ext + 1;
        priv->fps_d *= seq_ext->fps_d_ext + 1;
        pts_set_framerate(&priv->tsg, priv->fps_n, priv->fps_d);
        gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);
    }

    if (priv->width != width) {
        priv->width = width;
        priv->size_changed = TRUE;
    }

    if (priv->height != height) {
        priv->height = height;
        priv->size_changed = TRUE;
    }

    switch (seq_ext->profile) {
    case GST_MPEG_VIDEO_PROFILE_SIMPLE:
        profile = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
        break;
    case GST_MPEG_VIDEO_PROFILE_MAIN:
        profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
        break;
    case GST_MPEG_VIDEO_PROFILE_HIGH:
        profile = GST_VAAPI_PROFILE_MPEG2_HIGH;
        break;
    default:
        GST_ERROR("unsupported profile %d", seq_ext->profile);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
    }
    if (priv->profile != profile) {
        priv->profile = profile;
        priv->profile_changed = TRUE;
    }

    /* Recompute the pixel aspect ratio with the extension applied */
    if (gst_mpeg_video_finalise_mpeg2_sequence_header(seq_hdr, seq_ext, NULL))
        gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder,
            seq_hdr->par_w, seq_hdr->par_h);

    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
decode_sequence_end(GstVaapiDecoderMpeg2 *decoder)
|
|
{
|
|
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
|
|
GstVaapiDecoderStatus status;
|
|
|
|
status = decode_current_picture(decoder);
|
|
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
|
|
return status;
|
|
|
|
gst_vaapi_dpb_flush(priv->dpb);
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
parse_quant_matrix_ext(GstVaapiDecoderUnitMpeg2 *unit)
|
|
{
|
|
GstMpegVideoPacket * const packet = &unit->packet;
|
|
|
|
if (!gst_mpeg_video_parse_quant_matrix_extension(&unit->data.quant_matrix,
|
|
packet->data, packet->size, packet->offset)) {
|
|
GST_ERROR("failed to parse quant-matrix-extension");
|
|
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
|
|
}
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
decode_quant_matrix_ext(GstVaapiDecoderMpeg2 *decoder,
|
|
GstVaapiDecoderUnitMpeg2 *unit)
|
|
{
|
|
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
|
|
|
|
gst_vaapi_decoder_unit_replace(&priv->quant_matrix_unit, unit);
|
|
priv->quant_matrix_changed = TRUE;
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
parse_gop(GstVaapiDecoderUnitMpeg2 *unit)
|
|
{
|
|
GstMpegVideoPacket * const packet = &unit->packet;
|
|
|
|
if (!gst_mpeg_video_parse_gop(&unit->data.gop,
|
|
packet->data, packet->size, packet->offset)) {
|
|
GST_ERROR("failed to parse GOP");
|
|
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
|
|
}
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
/* Applies a parsed GOP header: records closed_gop/broken_link and
 * re-anchors the PTS generator on the frame's timestamp. */
static GstVaapiDecoderStatus
decode_gop(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnitMpeg2 *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoGop * const gop = &unit->data.gop;

    priv->closed_gop = gop->closed_gop;
    priv->broken_link = gop->broken_link;

    GST_DEBUG("GOP %02u:%02u:%02u:%02u (closed_gop %d, broken_link %d)",
        gop->hour, gop->minute, gop->second, gop->frame,
        priv->closed_gop, priv->broken_link);

    /* The GOP marks a new timestamp anchor for subsequent pictures */
    pts_sync(&priv->tsg, GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
parse_picture(GstVaapiDecoderUnitMpeg2 *unit)
|
|
{
|
|
GstMpegVideoPacket * const packet = &unit->packet;
|
|
|
|
if (!gst_mpeg_video_parse_picture_header(&unit->data.pic_hdr,
|
|
packet->data, packet->size, packet->offset)) {
|
|
GST_ERROR("failed to parse picture header");
|
|
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
|
|
}
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
/* Parses a slice() header (ISO/IEC 13818-2 6.2.4): quantiser scale,
 * optional intra_slice info, and the first macroblock position decoded
 * from the macroblock_address_increment VLC.  Streams using
 * sequence_scalable_extension() are rejected.  READ_UINT8/SKIP (via
 * decode_vlc) jump to the "failed" label on bitstream exhaustion. */
static GstVaapiDecoderStatus
parse_slice(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnitMpeg2 *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoSliceHdr * const slice_hdr = &unit->data.slice_hdr;
    GstMpegVideoPacket * const packet = &unit->packet;
    GstBitReader br;
    gint mb_x, mb_y, mb_inc;
    guint8 slice_vertical_position_extension;
    guint8 extra_bit_slice, junk8;

    gst_bit_reader_init(&br, packet->data + packet->offset, packet->size);
    /* For tall pictures the position has a 3-bit extension.
     * NOTE(review): the extension is read (to keep the reader in sync)
     * but not folded into mb_y below — confirm against 13818-2 6.3.16. */
    if (priv->height > 2800)
        READ_UINT8(&br, slice_vertical_position_extension, 3);
    if (priv->seq_scalable_ext_unit) {
        GST_ERROR("failed to parse slice with sequence_scalable_extension()");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    READ_UINT8(&br, slice_hdr->quantiser_scale_code, 5);
    READ_UINT8(&br, extra_bit_slice, 1);
    if (!extra_bit_slice)
        slice_hdr->intra_slice = 0;
    else {
        READ_UINT8(&br, slice_hdr->intra_slice, 1);
        READ_UINT8(&br, junk8, 7);
        READ_UINT8(&br, extra_bit_slice, 1);
        /* Skip any extra_information_slice bytes */
        while (extra_bit_slice) {
            READ_UINT8(&br, junk8, 8);
            READ_UINT8(&br, extra_bit_slice, 1);
        }
    }
    /* +32 accounts for the 4-byte start code preceding the payload */
    slice_hdr->header_size = 32 + gst_bit_reader_get_pos(&br);

    /* The MB row comes from the slice start code value itself */
    mb_y = packet->type - GST_MPEG_VIDEO_PACKET_SLICE_MIN;
    mb_x = -1;
    do {
        if (!decode_vlc(&br, &mb_inc, mpeg2_mbaddr_vlc_table,
                G_N_ELEMENTS(mpeg2_mbaddr_vlc_table))) {
            GST_WARNING("failed to decode first macroblock_address_increment");
            goto failed;
        }
        /* Each escape code adds 33 to the increment */
        mb_x += mb_inc == GST_MPEG_VIDEO_MACROBLOCK_ESCAPE ? 33 : mb_inc;
    } while (mb_inc == GST_MPEG_VIDEO_MACROBLOCK_ESCAPE);

    slice_hdr->slice_horizontal_position = mb_x;
    slice_hdr->slice_vertical_position = mb_y;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;

failed:
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
decode_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnitMpeg2 *unit)
|
|
{
|
|
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
|
|
|
|
gst_vaapi_decoder_unit_replace(&priv->pic_hdr_unit, unit);
|
|
gst_vaapi_decoder_unit_replace(&priv->pic_ext_unit, NULL);
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
parse_picture_ext(GstVaapiDecoderUnitMpeg2 *unit)
|
|
{
|
|
GstMpegVideoPacket * const packet = &unit->packet;
|
|
|
|
if (!gst_mpeg_video_parse_picture_extension(&unit->data.pic_ext,
|
|
packet->data, packet->size, packet->offset)) {
|
|
GST_ERROR("failed to parse picture-extension");
|
|
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
|
|
}
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
/* Caches a parsed picture coding extension and sanitizes inconsistent
 * combinations seen in broken streams: an interlaced frame inside a
 * progressive sequence, and a picture_structure that contradicts
 * progressive_frame (or is the reserved value 0). */
static GstVaapiDecoderStatus
decode_picture_ext(GstVaapiDecoderMpeg2 *decoder,
    GstVaapiDecoderUnitMpeg2 *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoPictureExt *pic_ext;

    gst_vaapi_decoder_unit_replace(&priv->pic_ext_unit, unit);
    pic_ext = &priv->pic_ext_unit->data.pic_ext;

    if (priv->progressive_sequence && !pic_ext->progressive_frame) {
        GST_WARNING("invalid interlaced frame in progressive sequence, fixing");
        pic_ext->progressive_frame = 1;
    }

    if (pic_ext->picture_structure == 0 ||
        (pic_ext->progressive_frame &&
         pic_ext->picture_structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME)) {
        GST_WARNING("invalid picture_structure %d, replacing with \"frame\"",
            pic_ext->picture_structure);
        pic_ext->picture_structure = GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
|
|
|
|
static inline guint32
|
|
pack_f_code(guint8 f_code[2][2])
|
|
{
|
|
return (((guint32)f_code[0][0] << 12) |
|
|
((guint32)f_code[0][1] << 8) |
|
|
((guint32)f_code[1][0] << 4) |
|
|
( f_code[1][1] ));
|
|
}
|
|
|
|
/* Initializes @picture from the cached picture header and extension:
 * sets its type and reference flag, interlacing flags, field/frame
 * structure, and the PTS/POC from the timestamp generator.  When the
 * stream starts on a field-coded I picture with an empty DPB, a dummy
 * skipped reference frame is inserted so the second field has a
 * "previous" reference to point at. */
static GstVaapiDecoderStatus
init_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr_unit->data.pic_hdr;
    GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext_unit->data.pic_ext;

    switch (pic_hdr->pic_type) {
    case GST_MPEG_VIDEO_PICTURE_TYPE_I:
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        picture->type = GST_VAAPI_PICTURE_TYPE_I;
        break;
    case GST_MPEG_VIDEO_PICTURE_TYPE_P:
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        picture->type = GST_VAAPI_PICTURE_TYPE_P;
        break;
    case GST_MPEG_VIDEO_PICTURE_TYPE_B:
        /* B pictures are never used as references */
        picture->type = GST_VAAPI_PICTURE_TYPE_B;
        break;
    default:
        GST_ERROR("unsupported picture type %d", pic_hdr->pic_type);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    if (!priv->progressive_sequence && !pic_ext->progressive_frame) {
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
        if (pic_ext->top_field_first)
            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_TFF);
    }

    switch (pic_ext->picture_structure) {
    case GST_MPEG_VIDEO_PICTURE_STRUCTURE_TOP_FIELD:
        picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
        break;
    case GST_MPEG_VIDEO_PICTURE_STRUCTURE_BOTTOM_FIELD:
        picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
        break;
    case GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME:
        picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
        break;
    }

    /* Allocate dummy picture for first field based I-frame */
    if (picture->type == GST_VAAPI_PICTURE_TYPE_I &&
        !GST_VAAPI_PICTURE_IS_FRAME(picture) &&
        gst_vaapi_dpb_size(priv->dpb) == 0) {
        GstVaapiPicture *dummy_picture;
        gboolean success;

        dummy_picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
        if (!dummy_picture) {
            GST_ERROR("failed to allocate dummy picture");
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        }

        dummy_picture->type = GST_VAAPI_PICTURE_TYPE_I;
        dummy_picture->pts = GST_CLOCK_TIME_NONE;
        dummy_picture->poc = -1;
        dummy_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;

        /* SKIPPED: never output; REFERENCE: usable by the DPB */
        GST_VAAPI_PICTURE_FLAG_SET(
            dummy_picture,
            (GST_VAAPI_PICTURE_FLAG_SKIPPED |
             GST_VAAPI_PICTURE_FLAG_REFERENCE)
        );

        success = gst_vaapi_dpb_add(priv->dpb, dummy_picture);
        gst_vaapi_picture_unref(dummy_picture);
        if (!success) {
            GST_ERROR("failed to add dummy picture into DPB");
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
        }
        GST_INFO("allocated dummy picture for first field based I-frame");
    }

    /* Update presentation time */
    picture->pts = pts_eval(&priv->tsg,
        GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts, pic_hdr->tsn);
    picture->poc = pts_get_poc(&priv->tsg);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
|
|
|
|
/* Fills the VAPictureParameterBufferMPEG2 for @picture from the cached
 * picture header/extension and wires up the forward/backward reference
 * surfaces obtained from the DPB.  A B picture without a forward
 * reference in an open GOP is flagged SKIPPED (undecodable leading B). */
static void
fill_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    VAPictureParameterBufferMPEG2 * const pic_param = picture->param;
    GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr_unit->data.pic_hdr;
    GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext_unit->data.pic_ext;
    GstVaapiPicture *prev_picture, *next_picture;

    /* Fill in VAPictureParameterBufferMPEG2 */
    pic_param->horizontal_size = priv->width;
    pic_param->vertical_size = priv->height;
    pic_param->forward_reference_picture = VA_INVALID_ID;
    pic_param->backward_reference_picture = VA_INVALID_ID;
    pic_param->picture_coding_type = pic_hdr->pic_type;
    pic_param->f_code = pack_f_code(pic_ext->f_code);

/* Copies one like-named bitfield from pic_ext into pic_param->a.b */
#define COPY_FIELD(a, b, f) \
    pic_param->a.b.f = pic_ext->f
    pic_param->picture_coding_extension.value = 0;
    pic_param->picture_coding_extension.bits.is_first_field =
        GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture);
    COPY_FIELD(picture_coding_extension, bits, intra_dc_precision);
    COPY_FIELD(picture_coding_extension, bits, picture_structure);
    COPY_FIELD(picture_coding_extension, bits, top_field_first);
    COPY_FIELD(picture_coding_extension, bits, frame_pred_frame_dct);
    COPY_FIELD(picture_coding_extension, bits, concealment_motion_vectors);
    COPY_FIELD(picture_coding_extension, bits, q_scale_type);
    COPY_FIELD(picture_coding_extension, bits, intra_vlc_format);
    COPY_FIELD(picture_coding_extension, bits, alternate_scan);
    COPY_FIELD(picture_coding_extension, bits, repeat_first_field);
    COPY_FIELD(picture_coding_extension, bits, progressive_frame);

    gst_vaapi_dpb_mpeg2_get_references(priv->dpb, picture,
        &prev_picture, &next_picture);

    switch (pic_hdr->pic_type) {
    case GST_MPEG_VIDEO_PICTURE_TYPE_B:
        if (next_picture)
            pic_param->backward_reference_picture = next_picture->surface_id;
        if (prev_picture)
            pic_param->forward_reference_picture = prev_picture->surface_id;
        else if (!priv->closed_gop)
            /* Leading B in an open GOP: no forward reference, skip it */
            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_SKIPPED);
        break;
    case GST_MPEG_VIDEO_PICTURE_TYPE_P:
        if (prev_picture)
            pic_param->forward_reference_picture = prev_picture->surface_id;
        break;
    }
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
decode_slice(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnitMpeg2 *unit)
|
|
{
|
|
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
|
|
GstVaapiPicture * const picture = priv->current_picture;
|
|
GstVaapiSlice *slice;
|
|
VASliceParameterBufferMPEG2 *slice_param;
|
|
GstMpegVideoPacket * const packet = &unit->packet;
|
|
GstMpegVideoSliceHdr * const slice_hdr = &unit->data.slice_hdr;
|
|
|
|
GST_DEBUG("slice %d (%u bytes)", slice_hdr->slice_vertical_position,
|
|
packet->size);
|
|
|
|
unit->base.buffer = gst_buffer_create_sub(
|
|
GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer,
|
|
unit->base.offset, unit->base.size);
|
|
if (!unit->base.buffer) {
|
|
GST_ERROR("failed to allocate slice data");
|
|
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
|
|
}
|
|
|
|
slice = GST_VAAPI_SLICE_NEW(MPEG2, decoder,
|
|
GST_BUFFER_DATA(unit->base.buffer), packet->size);
|
|
if (!slice) {
|
|
GST_ERROR("failed to allocate slice");
|
|
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
|
|
}
|
|
gst_vaapi_picture_add_slice(picture, slice);
|
|
|
|
/* Fill in VASliceParameterBufferMPEG2 */
|
|
slice_param = slice->param;
|
|
slice_param->macroblock_offset = slice_hdr->header_size;
|
|
slice_param->slice_horizontal_position = slice_hdr->slice_horizontal_position;
|
|
slice_param->slice_vertical_position = slice_hdr->slice_vertical_position;
|
|
slice_param->quantiser_scale_code = slice_hdr->quantiser_scale_code;
|
|
slice_param->intra_slice_flag = slice_hdr->intra_slice;
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
static inline gint
|
|
scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
|
|
{
|
|
return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
|
|
0xffffff00, 0x00000100,
|
|
ofs, size,
|
|
scp);
|
|
}
|
|
|
|
/* Dispatches one parsed unit to the matching decode_*() handler based on
 * its start-code type (and extension sub-type for extension packets).
 * Units that cannot be handled yet because the picture size is unknown
 * are silently ignored (SUCCESS) so decoding can resume once sequence
 * headers arrive. */
static GstVaapiDecoderStatus
decode_unit(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnitMpeg2 *unit)
{
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    const GstMpegVideoPacketTypeCode type = unit->packet.type;
    GstVaapiDecoderStatus status;

    switch (type) {
    case GST_MPEG_VIDEO_PACKET_PICTURE:
        /* Picture headers are meaningless until the frame size is known. */
        if (!priv->width || !priv->height)
            goto unknown_picture_size;
        status = decode_picture(decoder, unit);
        break;
    case GST_MPEG_VIDEO_PACKET_SEQUENCE:
        status = decode_sequence(decoder, unit);
        break;
    case GST_MPEG_VIDEO_PACKET_EXTENSION:
        /* Extension packets carry a secondary type parsed earlier. */
        switch (unit->extension_type) {
        case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
            status = decode_sequence_ext(decoder, unit);
            break;
        case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
            status = decode_quant_matrix_ext(decoder, unit);
            break;
        case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
            if (!priv->width || !priv->height)
                goto unknown_picture_size;
            status = decode_picture_ext(decoder, unit);
            break;
        default:
            // Ignore unknown start-code extensions
            GST_WARNING("unsupported packet extension type 0x%02x",
                unit->extension_type);
            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
            break;
        }
        break;
    case GST_MPEG_VIDEO_PACKET_SEQUENCE_END:
        status = decode_sequence_end(decoder);
        break;
    case GST_MPEG_VIDEO_PACKET_GOP:
        status = decode_gop(decoder, unit);
        break;
    default:
        /* Slice start codes occupy a contiguous range rather than a
         * single value, hence the range check in the default arm. */
        if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
            type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
            status = decode_slice(decoder, unit);
            break;
        }
        GST_WARNING("unsupported packet type 0x%02x", type);
        status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
        break;
    }
    return status;

unknown_picture_size:
    // Ignore packet while picture size is undefined
    // i.e. missing sequence headers, or not parsed correctly
    GST_WARNING("failed to parse picture of unknown size");
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
ensure_decoder(GstVaapiDecoderMpeg2 *decoder)
|
|
{
|
|
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
|
|
|
|
g_return_val_if_fail(priv->is_constructed,
|
|
GST_VAAPI_DECODER_STATUS_ERROR_INIT_FAILED);
|
|
|
|
if (!priv->is_opened) {
|
|
priv->is_opened = gst_vaapi_decoder_mpeg2_open(decoder);
|
|
if (!priv->is_opened)
|
|
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
|
|
}
|
|
return GST_VAAPI_DECODER_STATUS_SUCCESS;
|
|
}
|
|
|
|
/* GstVaapiDecoder::parse() implementation: extracts exactly one
 * start-code-delimited packet from @adapter, parses its headers into a
 * newly allocated decoder unit, and tags the unit with frame/slice
 * boundary flags for the base decoder's frame assembly.
 *
 * Returns ERROR_NO_DATA when a complete packet is not yet available
 * (unless @at_eos, in which case the remaining bytes are taken as the
 * final packet). On success, *@unit_ptr receives the new unit. */
static GstVaapiDecoderStatus
gst_vaapi_decoder_mpeg2_parse(GstVaapiDecoder *base_decoder,
    GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit **unit_ptr)
{
    GstVaapiDecoderMpeg2 * const decoder =
        GST_VAAPI_DECODER_MPEG2(base_decoder);
    GstVaapiDecoderUnitMpeg2 *unit;
    GstVaapiDecoderStatus status;
    GstMpegVideoPacket *packet;
    const guchar *buf;
    guint32 start_code;
    guint size, buf_size, flags;
    gint ofs;

    status = ensure_decoder(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    /* A start code is 4 bytes (00 00 01 xx); anything shorter cannot
     * contain one. */
    size = gst_adapter_available(adapter);
    if (size < 4)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

    /* Locate the packet's start code and discard any leading garbage. */
    ofs = scan_for_start_code(adapter, 0, size, &start_code);
    if (ofs < 0)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    gst_adapter_flush(adapter, ofs);
    size -= ofs;

    /* The packet ends at the next start code (at least 8 bytes are
     * needed to hold two start codes). */
    ofs = G_UNLIKELY(size < 8) ? -1 :
        scan_for_start_code(adapter, 4, size - 4, NULL);
    if (ofs < 0) {
        // Assume the whole packet is present if end-of-stream
        if (!at_eos)
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
        ofs = size;
    }
    buf_size = ofs;

    buf = gst_adapter_peek(adapter, buf_size);
    if (!buf)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

    unit = gst_vaapi_decoder_unit_mpeg2_new(buf_size);
    if (!unit)
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;

    /* packet->data points into the adapter's peeked buffer; it is only
     * valid during this call and is cleared before returning. */
    packet = &unit->packet;
    packet->data = buf;
    packet->size = buf_size;
    packet->offset = 4;
    packet->type = start_code & 0xff;

    /* Parse data */
    /* NOTE(review): on parser failure below, @unit appears to be leaked
     * (returned without being released) — confirm unit ownership. */
    switch (packet->type) {
    case GST_MPEG_VIDEO_PACKET_SEQUENCE:
        status = parse_sequence(unit);
        break;
    case GST_MPEG_VIDEO_PACKET_GOP:
        status = parse_gop(unit);
        break;
    case GST_MPEG_VIDEO_PACKET_PICTURE:
        status = parse_picture(unit);
        break;
    case GST_MPEG_VIDEO_PACKET_EXTENSION:
        /* The extension sub-type lives in the high nibble of byte 4. */
        if (G_UNLIKELY(buf_size < 5)) {
            status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
            break;
        }
        unit->extension_type = buf[4] >> 4;
        switch (unit->extension_type) {
        case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
            status = parse_sequence_ext(unit);
            break;
        case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
            status = parse_quant_matrix_ext(unit);
            break;
        case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
            status = parse_picture_ext(unit);
            break;
        default:
            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
            break;
        }
        break;
    default:
        if (packet->type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
            packet->type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
            status = parse_slice(decoder, unit);
            break;
        }
        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
        break;
    }
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    /* Check for start of new picture */
    flags = 0;
    switch (packet->type) {
    case GST_MPEG_VIDEO_PACKET_SEQUENCE_END:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
        break;
    case GST_MPEG_VIDEO_PACKET_USER_DATA:
        /* User data is skipped but still marks a frame boundary. */
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
        /* fall-through */
    case GST_MPEG_VIDEO_PACKET_SEQUENCE:
    case GST_MPEG_VIDEO_PACKET_GOP:
    case GST_MPEG_VIDEO_PACKET_PICTURE:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
        break;
    default:
        if (packet->type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
            packet->type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX)
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;

        // Ignore system start codes (PES headers)
        else if (packet->type >= 0xb9 && packet->type <= 0xff)
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
        break;
    }
    GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);

    /* Invalidate the transient adapter pointer before handing the unit
     * off; the payload is re-attached at decode time from the frame. */
    unit->packet.data = NULL;
    *unit_ptr = &unit->base;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
gst_vaapi_decoder_mpeg2_decode(GstVaapiDecoder *base_decoder,
|
|
GstVaapiDecoderUnit *unit)
|
|
{
|
|
GstVaapiDecoderMpeg2 * const decoder =
|
|
GST_VAAPI_DECODER_MPEG2(base_decoder);
|
|
GstVaapiDecoderStatus status;
|
|
|
|
status = ensure_decoder(decoder);
|
|
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
|
|
return status;
|
|
return decode_unit(decoder, (GstVaapiDecoderUnitMpeg2 *)unit);
|
|
}
|
|
|
|
/* GstVaapiDecoder::start_frame() implementation: ensures a VA context
 * exists, allocates (or re-uses for the second field) the current
 * picture, then initializes and fills its VA parameters so decoding can
 * start as early as possible. Returns SUCCESS without allocating a
 * picture while the frame size is still unknown. */
static GstVaapiDecoderStatus
gst_vaapi_decoder_mpeg2_start_frame(GstVaapiDecoder *base_decoder,
    GstVaapiDecoderUnit *base_unit)
{
    GstVaapiDecoderMpeg2 * const decoder =
        GST_VAAPI_DECODER_MPEG2(base_decoder);
    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
    GstVaapiPicture *picture;
    GstVaapiDecoderStatus status;

    if (!priv->width || !priv->height) {
        // Ignore packet while picture size is undefined
        // i.e. missing sequence headers, or not parsed correctly
        GST_WARNING("failed to decode picture of unknown size");
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
    }

    status = ensure_context(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_ERROR("failed to reset context");
        return status;
    }

    if (priv->current_picture) {
        /* Re-use current picture where the first field was decoded */
        picture = gst_vaapi_picture_new_field(priv->current_picture);
        if (!picture) {
            GST_ERROR("failed to allocate field picture");
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        }
    }
    else {
        /* Create new picture */
        picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
        if (!picture) {
            GST_ERROR("failed to allocate picture");
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        }
    }
    /* replace() installs the picture as priv->current_picture; dropping
     * our local reference here keeps the picture alive through that
     * field, so the uses of @picture below remain valid. */
    gst_vaapi_picture_replace(&priv->current_picture, picture);
    gst_vaapi_picture_unref(picture);

    status = ensure_quant_matrix(decoder, picture);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_ERROR("failed to reset quantizer matrix");
        return status;
    }

    status = init_picture(decoder, picture);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    fill_picture(decoder, picture);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
|
|
|
|
static GstVaapiDecoderStatus
|
|
gst_vaapi_decoder_mpeg2_end_frame(GstVaapiDecoder *base_decoder)
|
|
{
|
|
GstVaapiDecoderMpeg2 * const decoder =
|
|
GST_VAAPI_DECODER_MPEG2(base_decoder);
|
|
|
|
return decode_current_picture(decoder);
|
|
}
|
|
|
|
static void
|
|
gst_vaapi_decoder_mpeg2_finalize(GObject *object)
|
|
{
|
|
GstVaapiDecoderMpeg2 * const decoder = GST_VAAPI_DECODER_MPEG2(object);
|
|
|
|
gst_vaapi_decoder_mpeg2_destroy(decoder);
|
|
|
|
G_OBJECT_CLASS(gst_vaapi_decoder_mpeg2_parent_class)->finalize(object);
|
|
}
|
|
|
|
static void
|
|
gst_vaapi_decoder_mpeg2_constructed(GObject *object)
|
|
{
|
|
GstVaapiDecoderMpeg2 * const decoder = GST_VAAPI_DECODER_MPEG2(object);
|
|
GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
|
|
GObjectClass *parent_class;
|
|
|
|
parent_class = G_OBJECT_CLASS(gst_vaapi_decoder_mpeg2_parent_class);
|
|
if (parent_class->constructed)
|
|
parent_class->constructed(object);
|
|
|
|
priv->is_constructed = gst_vaapi_decoder_mpeg2_create(decoder);
|
|
}
|
|
|
|
static void
|
|
gst_vaapi_decoder_mpeg2_class_init(GstVaapiDecoderMpeg2Class *klass)
|
|
{
|
|
GObjectClass * const object_class = G_OBJECT_CLASS(klass);
|
|
GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
|
|
|
|
g_type_class_add_private(klass, sizeof(GstVaapiDecoderMpeg2Private));
|
|
|
|
object_class->finalize = gst_vaapi_decoder_mpeg2_finalize;
|
|
object_class->constructed = gst_vaapi_decoder_mpeg2_constructed;
|
|
|
|
decoder_class->parse = gst_vaapi_decoder_mpeg2_parse;
|
|
decoder_class->decode = gst_vaapi_decoder_mpeg2_decode;
|
|
decoder_class->start_frame = gst_vaapi_decoder_mpeg2_start_frame;
|
|
decoder_class->end_frame = gst_vaapi_decoder_mpeg2_end_frame;
|
|
}
|
|
|
|
static void
|
|
gst_vaapi_decoder_mpeg2_init(GstVaapiDecoderMpeg2 *decoder)
|
|
{
|
|
GstVaapiDecoderMpeg2Private *priv;
|
|
|
|
priv = GST_VAAPI_DECODER_MPEG2_GET_PRIVATE(decoder);
|
|
decoder->priv = priv;
|
|
priv->width = 0;
|
|
priv->height = 0;
|
|
priv->fps_n = 0;
|
|
priv->fps_d = 0;
|
|
priv->hw_profile = GST_VAAPI_PROFILE_UNKNOWN;
|
|
priv->profile = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
|
|
priv->current_picture = NULL;
|
|
priv->is_constructed = FALSE;
|
|
priv->is_opened = FALSE;
|
|
priv->size_changed = FALSE;
|
|
priv->profile_changed = TRUE; /* Allow fallbacks to work */
|
|
priv->quant_matrix_changed = FALSE;
|
|
priv->progressive_sequence = FALSE;
|
|
priv->closed_gop = FALSE;
|
|
priv->broken_link = FALSE;
|
|
}
|
|
|
|
/**
|
|
* gst_vaapi_decoder_mpeg2_new:
|
|
* @display: a #GstVaapiDisplay
|
|
* @caps: a #GstCaps holding codec information
|
|
*
|
|
* Creates a new #GstVaapiDecoder for MPEG-2 decoding. The @caps can
|
|
 * hold extra information like codec-data and picture coded size.
|
|
*
|
|
* Return value: the newly allocated #GstVaapiDecoder object
|
|
*/
|
|
GstVaapiDecoder *
|
|
gst_vaapi_decoder_mpeg2_new(GstVaapiDisplay *display, GstCaps *caps)
|
|
{
|
|
GstVaapiDecoderMpeg2 *decoder;
|
|
|
|
g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
|
|
g_return_val_if_fail(GST_IS_CAPS(caps), NULL);
|
|
|
|
decoder = g_object_new(
|
|
GST_VAAPI_TYPE_DECODER_MPEG2,
|
|
"display", display,
|
|
"caps", caps,
|
|
NULL
|
|
);
|
|
if (!decoder->priv->is_constructed) {
|
|
g_object_unref(decoder);
|
|
return NULL;
|
|
}
|
|
return GST_VAAPI_DECODER_CAST(decoder);
|
|
}
|