libs: humongous code style fix

As part of the upstreaming process of gstreamer-vaapi into the GStreamer
umbrella, we need to comply with the project's code style. This meant
changing a lot of code.

It was decided to use a single massive patch to update the code style.

I would like to apologize to the original developers of this code for
the history breakage.

Signed-off-by: Víctor Manuel Jáquez Leal <victorx.jaquez@intel.com>
This commit is contained in:
Víctor Manuel Jáquez Leal 2016-02-03 11:50:13 +01:00
parent 4b5be5973e
commit ac730d0a62
13 changed files with 8565 additions and 8730 deletions

View file

@ -37,7 +37,8 @@
*
* A decoded picture buffer (DPB) object.
*/
struct _GstVaapiDpb {
struct _GstVaapiDpb
{
/*< private > */
GstVaapiMiniObject parent_instance;
@ -52,7 +53,8 @@ struct _GstVaapiDpb {
*
* The #GstVaapiDpb base class.
*/
struct _GstVaapiDpbClass {
struct _GstVaapiDpbClass
{
/*< private > */
GstVaapiMiniObjectClass parent_class;
@ -63,11 +65,9 @@ struct _GstVaapiDpbClass {
GstVaapiPicture ** prev_picture_ptr, GstVaapiPicture ** next_picture_ptr);
};
static const GstVaapiMiniObjectClass *
gst_vaapi_dpb_class(void);
static const GstVaapiMiniObjectClass *gst_vaapi_dpb_class (void);
static const GstVaapiMiniObjectClass *
gst_vaapi_dpb2_class(void);
static const GstVaapiMiniObjectClass *gst_vaapi_dpb2_class (void);
/* ------------------------------------------------------------------------- */
/* --- Common utilities --- */
@ -80,8 +80,9 @@ dpb_new(guint max_pictures)
g_return_val_if_fail (max_pictures > 0, NULL);
dpb = (GstVaapiDpb *)gst_vaapi_mini_object_new(
max_pictures == 2 ? gst_vaapi_dpb2_class() : gst_vaapi_dpb_class());
dpb =
(GstVaapiDpb *) gst_vaapi_mini_object_new (max_pictures ==
2 ? gst_vaapi_dpb2_class () : gst_vaapi_dpb_class ());
if (!dpb)
return NULL;
@ -167,8 +168,7 @@ dpb_clear(GstVaapiDpb *dpb)
static void
dpb_flush (GstVaapiDpb * dpb)
{
while (dpb_bump(dpb))
;
while (dpb_bump (dpb));
dpb_clear (dpb);
}
@ -199,7 +199,6 @@ dpb_add(GstVaapiDpb *dpb, GstVaapiPicture *picture)
return FALSE;
}
}
// Store non-reference decoded picture into the DPB
else {
if (GST_VAAPI_PICTURE_IS_SKIPPED (picture))
@ -237,8 +236,7 @@ dpb_get_neighbours(GstVaapiDpb *dpb, GstVaapiPicture *picture,
if (i + 1 < dpb->num_pictures)
next_picture = dpb->pictures[i + 1];
break;
}
else if (ref_picture->poc > picture->poc) {
} else if (ref_picture->poc > picture->poc) {
next_picture = ref_picture;
if (i > 0)
prev_picture = dpb->pictures[i - 1];
@ -338,7 +336,8 @@ gst_vaapi_dpb_class(void)
{
static const GstVaapiDpbClass GstVaapiDpbClass = {
{sizeof (GstVaapiDpb),
(GDestroyNotify)gst_vaapi_dpb_finalize },
(GDestroyNotify) gst_vaapi_dpb_finalize}
,
dpb_flush,
dpb_add,
@ -352,7 +351,8 @@ gst_vaapi_dpb2_class(void)
{
static const GstVaapiDpbClass GstVaapiDpb2Class = {
{sizeof (GstVaapiDpb),
(GDestroyNotify)gst_vaapi_dpb_finalize },
(GDestroyNotify) gst_vaapi_dpb_finalize}
,
dpb_flush,
dpb2_add,

File diff suppressed because it is too large Load diff

View file

@ -49,20 +49,20 @@
typedef struct _GstVaapiDecoderJpegPrivate GstVaapiDecoderJpegPrivate;
typedef struct _GstVaapiDecoderJpegClass GstVaapiDecoderJpegClass;
typedef enum {
typedef enum
{
GST_JPEG_VIDEO_STATE_GOT_SOI = 1 << 0,
GST_JPEG_VIDEO_STATE_GOT_SOF = 1 << 1,
GST_JPEG_VIDEO_STATE_GOT_SOS = 1 << 2,
GST_JPEG_VIDEO_STATE_GOT_HUF_TABLE = 1 << 3,
GST_JPEG_VIDEO_STATE_GOT_IQ_TABLE = 1 << 4,
GST_JPEG_VIDEO_STATE_VALID_PICTURE = (
GST_JPEG_VIDEO_STATE_GOT_SOI |
GST_JPEG_VIDEO_STATE_GOT_SOF |
GST_JPEG_VIDEO_STATE_GOT_SOS),
GST_JPEG_VIDEO_STATE_VALID_PICTURE = (GST_JPEG_VIDEO_STATE_GOT_SOI |
GST_JPEG_VIDEO_STATE_GOT_SOF | GST_JPEG_VIDEO_STATE_GOT_SOS),
} GstJpegVideoState;
struct _GstVaapiDecoderJpegPrivate {
struct _GstVaapiDecoderJpegPrivate
{
GstVaapiProfile profile;
guint width;
guint height;
@ -82,7 +82,8 @@ struct _GstVaapiDecoderJpegPrivate {
*
* A decoder based on Jpeg.
*/
struct _GstVaapiDecoderJpeg {
struct _GstVaapiDecoderJpeg
{
/*< private > */
GstVaapiDecoder parent_instance;
GstVaapiDecoderJpegPrivate priv;
@ -93,7 +94,8 @@ struct _GstVaapiDecoderJpeg {
*
* A decoder class based on Jpeg.
*/
struct _GstVaapiDecoderJpegClass {
struct _GstVaapiDecoderJpegClass
{
/*< private > */
GstVaapiDecoderClass parent_class;
};
@ -195,10 +197,8 @@ ensure_context(GstVaapiDecoderJpeg *decoder)
info.width = priv->width;
info.height = priv->height;
info.ref_frames = 2;
reset_context = gst_vaapi_decoder_ensure_context(
GST_VAAPI_DECODER(decoder),
&info
);
reset_context =
gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER (decoder), &info);
if (!reset_context)
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
@ -245,11 +245,8 @@ drop_frame:
}
static gboolean
fill_picture(
GstVaapiDecoderJpeg *decoder,
GstVaapiPicture *picture,
GstJpegFrameHdr *frame_hdr
)
fill_picture (GstVaapiDecoderJpeg * decoder,
GstVaapiPicture * picture, GstJpegFrameHdr * frame_hdr)
{
VAPictureParameterBufferJPEGBaseline *const pic_param = picture->param;
guint i;
@ -262,8 +259,7 @@ fill_picture(
if (frame_hdr->num_components > 4)
return FALSE;
for (i = 0; i < pic_param->num_components; i++) {
pic_param->components[i].component_id =
frame_hdr->components[i].identifier;
pic_param->components[i].component_id = frame_hdr->components[i].identifier;
pic_param->components[i].h_sampling_factor =
frame_hdr->components[i].horizontal_factor;
pic_param->components[i].v_sampling_factor =
@ -275,7 +271,8 @@ fill_picture(
}
static GstVaapiDecoderStatus
fill_quantization_table(GstVaapiDecoderJpeg *decoder, GstVaapiPicture *picture)
fill_quantization_table (GstVaapiDecoderJpeg * decoder,
GstVaapiPicture * picture)
{
GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
VAIQMatrixBufferJPEGBaseline *iq_matrix;
@ -295,8 +292,7 @@ fill_quantization_table(GstVaapiDecoderJpeg *decoder, GstVaapiPicture *picture)
GST_JPEG_MAX_QUANT_ELEMENTS);
for (i = 0; i < num_tables; i++) {
GstJpegQuantTable * const quant_table =
&priv->quant_tables.quant_tables[i];
GstJpegQuantTable *const quant_table = &priv->quant_tables.quant_tables[i];
iq_matrix->load_quantiser_table[i] = quant_table->valid;
if (!iq_matrix->load_quantiser_table[i])
@ -370,8 +366,7 @@ fill_huffman_table(GstVaapiHuffmanTable *huf_table,
huf_tables->ac_tables[i].huf_values,
sizeof (huffman_table->huffman_table[i].ac_values));
memset (huffman_table->huffman_table[i].pad,
0,
sizeof(huffman_table->huffman_table[i].pad));
0, sizeof (huffman_table->huffman_table[i].pad));
}
}
@ -441,10 +436,7 @@ decode_picture(GstVaapiDecoderJpeg *decoder, GstJpegSegment *seg)
}
static GstVaapiDecoderStatus
decode_huffman_table(
GstVaapiDecoderJpeg *decoder,
GstJpegSegment *seg
)
decode_huffman_table (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
{
GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
@ -461,10 +453,7 @@ decode_huffman_table(
}
static GstVaapiDecoderStatus
decode_quant_table(
GstVaapiDecoderJpeg *decoder,
GstJpegSegment *seg
)
decode_quant_table (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
{
GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
@ -481,10 +470,7 @@ decode_quant_table(
}
static GstVaapiDecoderStatus
decode_restart_interval(
GstVaapiDecoderJpeg *decoder,
GstJpegSegment *seg
)
decode_restart_interval (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
{
GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
@ -707,8 +693,7 @@ gst_vaapi_decoder_jpeg_parse(GstVaapiDecoder *base_decoder,
ofs2 = seg.offset + seg.size;
}
ofs2 = seg.offset - 2;
}
else {
} else {
// Check that the whole segment is actually available (in buffer)
ofs2 = ofs1 + seg.size;
if (ofs2 > buf_size) {

View file

@ -44,7 +44,8 @@
/* ------------------------------------------------------------------------- */
typedef struct _PTSGenerator PTSGenerator;
struct _PTSGenerator {
struct _PTSGenerator
{
GstClockTime gop_pts; // Current GOP PTS
GstClockTime max_pts; // Max picture PTS
guint gop_tsn; // Absolute GOP TSN
@ -99,18 +100,16 @@ pts_sync(PTSGenerator *tsg, GstClockTime gop_pts)
if (GST_CLOCK_TIME_IS_VALID (tsg->max_pts)) {
gop_pts = tsg->max_pts + pts_get_duration (tsg, 1);
gop_tsn = tsg->gop_tsn + tsg->ovl_tsn * 1024 + tsg->max_tsn + 1;
}
else {
} else {
gop_pts = 0;
gop_tsn = 0;
}
}
else {
} else {
/* Interpolate GOP TSN from this valid PTS */
if (GST_CLOCK_TIME_IS_VALID (tsg->gop_pts))
gop_tsn = tsg->gop_tsn + gst_util_uint64_scale(
gop_pts - tsg->gop_pts + pts_get_duration(tsg, 1) - 1,
tsg->fps_n, GST_SECOND * tsg->fps_d);
gop_tsn =
tsg->gop_tsn + gst_util_uint64_scale (gop_pts - tsg->gop_pts +
pts_get_duration (tsg, 1) - 1, tsg->fps_n, GST_SECOND * tsg->fps_d);
else
gop_tsn = 0;
}
@ -158,11 +157,13 @@ pts_eval(PTSGenerator *tsg, GstClockTime pic_pts, guint pic_tsn)
/* ------------------------------------------------------------------------- */
typedef struct _GstVaapiParserInfoMpeg2 GstVaapiParserInfoMpeg2;
struct _GstVaapiParserInfoMpeg2 {
struct _GstVaapiParserInfoMpeg2
{
GstVaapiMiniObject parent_instance;
GstMpegVideoPacket packet;
guint8 extension_type; /* for Extension packets */
union {
union
{
GstMpegVideoSequenceHdr seq_hdr;
GstMpegVideoSequenceExt seq_ext;
GstMpegVideoSequenceDisplayExt seq_display_ext;
@ -224,26 +225,24 @@ gst_vaapi_parser_info_mpeg2_ensure(GstVaapiParserInfoMpeg2 **pi_ptr)
typedef struct _GstVaapiDecoderMpeg2Private GstVaapiDecoderMpeg2Private;
typedef struct _GstVaapiDecoderMpeg2Class GstVaapiDecoderMpeg2Class;
typedef enum {
typedef enum
{
GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR = 1 << 0,
GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT = 1 << 1,
GST_MPEG_VIDEO_STATE_GOT_PIC_HDR = 1 << 2,
GST_MPEG_VIDEO_STATE_GOT_PIC_EXT = 1 << 3,
GST_MPEG_VIDEO_STATE_GOT_SLICE = 1 << 4,
GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS = (
GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS = (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR |
GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT),
GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS = (
GST_MPEG_VIDEO_STATE_GOT_PIC_HDR|
GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS = (GST_MPEG_VIDEO_STATE_GOT_PIC_HDR |
GST_MPEG_VIDEO_STATE_GOT_PIC_EXT),
GST_MPEG_VIDEO_STATE_VALID_PICTURE = (
GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS|
GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS|
GST_MPEG_VIDEO_STATE_GOT_SLICE)
GST_MPEG_VIDEO_STATE_VALID_PICTURE = (GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS |
GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS | GST_MPEG_VIDEO_STATE_GOT_SLICE)
} GstMpegVideoState;
struct _GstVaapiDecoderMpeg2Private {
struct _GstVaapiDecoderMpeg2Private
{
GstVaapiProfile profile;
GstVaapiProfile hw_profile;
guint width;
@ -279,7 +278,8 @@ struct _GstVaapiDecoderMpeg2Private {
*
* A decoder based on Mpeg2.
*/
struct _GstVaapiDecoderMpeg2 {
struct _GstVaapiDecoderMpeg2
{
/*< private > */
GstVaapiDecoder parent_instance;
GstVaapiDecoderMpeg2Private priv;
@ -290,7 +290,8 @@ struct _GstVaapiDecoderMpeg2 {
*
* A decoder class based on Mpeg2.
*/
struct _GstVaapiDecoderMpeg2Class {
struct _GstVaapiDecoderMpeg2Class
{
/*< private > */
GstVaapiDecoderClass parent_class;
};
@ -367,10 +368,18 @@ get_profile_str(GstVaapiProfile profile)
char *str;
switch (profile) {
case GST_VAAPI_PROFILE_MPEG2_SIMPLE: str = "simple"; break;
case GST_VAAPI_PROFILE_MPEG2_MAIN: str = "main"; break;
case GST_VAAPI_PROFILE_MPEG2_HIGH: str = "high"; break;
default: str = "<unknown>"; break;
case GST_VAAPI_PROFILE_MPEG2_SIMPLE:
str = "simple";
break;
case GST_VAAPI_PROFILE_MPEG2_MAIN:
str = "main";
break;
case GST_VAAPI_PROFILE_MPEG2_HIGH:
str = "high";
break;
default:
str = "<unknown>";
break;
}
return str;
}
@ -399,8 +408,7 @@ get_profile(GstVaapiDecoderMpeg2 *decoder, GstVaapiEntrypoint entrypoint)
// Try to map to main profile if no high profile specific bits used
if (priv->profile == profile &&
!priv->seq_scalable_ext &&
(priv->seq_ext &&
priv->seq_ext->data.seq_ext.chroma_format == 1)) {
(priv->seq_ext && priv->seq_ext->data.seq_ext.chroma_format == 1)) {
profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
break;
}
@ -449,10 +457,9 @@ ensure_context(GstVaapiDecoderMpeg2 *decoder)
info.width = priv->width;
info.height = priv->height;
info.ref_frames = 2;
reset_context = gst_vaapi_decoder_ensure_context(
GST_VAAPI_DECODER_CAST(decoder),
&info
);
reset_context =
gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER_CAST (decoder),
&info);
if (!reset_context)
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
@ -495,25 +502,27 @@ ensure_quant_matrix(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
if (quant_matrix->load_chroma_intra_quantiser_matrix)
chroma_intra_quant_matrix = quant_matrix->chroma_intra_quantiser_matrix;
if (quant_matrix->load_chroma_non_intra_quantiser_matrix)
chroma_non_intra_quant_matrix = quant_matrix->chroma_non_intra_quantiser_matrix;
chroma_non_intra_quant_matrix =
quant_matrix->chroma_non_intra_quantiser_matrix;
}
iq_matrix->load_intra_quantiser_matrix = intra_quant_matrix != NULL;
if (intra_quant_matrix)
copy_quant_matrix(iq_matrix->intra_quantiser_matrix,
intra_quant_matrix);
copy_quant_matrix (iq_matrix->intra_quantiser_matrix, intra_quant_matrix);
iq_matrix->load_non_intra_quantiser_matrix = non_intra_quant_matrix != NULL;
if (non_intra_quant_matrix)
copy_quant_matrix (iq_matrix->non_intra_quantiser_matrix,
non_intra_quant_matrix);
iq_matrix->load_chroma_intra_quantiser_matrix = chroma_intra_quant_matrix != NULL;
iq_matrix->load_chroma_intra_quantiser_matrix =
chroma_intra_quant_matrix != NULL;
if (chroma_intra_quant_matrix)
copy_quant_matrix (iq_matrix->chroma_intra_quantiser_matrix,
chroma_intra_quant_matrix);
iq_matrix->load_chroma_non_intra_quantiser_matrix = chroma_non_intra_quant_matrix != NULL;
iq_matrix->load_chroma_non_intra_quantiser_matrix =
chroma_non_intra_quant_matrix != NULL;
if (chroma_non_intra_quant_matrix)
copy_quant_matrix (iq_matrix->chroma_non_intra_quantiser_matrix,
chroma_non_intra_quant_matrix);
@ -752,7 +761,8 @@ parse_sequence_scalable_ext(GstVaapiDecoderMpeg2 *decoder,
GstMpegVideoSequenceScalableExt *seq_scalable_ext;
if (!gst_vaapi_parser_info_mpeg2_ensure (&priv->seq_scalable_ext)) {
GST_ERROR("failed to allocate parser info for sequence scalable extension");
GST_ERROR
("failed to allocate parser info for sequence scalable extension");
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
@ -908,8 +918,7 @@ parse_picture_ext(GstVaapiDecoderMpeg2 *decoder,
GstMpegVideoPictureExt *pic_ext;
priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR |
GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT|
GST_MPEG_VIDEO_STATE_GOT_PIC_HDR);
GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT | GST_MPEG_VIDEO_STATE_GOT_PIC_HDR);
if (!gst_vaapi_parser_info_mpeg2_ensure (&priv->pic_ext)) {
GST_ERROR ("failed to allocate parser info for picture extension");
@ -943,7 +952,8 @@ decode_picture_ext(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
if (pic_ext->picture_structure == 0 ||
(pic_ext->progressive_frame &&
pic_ext->picture_structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME)) {
pic_ext->picture_structure !=
GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME)) {
GST_WARNING ("invalid picture_structure %d, replacing with \"frame\"",
pic_ext->picture_structure);
pic_ext->picture_structure = GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
@ -958,8 +968,7 @@ pack_f_code(guint8 f_code[2][2])
{
return (((guint32) f_code[0][0] << 12) |
((guint32) f_code[0][1] << 8) |
((guint32)f_code[1][0] << 4) |
( f_code[1][1] ));
((guint32) f_code[1][0] << 4) | (f_code[1][1]));
}
static GstVaapiDecoderStatus
@ -1022,11 +1031,9 @@ init_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
dummy_picture->poc = -1;
dummy_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
GST_VAAPI_PICTURE_FLAG_SET(
dummy_picture,
GST_VAAPI_PICTURE_FLAG_SET (dummy_picture,
(GST_VAAPI_PICTURE_FLAG_SKIPPED |
GST_VAAPI_PICTURE_FLAG_OUTPUT |
GST_VAAPI_PICTURE_FLAG_REFERENCE)
GST_VAAPI_PICTURE_FLAG_OUTPUT | GST_VAAPI_PICTURE_FLAG_REFERENCE)
);
success = gst_vaapi_dpb_add (priv->dpb, dummy_picture);
@ -1108,8 +1115,7 @@ parse_slice(GstVaapiDecoderMpeg2 *decoder,
priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR |
GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT |
GST_MPEG_VIDEO_STATE_GOT_PIC_HDR|
GST_MPEG_VIDEO_STATE_GOT_PIC_EXT);
GST_MPEG_VIDEO_STATE_GOT_PIC_HDR | GST_MPEG_VIDEO_STATE_GOT_PIC_EXT);
if (!is_valid_state (decoder, GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS))
return GST_VAAPI_DECODER_STATUS_SUCCESS;
@ -1413,7 +1419,6 @@ gst_vaapi_decoder_mpeg2_parse(GstVaapiDecoder *base_decoder,
break;
}
}
// Ignore system start codes (PES headers)
else if (type >= 0xb9 && type <= 0xff)
flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
@ -1495,8 +1500,7 @@ gst_vaapi_decoder_mpeg2_start_frame(GstVaapiDecoder *base_decoder,
GST_ERROR ("failed to allocate field picture");
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
}
else {
} else {
/* Create new picture */
picture = GST_VAAPI_PICTURE_NEW (MPEG2, decoder);
if (!picture) {

View file

@ -44,7 +44,8 @@
typedef struct _GstVaapiDecoderMpeg4Private GstVaapiDecoderMpeg4Private;
typedef struct _GstVaapiDecoderMpeg4Class GstVaapiDecoderMpeg4Class;
struct _GstVaapiDecoderMpeg4Private {
struct _GstVaapiDecoderMpeg4Private
{
GstVaapiProfile profile;
guint level;
guint width;
@ -100,7 +101,8 @@ struct _GstVaapiDecoderMpeg4Private {
*
* A decoder based on Mpeg4.
*/
struct _GstVaapiDecoderMpeg4 {
struct _GstVaapiDecoderMpeg4
{
/*< private > */
GstVaapiDecoder parent_instance;
GstVaapiDecoderMpeg4Private priv;
@ -111,7 +113,8 @@ struct _GstVaapiDecoderMpeg4 {
*
* A decoder class based on Mpeg4.
*/
struct _GstVaapiDecoderMpeg4Class {
struct _GstVaapiDecoderMpeg4Class
{
/*< private > */
GstVaapiDecoderClass parent_class;
};
@ -226,10 +229,8 @@ ensure_context(GstVaapiDecoderMpeg4 *decoder)
info.width = priv->width;
info.height = priv->height;
info.ref_frames = 2;
reset_context = gst_vaapi_decoder_ensure_context(
GST_VAAPI_DECODER(decoder),
&info
);
reset_context =
gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER (decoder), &info);
if (!reset_context)
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
@ -242,7 +243,8 @@ ensure_quant_matrix(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
VAIQMatrixBufferMPEG4 *iq_matrix;
if (!priv->vol_hdr.load_intra_quant_mat && !priv->vol_hdr.load_non_intra_quant_mat) {
if (!priv->vol_hdr.load_intra_quant_mat
&& !priv->vol_hdr.load_non_intra_quant_mat) {
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
@ -257,16 +259,14 @@ ensure_quant_matrix(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
iq_matrix->load_intra_quant_mat = 1;
copy_quant_matrix (iq_matrix->intra_quant_mat,
priv->vol_hdr.intra_quant_mat);
}
else
} else
iq_matrix->load_intra_quant_mat = 0;
if (priv->vol_hdr.load_non_intra_quant_mat) {
iq_matrix->load_non_intra_quant_mat = 1;
copy_quant_matrix (iq_matrix->non_intra_quant_mat,
priv->vol_hdr.non_intra_quant_mat);
}
else
} else
iq_matrix->load_non_intra_quant_mat = 0;
@ -306,13 +306,15 @@ decode_current_picture(GstVaapiDecoderMpeg4 *decoder)
}
static GstVaapiDecoderStatus
decode_sequence(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
decode_sequence (GstVaapiDecoderMpeg4 * decoder, const guint8 * buf,
guint buf_size)
{
GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
GstMpeg4VisualObjectSequence *const vos_hdr = &priv->vos_hdr;
GstVaapiProfile profile;
if (gst_mpeg4_parse_visual_object_sequence(vos_hdr, buf, buf_size) != GST_MPEG4_PARSER_OK) {
if (gst_mpeg4_parse_visual_object_sequence (vos_hdr, buf,
buf_size) != GST_MPEG4_PARSER_OK) {
GST_DEBUG ("failed to parse sequence header");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
@ -364,13 +366,15 @@ decode_sequence_end(GstVaapiDecoderMpeg4 *decoder)
}
static GstVaapiDecoderStatus
decode_visual_object(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
decode_visual_object (GstVaapiDecoderMpeg4 * decoder, const guint8 * buf,
guint buf_size)
{
GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
GstMpeg4VisualObject *vo_hdr = &priv->vo_hdr;
GstMpeg4VideoSignalType *signal_type = &priv->signal_type;
if (gst_mpeg4_parse_visual_object (vo_hdr, signal_type, buf, buf_size) != GST_MPEG4_PARSER_OK) {
if (gst_mpeg4_parse_visual_object (vo_hdr, signal_type, buf,
buf_size) != GST_MPEG4_PARSER_OK) {
GST_DEBUG ("failed to parse visual object");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
@ -380,14 +384,16 @@ decode_visual_object(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf
}
static GstVaapiDecoderStatus
decode_video_object_layer(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
decode_video_object_layer (GstVaapiDecoderMpeg4 * decoder, const guint8 * buf,
guint buf_size)
{
GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER (decoder);
GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
GstMpeg4VisualObject *vo_hdr = &priv->vo_hdr;
GstMpeg4VideoObjectLayer *vol_hdr = &priv->vol_hdr;
if (gst_mpeg4_parse_video_object_layer (vol_hdr, vo_hdr, buf, buf_size) != GST_MPEG4_PARSER_OK) {
if (gst_mpeg4_parse_video_object_layer (vol_hdr, vo_hdr, buf,
buf_size) != GST_MPEG4_PARSER_OK) {
GST_DEBUG ("failed to parse video object layer");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
@ -403,7 +409,8 @@ decode_video_object_layer(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guin
gst_vaapi_decoder_set_framerate (base_decoder, priv->fps_n, priv->fps_d);
}
gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder, priv->vol_hdr.par_width, priv->vol_hdr.par_height);
gst_vaapi_decoder_set_pixel_aspect_ratio (base_decoder,
priv->vol_hdr.par_width, priv->vol_hdr.par_height);
gst_vaapi_decoder_set_picture_size (base_decoder, priv->width, priv->height);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
@ -417,12 +424,12 @@ decode_gop(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
GstClockTime gop_time;
if (buf_size > 4) {
if (gst_mpeg4_parse_group_of_vop(&gop, buf, buf_size) != GST_MPEG4_PARSER_OK) {
if (gst_mpeg4_parse_group_of_vop (&gop, buf,
buf_size) != GST_MPEG4_PARSER_OK) {
GST_DEBUG ("failed to parse GOP");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
}
else {
} else {
gop.closed = 1;
gop.broken_link = 0;
gop.hours = 0;
@ -434,8 +441,7 @@ decode_gop(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
priv->broken_link = gop.broken_link;
GST_DEBUG ("GOP %02u:%02u:%02u (closed_gop %d, broken_link %d)",
gop.hours, gop.minutes, gop.seconds,
priv->closed_gop, priv->broken_link);
gop.hours, gop.minutes, gop.seconds, priv->closed_gop, priv->broken_link);
gop_time = gop.hours * 3600 + gop.minutes * 60 + gop.seconds;
priv->last_sync_time = gop_time;
@ -452,8 +458,7 @@ decode_gop(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
void
calculate_pts_diff (GstVaapiDecoderMpeg4 * decoder,
GstMpeg4VideoObjectLayer *vol_hdr,
GstMpeg4VideoObjectPlane *vop_hdr)
GstMpeg4VideoObjectLayer * vol_hdr, GstMpeg4VideoObjectPlane * vop_hdr)
{
GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
GstClockTime frame_timestamp;
@ -461,13 +466,11 @@ calculate_pts_diff(GstVaapiDecoderMpeg4 *decoder,
frame_timestamp = GST_VAAPI_DECODER_CODEC_FRAME (decoder)->pts;
if (frame_timestamp && frame_timestamp != GST_CLOCK_TIME_NONE) {
/* Buffer with timestamp */
if (priv->max_pts != GST_CLOCK_TIME_NONE &&
frame_timestamp < priv->max_pts) {
if (priv->max_pts != GST_CLOCK_TIME_NONE && frame_timestamp < priv->max_pts) {
frame_timestamp = priv->max_pts +
gst_util_uint64_scale ((vol_hdr->fixed_vop_rate ?
vol_hdr->fixed_vop_time_increment : 1),
GST_SECOND,
vol_hdr->vop_time_increment_resolution);
GST_SECOND, vol_hdr->vop_time_increment_resolution);
}
} else {
/* Buffer without timestamp set */
@ -478,16 +481,14 @@ calculate_pts_diff(GstVaapiDecoderMpeg4 *decoder,
tmp_pts = priv->pts_diff + priv->gop_pts +
vop_hdr->modulo_time_base * GST_SECOND +
gst_util_uint64_scale (vop_hdr->time_increment,
GST_SECOND,
vol_hdr->vop_time_increment_resolution);
GST_SECOND, vol_hdr->vop_time_increment_resolution);
if (tmp_pts > priv->max_pts)
frame_timestamp = tmp_pts;
else
frame_timestamp = priv->max_pts +
gst_util_uint64_scale ((vol_hdr->fixed_vop_rate ?
vol_hdr->fixed_vop_time_increment : 1),
GST_SECOND,
vol_hdr->vop_time_increment_resolution);
GST_SECOND, vol_hdr->vop_time_increment_resolution);
}
}
@ -498,7 +499,8 @@ calculate_pts_diff(GstVaapiDecoderMpeg4 *decoder,
}
static GstVaapiDecoderStatus
decode_picture(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
decode_picture (GstVaapiDecoderMpeg4 * decoder, const guint8 * buf,
guint buf_size)
{
GstMpeg4ParseResult parser_result = GST_MPEG4_PARSER_OK;
GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
@ -511,11 +513,14 @@ decode_picture(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
// context depends on priv->width and priv->height, so we move parse_vop a little earlier
if (priv->is_svh) {
parser_result = gst_mpeg4_parse_video_plane_short_header(&priv->svh_hdr, buf, buf_size);
parser_result =
gst_mpeg4_parse_video_plane_short_header (&priv->svh_hdr, buf,
buf_size);
}
else {
parser_result = gst_mpeg4_parse_video_object_plane(vop_hdr, sprite_trajectory, vol_hdr, buf, buf_size);
} else {
parser_result =
gst_mpeg4_parse_video_object_plane (vop_hdr, sprite_trajectory, vol_hdr,
buf, buf_size);
/* Need to skip this frame if VOP was not coded */
if (GST_MPEG4_PARSER_OK == parser_result && !vop_hdr->coded)
return (GstVaapiDecoderStatus) GST_VAAPI_DECODER_STATUS_DROP_FRAME;
@ -529,8 +534,7 @@ decode_picture(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
if (priv->is_svh) {
priv->width = priv->svh_hdr.vop_width;
priv->height = priv->svh_hdr.vop_height;
}
else {
} else {
if (!vop_hdr->width && !vop_hdr->height) {
vop_hdr->width = vol_hdr->width;
vop_hdr->height = vol_hdr->height;
@ -574,8 +578,7 @@ decode_picture(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
*/
if (priv->is_svh) {
priv->coding_type = priv->svh_hdr.picture_coding_type;
}
else {
} else {
priv->coding_type = priv->vop_hdr.coding_type;
}
switch (priv->coding_type) {
@ -622,8 +625,7 @@ decode_picture(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
pts += gst_util_uint64_scale (delta_ref, GST_SECOND * 1001, 30000);
priv->sync_time = pts;
priv->prev_t_ref = priv->svh_hdr.temporal_reference;
}
else {
} else {
/* Update priv->pts_diff */
if (priv->calculate_pts_diff) {
calculate_pts_diff (decoder, vol_hdr, vop_hdr);
@ -636,16 +638,23 @@ decode_picture(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
priv->last_sync_time = priv->sync_time;
priv->sync_time = priv->last_sync_time + vop_hdr->modulo_time_base;
pts = priv->sync_time * GST_SECOND;
pts += gst_util_uint64_scale(vop_hdr->time_increment, GST_SECOND, vol_hdr->vop_time_increment_resolution);
pts +=
gst_util_uint64_scale (vop_hdr->time_increment, GST_SECOND,
vol_hdr->vop_time_increment_resolution);
priv->last_non_b_scale_time = priv->non_b_scale_time;
priv->non_b_scale_time = priv->sync_time * vol_hdr->vop_time_increment_resolution + vop_hdr->time_increment;
priv->non_b_scale_time =
priv->sync_time * vol_hdr->vop_time_increment_resolution +
vop_hdr->time_increment;
priv->trd = priv->non_b_scale_time - priv->last_non_b_scale_time;
}
else {
} else {
// increment basing on display oder
pts = (priv->last_sync_time + vop_hdr->modulo_time_base) * GST_SECOND;
pts += gst_util_uint64_scale(vop_hdr->time_increment, GST_SECOND, vol_hdr->vop_time_increment_resolution);
priv->trb = (priv->last_sync_time + vop_hdr->modulo_time_base) * vol_hdr->vop_time_increment_resolution +
pts +=
gst_util_uint64_scale (vop_hdr->time_increment, GST_SECOND,
vol_hdr->vop_time_increment_resolution);
priv->trb =
(priv->last_sync_time +
vop_hdr->modulo_time_base) * vol_hdr->vop_time_increment_resolution +
vop_hdr->time_increment - priv->last_non_b_scale_time;
}
}
@ -702,13 +711,14 @@ fill_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
// VOP parameters
pic_param->vop_width = priv->svh_hdr.vop_width;
pic_param->vop_height = priv->svh_hdr.vop_height;
pic_param->vop_fields.bits.vop_coding_type = priv->svh_hdr.picture_coding_type;
pic_param->vop_time_increment_resolution = priv->vol_hdr.vop_time_increment_resolution;
pic_param->vop_fields.bits.vop_coding_type =
priv->svh_hdr.picture_coding_type;
pic_param->vop_time_increment_resolution =
priv->vol_hdr.vop_time_increment_resolution;
pic_param->num_gobs_in_vop = priv->svh_hdr.num_gobs_in_vop;
pic_param->num_macroblocks_in_gob = priv->svh_hdr.num_macroblocks_in_gob;
}
else {
} else {
int i;
// VOL parameters
@ -717,17 +727,23 @@ fill_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
pic_param->vol_fields.bits.interlaced = priv->vol_hdr.interlaced;
pic_param->vol_fields.bits.obmc_disable = priv->vol_hdr.obmc_disable;
pic_param->vol_fields.bits.sprite_enable = priv->vol_hdr.sprite_enable;
pic_param->vol_fields.bits.sprite_warping_accuracy = priv->vol_hdr.sprite_warping_accuracy;
pic_param->vol_fields.bits.sprite_warping_accuracy =
priv->vol_hdr.sprite_warping_accuracy;
pic_param->vol_fields.bits.quant_type = priv->vol_hdr.quant_type;
pic_param->vol_fields.bits.quarter_sample = priv->vol_hdr.quarter_sample;
pic_param->vol_fields.bits.data_partitioned = priv->vol_hdr.data_partitioned;
pic_param->vol_fields.bits.data_partitioned =
priv->vol_hdr.data_partitioned;
pic_param->vol_fields.bits.reversible_vlc = priv->vol_hdr.reversible_vlc;
pic_param->vol_fields.bits.resync_marker_disable = priv->vol_hdr.resync_marker_disable;
pic_param->no_of_sprite_warping_points = priv->vol_hdr.no_of_sprite_warping_points;
pic_param->vol_fields.bits.resync_marker_disable =
priv->vol_hdr.resync_marker_disable;
pic_param->no_of_sprite_warping_points =
priv->vol_hdr.no_of_sprite_warping_points;
for (i = 0; i < 3 && i < priv->vol_hdr.no_of_sprite_warping_points; i++) {
pic_param->sprite_trajectory_du[i] = priv->sprite_trajectory.vop_ref_points[i];
pic_param->sprite_trajectory_dv[i] = priv->sprite_trajectory.sprite_ref_points[i];
pic_param->sprite_trajectory_du[i] =
priv->sprite_trajectory.vop_ref_points[i];
pic_param->sprite_trajectory_dv[i] =
priv->sprite_trajectory.sprite_ref_points[i];
}
pic_param->quant_precision = priv->vol_hdr.quant_precision;
@ -738,11 +754,13 @@ fill_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
pic_param->vop_fields.bits.vop_rounding_type = vop_hdr->rounding_type;
pic_param->vop_fields.bits.intra_dc_vlc_thr = vop_hdr->intra_dc_vlc_thr;
pic_param->vop_fields.bits.top_field_first = vop_hdr->top_field_first;
pic_param->vop_fields.bits.alternate_vertical_scan_flag = vop_hdr->alternate_vertical_scan_flag;
pic_param->vop_fields.bits.alternate_vertical_scan_flag =
vop_hdr->alternate_vertical_scan_flag;
pic_param->vop_fcode_forward = vop_hdr->fcode_forward;
pic_param->vop_fcode_backward = vop_hdr->fcode_backward;
pic_param->vop_time_increment_resolution = priv->vol_hdr.vop_time_increment_resolution;
pic_param->vop_time_increment_resolution =
priv->vol_hdr.vop_time_increment_resolution;
}
pic_param->TRB = 0;
@ -751,7 +769,8 @@ fill_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
case GST_MPEG4_B_VOP:
pic_param->TRB = priv->trb;
pic_param->backward_reference_picture = priv->next_picture->surface_id;
pic_param->vop_fields.bits.backward_reference_vop_coding_type = get_vop_coding_type(priv->next_picture);
pic_param->vop_fields.bits.backward_reference_vop_coding_type =
get_vop_coding_type (priv->next_picture);
// fall-through
case GST_MPEG4_P_VOP:
pic_param->TRD = priv->trd;
@ -767,12 +786,8 @@ fill_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
}
static GstVaapiDecoderStatus
decode_slice(
GstVaapiDecoderMpeg4 *decoder,
const guint8 *buf,
guint buf_size,
gboolean has_packet_header
)
decode_slice (GstVaapiDecoderMpeg4 * decoder,
const guint8 * buf, guint buf_size, gboolean has_packet_header)
{
GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
GstVaapiPicture *const picture = priv->curr_picture;
@ -799,14 +814,12 @@ decode_slice(
slice_param->macroblock_number = 0;
// the header of first gob_layer is empty (gob_header_empty=1), use vop_quant
slice_param->quant_scale = priv->svh_hdr.vop_quant;
}
else {
} else {
if (has_packet_header) {
slice_param->macroblock_offset = priv->packet_hdr.size % 8;
slice_param->macroblock_number = priv->packet_hdr.macroblock_number;
slice_param->quant_scale = priv->packet_hdr.quant_scale;
}
else {
} else {
slice_param->macroblock_offset = priv->vop_hdr.size % 8;
slice_param->macroblock_number = 0;
slice_param->quant_scale = priv->vop_hdr.quant;
@ -824,25 +837,27 @@ decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
// packet.size is the size from current marker to the next.
if (tos->type == GST_MPEG4_VISUAL_OBJ_SEQ_START) {
status = decode_sequence(decoder, packet.data + packet.offset, packet.size);
}
else if (tos->type == GST_MPEG4_VISUAL_OBJ_SEQ_END) {
status =
decode_sequence (decoder, packet.data + packet.offset, packet.size);
} else if (tos->type == GST_MPEG4_VISUAL_OBJ_SEQ_END) {
status = decode_sequence_end (decoder);
}
else if (tos->type == GST_MPEG4_VISUAL_OBJ) {
status = decode_visual_object(decoder, packet.data + packet.offset, packet.size);
}
else if (tos->type >= GST_MPEG4_VIDEO_OBJ_FIRST && tos->type <= GST_MPEG4_VIDEO_OBJ_LAST) {
GST_WARNING("unexpected marker: (GST_MPEG4_VIDEO_OBJ_FIRST, GST_MPEG4_VIDEO_OBJ_LAST)");
} else if (tos->type == GST_MPEG4_VISUAL_OBJ) {
status =
decode_visual_object (decoder, packet.data + packet.offset,
packet.size);
} else if (tos->type >= GST_MPEG4_VIDEO_OBJ_FIRST
&& tos->type <= GST_MPEG4_VIDEO_OBJ_LAST) {
GST_WARNING
("unexpected marker: (GST_MPEG4_VIDEO_OBJ_FIRST, GST_MPEG4_VIDEO_OBJ_LAST)");
status = GST_VAAPI_DECODER_STATUS_SUCCESS;
}
else if (tos->type >= GST_MPEG4_VIDEO_LAYER_FIRST && tos->type <= GST_MPEG4_VIDEO_LAYER_LAST) {
status = decode_video_object_layer(decoder, packet.data + packet.offset, packet.size);
}
else if (tos->type == GST_MPEG4_GROUP_OF_VOP) {
} else if (tos->type >= GST_MPEG4_VIDEO_LAYER_FIRST
&& tos->type <= GST_MPEG4_VIDEO_LAYER_LAST) {
status =
decode_video_object_layer (decoder, packet.data + packet.offset,
packet.size);
} else if (tos->type == GST_MPEG4_GROUP_OF_VOP) {
status = decode_gop (decoder, packet.data + packet.offset, packet.size);
}
else if (tos->type == GST_MPEG4_VIDEO_OBJ_PLANE) {
} else if (tos->type == GST_MPEG4_VIDEO_OBJ_PLANE) {
GstMpeg4Packet video_packet;
const guint8 *_data;
gint _data_size;
@ -867,8 +882,7 @@ decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
status = decode_slice (decoder, _data, _data_size, FALSE);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
}
else {
} else {
GstMpeg4ParseResult ret = GST_MPEG4_PARSER_OK;
gboolean first_slice = TRUE;
@ -877,7 +891,9 @@ decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
while (_data_size > 0) {
// we can skip user data here
ret = gst_mpeg4_parse(&video_packet, TRUE, &priv->vop_hdr, _data, 0, _data_size);
ret =
gst_mpeg4_parse (&video_packet, TRUE, &priv->vop_hdr, _data, 0,
_data_size);
if (ret != GST_MPEG4_PARSER_OK) {
break;
}
@ -885,13 +901,17 @@ decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
if (first_slice) {
status = decode_slice (decoder, _data, video_packet.size, FALSE);
first_slice = FALSE;
}
else {
} else {
_data += video_packet.offset;
_data_size -= video_packet.offset;
ret = gst_mpeg4_parse_video_packet_header (&priv->packet_hdr, &priv->vol_hdr, &priv->vop_hdr, &priv->sprite_trajectory, _data, _data_size);
status = decode_slice(decoder,_data + priv->packet_hdr.size/8, video_packet.size - priv->packet_hdr.size/8, TRUE);
ret =
gst_mpeg4_parse_video_packet_header (&priv->packet_hdr,
&priv->vol_hdr, &priv->vop_hdr, &priv->sprite_trajectory, _data,
_data_size);
status =
decode_slice (decoder, _data + priv->packet_hdr.size / 8,
video_packet.size - priv->packet_hdr.size / 8, TRUE);
}
_data += video_packet.size;
@ -899,8 +919,7 @@ decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
}
}
status = decode_current_picture (decoder);
}
else if (tos->type == GST_MPEG4_USER_DATA
} else if (tos->type == GST_MPEG4_USER_DATA
|| tos->type == GST_MPEG4_VIDEO_SESSION_ERR
|| tos->type == GST_MPEG4_FBA
|| tos->type == GST_MPEG4_FBA_PLAN
@ -916,8 +935,7 @@ decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
|| tos->type == GST_MPEG4_SYSTEM_LAST) {
GST_WARNING ("Ignore marker: %x\n", tos->type);
status = GST_VAAPI_DECODER_STATUS_SUCCESS;
}
else {
} else {
GST_ERROR ("unsupported start code %x\n", tos->type);
status = GST_VAAPI_DECODER_STATUS_SUCCESS;
}
@ -926,7 +944,8 @@ decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
}
static GstVaapiDecoderStatus
decode_buffer(GstVaapiDecoderMpeg4 *decoder, const guchar *buf, guint buf_size)
decode_buffer (GstVaapiDecoderMpeg4 * decoder, const guchar * buf,
guint buf_size)
{
GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
GstVaapiDecoderStatus status;
@ -942,8 +961,7 @@ decode_buffer(GstVaapiDecoderMpeg4 *decoder, const guchar *buf, guint buf_size)
status = decode_slice (decoder, buf + ofs, buf_size - ofs, FALSE);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
}
else {
} else {
packet.data = buf;
packet.offset = 0;
packet.size = buf_size;
@ -987,8 +1005,7 @@ gst_vaapi_decoder_mpeg4_decode_codec_data(GstVaapiDecoder *base_decoder,
status = decode_packet (decoder, packet);
if (GST_VAAPI_DECODER_STATUS_SUCCESS == status) {
pos = packet.offset + packet.size;
}
else {
} else {
GST_WARNING ("decode mp4 packet failed when decoding codec data\n");
break;
}
@ -1008,8 +1025,8 @@ ensure_decoder(GstVaapiDecoderMpeg4 *decoder)
if (!priv->is_opened)
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
status = gst_vaapi_decoder_decode_codec_data(
GST_VAAPI_DECODER_CAST(decoder));
status =
gst_vaapi_decoder_decode_codec_data (GST_VAAPI_DECODER_CAST (decoder));
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
}
@ -1149,8 +1166,7 @@ gst_vaapi_decoder_mpeg4_class_init(GstVaapiDecoderMpeg4Class *klass)
decoder_class->parse = gst_vaapi_decoder_mpeg4_parse;
decoder_class->decode = gst_vaapi_decoder_mpeg4_decode;
decoder_class->decode_codec_data =
gst_vaapi_decoder_mpeg4_decode_codec_data;
decoder_class->decode_codec_data = gst_vaapi_decoder_mpeg4_decode_codec_data;
}
static inline const GstVaapiDecoderClass *

View file

@ -50,7 +50,8 @@ typedef struct _GstVaapiDecoderVC1Class GstVaapiDecoderVC1Class;
*
* A decoder based on VC1.
*/
struct _GstVaapiDecoderVC1Private {
struct _GstVaapiDecoderVC1Private
{
GstVaapiProfile profile;
guint width;
guint height;
@ -80,7 +81,8 @@ struct _GstVaapiDecoderVC1Private {
*
* A decoder based on VC1.
*/
struct _GstVaapiDecoderVC1 {
struct _GstVaapiDecoderVC1
{
/*< private > */
GstVaapiDecoder parent_instance;
GstVaapiDecoderVC1Private priv;
@ -91,7 +93,8 @@ struct _GstVaapiDecoderVC1 {
*
* A decoder class based on VC1.
*/
struct _GstVaapiDecoderVC1Class {
struct _GstVaapiDecoderVC1Class
{
/*< private > */
GstVaapiDecoderClass parent_class;
};
@ -153,8 +156,7 @@ gst_vaapi_decoder_vc1_open(GstVaapiDecoderVC1 *decoder)
static void
gst_vaapi_decoder_vc1_destroy (GstVaapiDecoder * base_decoder)
{
GstVaapiDecoderVC1 * const decoder =
GST_VAAPI_DECODER_VC1_CAST(base_decoder);
GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
GstVaapiDecoderVC1Private *const priv = &decoder->priv;
gst_vaapi_decoder_vc1_close (decoder);
@ -169,8 +171,7 @@ gst_vaapi_decoder_vc1_destroy(GstVaapiDecoder *base_decoder)
static gboolean
gst_vaapi_decoder_vc1_create (GstVaapiDecoder * base_decoder)
{
GstVaapiDecoderVC1 * const decoder =
GST_VAAPI_DECODER_VC1_CAST(base_decoder);
GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
GstVaapiDecoderVC1Private *const priv = &decoder->priv;
priv->profile = (GstVaapiProfile) 0;
@ -221,10 +222,8 @@ ensure_context(GstVaapiDecoderVC1 *decoder)
info.width = priv->width;
info.height = priv->height;
info.ref_frames = 2;
reset_context = gst_vaapi_decoder_ensure_context(
GST_VAAPI_DECODER(decoder),
&info
);
reset_context =
gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER (decoder), &info);
if (!reset_context)
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
@ -256,7 +255,8 @@ error:
}
static GstVaapiDecoderStatus
decode_sequence(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
decode_sequence (GstVaapiDecoderVC1 * decoder, GstVC1BDU * rbdu,
GstVC1BDU * ebdu)
{
GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER (decoder);
GstVaapiDecoderVC1Private *const priv = &decoder->priv;
@ -267,11 +267,8 @@ decode_sequence(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
GstVaapiProfile profile;
guint width, height, fps_n, fps_d, par_n, par_d;
result = gst_vc1_parse_sequence_header(
rbdu->data + rbdu->offset,
rbdu->size,
seq_hdr
);
result = gst_vc1_parse_sequence_header (rbdu->data + rbdu->offset,
rbdu->size, seq_hdr);
if (result != GST_VC1_PARSER_OK) {
GST_ERROR ("failed to parse sequence layer");
return get_status (result);
@ -387,19 +384,16 @@ decode_sequence_end(GstVaapiDecoderVC1 *decoder)
}
static GstVaapiDecoderStatus
decode_entry_point(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
decode_entry_point (GstVaapiDecoderVC1 * decoder, GstVC1BDU * rbdu,
GstVC1BDU * ebdu)
{
GstVaapiDecoderVC1Private *const priv = &decoder->priv;
GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
GstVC1EntryPointHdr *const entrypoint_hdr = &priv->entrypoint_hdr;
GstVC1ParserResult result;
result = gst_vc1_parse_entry_point_header(
rbdu->data + rbdu->offset,
rbdu->size,
entrypoint_hdr,
seq_hdr
);
result = gst_vc1_parse_entry_point_header (rbdu->data + rbdu->offset,
rbdu->size, entrypoint_hdr, seq_hdr);
if (result != GST_VC1_PARSER_OK) {
GST_ERROR ("failed to parse entrypoint layer");
return get_status (result);
@ -422,10 +416,14 @@ static guint
get_PTYPE (guint ptype)
{
switch (ptype) {
case GST_VC1_PICTURE_TYPE_I: return 0;
case GST_VC1_PICTURE_TYPE_P: return 1;
case GST_VC1_PICTURE_TYPE_B: return 2;
case GST_VC1_PICTURE_TYPE_BI: return 3;
case GST_VC1_PICTURE_TYPE_I:
return 0;
case GST_VC1_PICTURE_TYPE_P:
return 1;
case GST_VC1_PICTURE_TYPE_B:
return 2;
case GST_VC1_PICTURE_TYPE_BI:
return 3;
}
return 4; /* skipped P-frame */
}
@ -436,34 +434,36 @@ get_BFRACTION(guint bfraction)
{
guint i;
static const struct {
static const struct
{
guint16 index;
guint16 value;
}
bfraction_map[] = {
{ 0, GST_VC1_BFRACTION_BASIS / 2 },
{ 1, GST_VC1_BFRACTION_BASIS / 3 },
{ 2, (GST_VC1_BFRACTION_BASIS * 2) / 3 },
{ 3, GST_VC1_BFRACTION_BASIS / 4 },
{ 4, (GST_VC1_BFRACTION_BASIS * 3) / 4 },
{ 5, GST_VC1_BFRACTION_BASIS / 5 },
{ 6, (GST_VC1_BFRACTION_BASIS * 2) / 5 },
{ 7, (GST_VC1_BFRACTION_BASIS * 3) / 5 },
{ 8, (GST_VC1_BFRACTION_BASIS * 4) / 5 },
{ 9, GST_VC1_BFRACTION_BASIS / 6 },
{ 10, (GST_VC1_BFRACTION_BASIS * 5) / 6 },
{ 11, GST_VC1_BFRACTION_BASIS / 7 },
{ 12, (GST_VC1_BFRACTION_BASIS * 2) / 7 },
{ 13, (GST_VC1_BFRACTION_BASIS * 3) / 7 },
{ 14, (GST_VC1_BFRACTION_BASIS * 4) / 7 },
{ 15, (GST_VC1_BFRACTION_BASIS * 5) / 7 },
{ 16, (GST_VC1_BFRACTION_BASIS * 6) / 7 },
{ 17, GST_VC1_BFRACTION_BASIS / 8 },
{ 18, (GST_VC1_BFRACTION_BASIS * 3) / 8 },
{ 19, (GST_VC1_BFRACTION_BASIS * 5) / 8 },
{ 20, (GST_VC1_BFRACTION_BASIS * 7) / 8 },
{ 21, GST_VC1_BFRACTION_RESERVED },
{ 22, GST_VC1_BFRACTION_PTYPE_BI }
{
0, GST_VC1_BFRACTION_BASIS / 2}, {
1, GST_VC1_BFRACTION_BASIS / 3}, {
2, (GST_VC1_BFRACTION_BASIS * 2) / 3}, {
3, GST_VC1_BFRACTION_BASIS / 4}, {
4, (GST_VC1_BFRACTION_BASIS * 3) / 4}, {
5, GST_VC1_BFRACTION_BASIS / 5}, {
6, (GST_VC1_BFRACTION_BASIS * 2) / 5}, {
7, (GST_VC1_BFRACTION_BASIS * 3) / 5}, {
8, (GST_VC1_BFRACTION_BASIS * 4) / 5}, {
9, GST_VC1_BFRACTION_BASIS / 6}, {
10, (GST_VC1_BFRACTION_BASIS * 5) / 6}, {
11, GST_VC1_BFRACTION_BASIS / 7}, {
12, (GST_VC1_BFRACTION_BASIS * 2) / 7}, {
13, (GST_VC1_BFRACTION_BASIS * 3) / 7}, {
14, (GST_VC1_BFRACTION_BASIS * 4) / 7}, {
15, (GST_VC1_BFRACTION_BASIS * 5) / 7}, {
16, (GST_VC1_BFRACTION_BASIS * 6) / 7}, {
17, GST_VC1_BFRACTION_BASIS / 8}, {
18, (GST_VC1_BFRACTION_BASIS * 3) / 8}, {
19, (GST_VC1_BFRACTION_BASIS * 5) / 8}, {
20, (GST_VC1_BFRACTION_BASIS * 7) / 8}, {
21, GST_VC1_BFRACTION_RESERVED}, {
22, GST_VC1_BFRACTION_PTYPE_BI}
};
if (!bfraction)
@ -481,11 +481,16 @@ static guint
get_VAMvModeVC1 (guint mvmode)
{
switch (mvmode) {
case GST_VC1_MVMODE_1MV_HPEL_BILINEAR: return VAMvMode1MvHalfPelBilinear;
case GST_VC1_MVMODE_1MV: return VAMvMode1Mv;
case GST_VC1_MVMODE_1MV_HPEL: return VAMvMode1MvHalfPel;
case GST_VC1_MVMODE_MIXED_MV: return VAMvModeMixedMv;
case GST_VC1_MVMODE_INTENSITY_COMP: return VAMvModeIntensityCompensation;
case GST_VC1_MVMODE_1MV_HPEL_BILINEAR:
return VAMvMode1MvHalfPelBilinear;
case GST_VC1_MVMODE_1MV:
return VAMvMode1Mv;
case GST_VC1_MVMODE_1MV_HPEL:
return VAMvMode1MvHalfPel;
case GST_VC1_MVMODE_MIXED_MV:
return VAMvModeMixedMv;
case GST_VC1_MVMODE_INTENSITY_COMP:
return VAMvModeIntensityCompensation;
}
return 0;
}
@ -516,8 +521,7 @@ get_MVMODE2(GstVC1FrameHdr *frame_hdr)
if (frame_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
mvmode = frame_hdr->pic.advanced.mvmode;
mvmode2 = frame_hdr->pic.advanced.mvmode2;
}
else {
} else {
mvmode = frame_hdr->pic.simple.mvmode;
mvmode2 = frame_hdr->pic.simple.mvmode2;
}
@ -542,8 +546,7 @@ has_MVTYPEMB_bitplane(GstVaapiDecoderVC1 *decoder)
return 0;
mvmode = pic->mvmode;
mvmode2 = pic->mvmode2;
}
else {
} else {
GstVC1PicSimpleMain *const pic = &frame_hdr->pic.simple;
if (pic->mvtypemb)
return 0;
@ -567,8 +570,7 @@ has_SKIPMB_bitplane(GstVaapiDecoderVC1 *decoder)
GstVC1PicAdvanced *const pic = &frame_hdr->pic.advanced;
if (pic->skipmb)
return 0;
}
else {
} else {
GstVC1PicSimpleMain *const pic = &frame_hdr->pic.simple;
if (pic->skipmb)
return 0;
@ -588,8 +590,7 @@ has_DIRECTMB_bitplane(GstVaapiDecoderVC1 *decoder)
GstVC1PicAdvanced *const pic = &frame_hdr->pic.advanced;
if (pic->directmb)
return 0;
}
else {
} else {
GstVC1PicSimpleMain *const pic = &frame_hdr->pic.simple;
if (pic->directmb)
return 0;
@ -633,7 +634,8 @@ has_OVERFLAGS_bitplane(GstVaapiDecoderVC1 *decoder)
}
static inline void
pack_bitplanes(GstVaapiBitPlane *bitplane, guint n, const guint8 *bitplanes[3], guint x, guint y, guint stride)
pack_bitplanes (GstVaapiBitPlane * bitplane, guint n,
const guint8 * bitplanes[3], guint x, guint y, guint stride)
{
const guint dst_index = n / 2;
const guint src_index = y * stride + x;
@ -677,16 +679,20 @@ fill_picture_structc(GstVaapiDecoderVC1 *decoder, GstVaapiPicture *picture)
pic_param->raw_coding.flags.mv_type_mb = pic->mvtypemb;
pic_param->raw_coding.flags.direct_mb = pic->directmb;
pic_param->raw_coding.flags.skip_mb = pic->skipmb;
pic_param->bitplane_present.flags.bp_mv_type_mb = has_MVTYPEMB_bitplane(decoder);
pic_param->bitplane_present.flags.bp_direct_mb = has_DIRECTMB_bitplane(decoder);
pic_param->bitplane_present.flags.bp_mv_type_mb =
has_MVTYPEMB_bitplane (decoder);
pic_param->bitplane_present.flags.bp_direct_mb =
has_DIRECTMB_bitplane (decoder);
pic_param->bitplane_present.flags.bp_skip_mb = has_SKIPMB_bitplane (decoder);
pic_param->mv_fields.bits.mv_table = pic->mvtab;
pic_param->mv_fields.bits.extended_mv_flag = structc->extended_mv;
pic_param->mv_fields.bits.extended_mv_range = pic->mvrange;
pic_param->transform_fields.bits.variable_sized_transform_flag = structc->vstransform;
pic_param->transform_fields.bits.variable_sized_transform_flag =
structc->vstransform;
pic_param->transform_fields.bits.mb_level_transform_type_flag = pic->ttmbf;
pic_param->transform_fields.bits.frame_level_transform_type = pic->ttfrm;
pic_param->transform_fields.bits.transform_ac_codingset_idx2 = pic->transacfrm2;
pic_param->transform_fields.bits.transform_ac_codingset_idx2 =
pic->transacfrm2;
/* Refer to 8.3.7 Rounding control for Simple and Main Profile */
if (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_I ||
@ -726,9 +732,11 @@ fill_picture_advanced(GstVaapiDecoderVC1 *decoder, GstVaapiPicture *picture)
pic_param->entrypoint_fields.bits.loopfilter = entrypoint_hdr->loopfilter;
pic_param->conditional_overlap_flag = pic->condover;
pic_param->fast_uvmc_flag = entrypoint_hdr->fastuvmc;
pic_param->range_mapping_fields.bits.luma_flag = entrypoint_hdr->range_mapy_flag;
pic_param->range_mapping_fields.bits.luma_flag =
entrypoint_hdr->range_mapy_flag;
pic_param->range_mapping_fields.bits.luma = entrypoint_hdr->range_mapy;
pic_param->range_mapping_fields.bits.chroma_flag = entrypoint_hdr->range_mapuv_flag;
pic_param->range_mapping_fields.bits.chroma_flag =
entrypoint_hdr->range_mapuv_flag;
pic_param->range_mapping_fields.bits.chroma = entrypoint_hdr->range_mapuv;
pic_param->b_picture_fraction = get_BFRACTION (pic->bfraction);
pic_param->cbp_table = pic->cbptab;
@ -742,28 +750,35 @@ fill_picture_advanced(GstVaapiDecoderVC1 *decoder, GstVaapiPicture *picture)
pic_param->picture_fields.bits.frame_coding_mode = pic->fcm;
pic_param->picture_fields.bits.top_field_first = pic->tff;
pic_param->picture_fields.bits.is_first_field = pic->fcm == 0; /* XXX: interlaced frame */
pic_param->picture_fields.bits.intensity_compensation = pic->mvmode == GST_VC1_MVMODE_INTENSITY_COMP;
pic_param->picture_fields.bits.intensity_compensation =
pic->mvmode == GST_VC1_MVMODE_INTENSITY_COMP;
pic_param->raw_coding.flags.mv_type_mb = pic->mvtypemb;
pic_param->raw_coding.flags.direct_mb = pic->directmb;
pic_param->raw_coding.flags.skip_mb = pic->skipmb;
pic_param->raw_coding.flags.ac_pred = pic->acpred;
pic_param->raw_coding.flags.overflags = pic->overflags;
pic_param->bitplane_present.flags.bp_mv_type_mb = has_MVTYPEMB_bitplane(decoder);
pic_param->bitplane_present.flags.bp_direct_mb = has_DIRECTMB_bitplane(decoder);
pic_param->bitplane_present.flags.bp_mv_type_mb =
has_MVTYPEMB_bitplane (decoder);
pic_param->bitplane_present.flags.bp_direct_mb =
has_DIRECTMB_bitplane (decoder);
pic_param->bitplane_present.flags.bp_skip_mb = has_SKIPMB_bitplane (decoder);
pic_param->bitplane_present.flags.bp_ac_pred = has_ACPRED_bitplane (decoder);
pic_param->bitplane_present.flags.bp_overflags = has_OVERFLAGS_bitplane(decoder);
pic_param->reference_fields.bits.reference_distance_flag = entrypoint_hdr->refdist_flag;
pic_param->bitplane_present.flags.bp_overflags =
has_OVERFLAGS_bitplane (decoder);
pic_param->reference_fields.bits.reference_distance_flag =
entrypoint_hdr->refdist_flag;
pic_param->mv_fields.bits.mv_table = pic->mvtab;
pic_param->mv_fields.bits.extended_mv_flag = entrypoint_hdr->extended_mv;
pic_param->mv_fields.bits.extended_mv_range = pic->mvrange;
pic_param->mv_fields.bits.extended_dmv_flag = entrypoint_hdr->extended_dmv;
pic_param->pic_quantizer_fields.bits.dquant = entrypoint_hdr->dquant;
pic_param->pic_quantizer_fields.bits.quantizer = entrypoint_hdr->quantizer;
pic_param->transform_fields.bits.variable_sized_transform_flag = entrypoint_hdr->vstransform;
pic_param->transform_fields.bits.variable_sized_transform_flag =
entrypoint_hdr->vstransform;
pic_param->transform_fields.bits.mb_level_transform_type_flag = pic->ttmbf;
pic_param->transform_fields.bits.frame_level_transform_type = pic->ttfrm;
pic_param->transform_fields.bits.transform_ac_codingset_idx2 = pic->transacfrm2;
pic_param->transform_fields.bits.transform_ac_codingset_idx2 =
pic->transacfrm2;
return TRUE;
}
@ -800,22 +815,28 @@ fill_picture(GstVaapiDecoderVC1 *decoder, GstVaapiPicture *picture)
pic_param->pic_quantizer_fields.value = 0;
pic_param->pic_quantizer_fields.bits.half_qp = frame_hdr->halfqp;
pic_param->pic_quantizer_fields.bits.pic_quantizer_scale = frame_hdr->pquant;
pic_param->pic_quantizer_fields.bits.pic_quantizer_type = frame_hdr->pquantizer;
pic_param->pic_quantizer_fields.bits.pic_quantizer_type =
frame_hdr->pquantizer;
pic_param->pic_quantizer_fields.bits.dq_frame = vopdquant->dquantfrm;
pic_param->pic_quantizer_fields.bits.dq_profile = vopdquant->dqprofile;
pic_param->pic_quantizer_fields.bits.dq_sb_edge = vopdquant->dqprofile == GST_VC1_DQPROFILE_SINGLE_EDGE ? vopdquant->dqbedge : 0;
pic_param->pic_quantizer_fields.bits.dq_db_edge = vopdquant->dqprofile == GST_VC1_DQPROFILE_DOUBLE_EDGES ? vopdquant->dqbedge : 0;
pic_param->pic_quantizer_fields.bits.dq_sb_edge =
vopdquant->dqprofile ==
GST_VC1_DQPROFILE_SINGLE_EDGE ? vopdquant->dqbedge : 0;
pic_param->pic_quantizer_fields.bits.dq_db_edge =
vopdquant->dqprofile ==
GST_VC1_DQPROFILE_DOUBLE_EDGES ? vopdquant->dqbedge : 0;
pic_param->pic_quantizer_fields.bits.dq_binary_level = vopdquant->dqbilevel;
pic_param->pic_quantizer_fields.bits.alt_pic_quantizer = vopdquant->altpquant;
pic_param->transform_fields.value = 0;
pic_param->transform_fields.bits.transform_ac_codingset_idx1 = frame_hdr->transacfrm;
pic_param->transform_fields.bits.intra_transform_dc_table = frame_hdr->transdctab;
pic_param->transform_fields.bits.transform_ac_codingset_idx1 =
frame_hdr->transacfrm;
pic_param->transform_fields.bits.intra_transform_dc_table =
frame_hdr->transdctab;
if (seq_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
if (!fill_picture_advanced (decoder, picture))
return FALSE;
}
else {
} else {
if (!fill_picture_structc (decoder, picture))
return FALSE;
}
@ -846,20 +867,27 @@ fill_picture(GstVaapiDecoderVC1 *decoder, GstVaapiPicture *picture)
switch (picture->type) {
case GST_VAAPI_PICTURE_TYPE_P:
bitplanes[0] = pic_param->bitplane_present.flags.bp_direct_mb ? priv->bitplanes->directmb : NULL;
bitplanes[1] = pic_param->bitplane_present.flags.bp_skip_mb ? priv->bitplanes->skipmb : NULL;
bitplanes[2] = pic_param->bitplane_present.flags.bp_mv_type_mb ? priv->bitplanes->mvtypemb : NULL;
bitplanes[0] = pic_param->bitplane_present.flags.bp_direct_mb ?
priv->bitplanes->directmb : NULL;
bitplanes[1] = pic_param->bitplane_present.flags.bp_skip_mb ?
priv->bitplanes->skipmb : NULL;
bitplanes[2] = pic_param->bitplane_present.flags.bp_mv_type_mb ?
priv->bitplanes->mvtypemb : NULL;
break;
case GST_VAAPI_PICTURE_TYPE_B:
bitplanes[0] = pic_param->bitplane_present.flags.bp_direct_mb ? priv->bitplanes->directmb : NULL;
bitplanes[1] = pic_param->bitplane_present.flags.bp_skip_mb ? priv->bitplanes->skipmb : NULL;
bitplanes[0] = pic_param->bitplane_present.flags.bp_direct_mb ?
priv->bitplanes->directmb : NULL;
bitplanes[1] = pic_param->bitplane_present.flags.bp_skip_mb ?
priv->bitplanes->skipmb : NULL;
bitplanes[2] = NULL; /* XXX: interlaced frame (FORWARD plane) */
break;
case GST_VAAPI_PICTURE_TYPE_BI:
case GST_VAAPI_PICTURE_TYPE_I:
bitplanes[0] = NULL; /* XXX: interlaced frame (FIELDTX plane) */
bitplanes[1] = pic_param->bitplane_present.flags.bp_ac_pred ? priv->bitplanes->acpred : NULL;
bitplanes[2] = pic_param->bitplane_present.flags.bp_overflags ? priv->bitplanes->overflags : NULL;
bitplanes[1] = pic_param->bitplane_present.flags.bp_ac_pred ?
priv->bitplanes->acpred : NULL;
bitplanes[2] = pic_param->bitplane_present.flags.bp_overflags ?
priv->bitplanes->overflags : NULL;
break;
default:
bitplanes[0] = NULL;
@ -868,17 +896,16 @@ fill_picture(GstVaapiDecoderVC1 *decoder, GstVaapiPicture *picture)
break;
}
picture->bitplane = GST_VAAPI_BITPLANE_NEW(
decoder,
(seq_hdr->mb_width * seq_hdr->mb_height + 1) / 2
);
picture->bitplane = GST_VAAPI_BITPLANE_NEW (decoder,
(seq_hdr->mb_width * seq_hdr->mb_height + 1) / 2);
if (!picture->bitplane)
return FALSE;
n = 0;
for (y = 0; y < seq_hdr->mb_height; y++)
for (x = 0; x < seq_hdr->mb_width; x++, n++)
pack_bitplanes(picture->bitplane, n, bitplanes, x, y, seq_hdr->mb_stride);
pack_bitplanes (picture->bitplane, n, bitplanes, x, y,
seq_hdr->mb_stride);
if (n & 1) /* move last nibble to the high order */
picture->bitplane->data[n / 2] <<= 4;
}
@ -920,13 +947,8 @@ decode_frame(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
GstVaapiPicture *const picture = priv->current_picture;
memset (frame_hdr, 0, sizeof (*frame_hdr));
result = gst_vc1_parse_frame_header(
rbdu->data + rbdu->offset,
rbdu->size,
frame_hdr,
&priv->seq_hdr,
priv->bitplanes
);
result = gst_vc1_parse_frame_header (rbdu->data + rbdu->offset,
rbdu->size, frame_hdr, &priv->seq_hdr, priv->bitplanes);
if (result != GST_VC1_PARSER_OK) {
GST_ERROR ("failed to parse frame layer");
return get_status (result);
@ -959,8 +981,7 @@ decode_frame(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
(priv->last_non_b_picture->poc + 1) : priv->next_poc;
priv->next_poc = picture->poc + 1;
gst_vaapi_picture_replace (&priv->last_non_b_picture, picture);
}
else if (!priv->last_non_b_picture)
} else if (!priv->last_non_b_picture)
picture->poc = priv->next_poc++;
else { /* B or BI */
picture->poc = priv->last_non_b_picture->poc++;
@ -981,12 +1002,8 @@ decode_slice(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
GstVC1ParserResult result;
memset (&slice_hdr, 0, sizeof (slice_hdr));
result = gst_vc1_parse_slice_header(
rbdu->data + rbdu->offset,
rbdu->size,
&slice_hdr,
&priv->seq_hdr
);
result = gst_vc1_parse_slice_header (rbdu->data + rbdu->offset,
rbdu->size, &slice_hdr, &priv->seq_hdr);
if (result != GST_VC1_PARSER_OK) {
GST_ERROR ("failed to parse slice layer");
return get_status (result);
@ -1022,15 +1039,13 @@ decode_rbdu(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
if (ebdu->size < 4) {
memcpy (rbdu_buffer, ebdu->data + ebdu->offset, ebdu->size);
rbdu_buffer_size = ebdu->size;
}
else {
} else {
guint8 *const bdu_buffer = ebdu->data + ebdu->offset;
for (i = 0, j = 0; i < ebdu->size; i++) {
if (i >= 2 && i < ebdu->size - 1 &&
bdu_buffer[i - 1] == 0x00 &&
bdu_buffer[i - 2] == 0x00 &&
bdu_buffer[i ] == 0x03 &&
bdu_buffer[i + 1] <= 0x03)
bdu_buffer[i] == 0x03 && bdu_buffer[i + 1] <= 0x03)
i++;
rbdu_buffer[j++] = bdu_buffer[i];
}
@ -1096,8 +1111,7 @@ decode_buffer(GstVaapiDecoderVC1 *decoder, guchar *buf, guint buf_size)
ebdu.type = GST_VC1_FRAME;
ebdu.sc_offset = 0;
ebdu.offset = 0;
}
else {
} else {
ebdu.type = buf[3];
ebdu.sc_offset = 0;
ebdu.offset = 4;
@ -1111,8 +1125,7 @@ static GstVaapiDecoderStatus
gst_vaapi_decoder_vc1_decode_codec_data (GstVaapiDecoder * base_decoder,
const guchar * buf, guint buf_size)
{
GstVaapiDecoderVC1 * const decoder =
GST_VAAPI_DECODER_VC1_CAST(base_decoder);
GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
GstVaapiDecoderVC1Private *const priv = &decoder->priv;
GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
GstVaapiDecoderStatus status;
@ -1173,11 +1186,7 @@ gst_vaapi_decoder_vc1_decode_codec_data(GstVaapiDecoder *base_decoder,
ofs = 0;
do {
result = gst_vc1_identify_next_bdu(
buf + ofs,
buf_size - ofs,
&ebdu
);
result = gst_vc1_identify_next_bdu (buf + ofs, buf_size - ofs, &ebdu);
switch (result) {
case GST_VC1_PARSER_NO_BDU_END:
@ -1207,8 +1216,8 @@ ensure_decoder(GstVaapiDecoderVC1 *decoder)
if (!priv->is_opened)
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
status = gst_vaapi_decoder_decode_codec_data(
GST_VAAPI_DECODER_CAST(decoder));
status =
gst_vaapi_decoder_decode_codec_data (GST_VAAPI_DECODER_CAST (decoder));
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
}
@ -1226,8 +1235,7 @@ static GstVaapiDecoderStatus
gst_vaapi_decoder_vc1_parse (GstVaapiDecoder * base_decoder,
GstAdapter * adapter, gboolean at_eos, GstVaapiDecoderUnit * unit)
{
GstVaapiDecoderVC1 * const decoder =
GST_VAAPI_DECODER_VC1_CAST(base_decoder);
GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
GstVaapiDecoderVC1Private *const priv = &decoder->priv;
GstVaapiDecoderStatus status;
guint8 bdu_type;
@ -1246,8 +1254,7 @@ gst_vaapi_decoder_vc1_parse(GstVaapiDecoder *base_decoder,
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
buf_size = size;
bdu_type = GST_VC1_FRAME;
}
else {
} else {
if (size < 4)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
@ -1297,8 +1304,7 @@ static GstVaapiDecoderStatus
gst_vaapi_decoder_vc1_decode (GstVaapiDecoder * base_decoder,
GstVaapiDecoderUnit * unit)
{
GstVaapiDecoderVC1 * const decoder =
GST_VAAPI_DECODER_VC1_CAST(base_decoder);
GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
GstVaapiDecoderStatus status;
GstBuffer *const buffer =
GST_VAAPI_DECODER_CODEC_FRAME (decoder)->input_buffer;
@ -1324,8 +1330,7 @@ static GstVaapiDecoderStatus
gst_vaapi_decoder_vc1_start_frame (GstVaapiDecoder * base_decoder,
GstVaapiDecoderUnit * unit)
{
GstVaapiDecoderVC1 * const decoder =
GST_VAAPI_DECODER_VC1_CAST(base_decoder);
GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
GstVaapiDecoderVC1Private *const priv = &decoder->priv;
GstVaapiDecoderStatus status;
GstVaapiPicture *picture;
@ -1374,8 +1379,7 @@ gst_vaapi_decoder_vc1_start_frame(GstVaapiDecoder *base_decoder,
static GstVaapiDecoderStatus
gst_vaapi_decoder_vc1_end_frame (GstVaapiDecoder * base_decoder)
{
GstVaapiDecoderVC1 * const decoder =
GST_VAAPI_DECODER_VC1_CAST(base_decoder);
GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
return decode_current_picture (decoder);
}
@ -1383,8 +1387,7 @@ gst_vaapi_decoder_vc1_end_frame(GstVaapiDecoder *base_decoder)
static GstVaapiDecoderStatus
gst_vaapi_decoder_vc1_flush (GstVaapiDecoder * base_decoder)
{
GstVaapiDecoderVC1 * const decoder =
GST_VAAPI_DECODER_VC1_CAST(base_decoder);
GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
GstVaapiDecoderVC1Private *const priv = &decoder->priv;
gst_vaapi_dpb_flush (priv->dpb);
@ -1409,8 +1412,7 @@ gst_vaapi_decoder_vc1_class_init(GstVaapiDecoderVC1Class *klass)
decoder_class->end_frame = gst_vaapi_decoder_vc1_end_frame;
decoder_class->flush = gst_vaapi_decoder_vc1_flush;
decoder_class->decode_codec_data =
gst_vaapi_decoder_vc1_decode_codec_data;
decoder_class->decode_codec_data = gst_vaapi_decoder_vc1_decode_codec_data;
}
static inline const GstVaapiDecoderClass *

View file

@ -47,8 +47,7 @@
static gboolean
_gst_vaapi_image_map (GstVaapiImage * image, GstVaapiImageRaw * raw_image);
static gboolean
_gst_vaapi_image_unmap(GstVaapiImage *image);
static gboolean _gst_vaapi_image_unmap (GstVaapiImage * image);
static gboolean
_gst_vaapi_image_set_image (GstVaapiImage * image, const VAImage * va_image);
@ -141,13 +140,9 @@ _gst_vaapi_image_create(GstVaapiImage *image, GstVideoFormat format)
return FALSE;
GST_VAAPI_DISPLAY_LOCK (display);
status = vaCreateImage(
GST_VAAPI_DISPLAY_VADISPLAY(display),
status = vaCreateImage (GST_VAAPI_DISPLAY_VADISPLAY (display),
(VAImageFormat *) va_format,
image->width,
image->height,
&image->internal_image
);
image->width, image->height, &image->internal_image);
GST_VAAPI_DISPLAY_UNLOCK (display);
if (status != VA_STATUS_SUCCESS ||
image->internal_image.format.fourcc != va_format->fourcc)
@ -220,17 +215,14 @@ gst_vaapi_image_init(GstVaapiImage *image)
static void
gst_vaapi_image_class_init (GstVaapiImageClass * klass)
{
GstVaapiObjectClass * const object_class =
GST_VAAPI_OBJECT_CLASS(klass);
GstVaapiObjectClass *const object_class = GST_VAAPI_OBJECT_CLASS (klass);
object_class->init = (GstVaapiObjectInitFunc) gst_vaapi_image_init;
}
#define gst_vaapi_image_finalize gst_vaapi_image_destroy
GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE(
GstVaapiImage,
gst_vaapi_image,
gst_vaapi_image_class_init(&g_class))
GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE (GstVaapiImage,
gst_vaapi_image, gst_vaapi_image_class_init (&g_class))
/**
* gst_vaapi_image_new:
@ -244,13 +236,8 @@ GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE(
*
* Return value: the newly allocated #GstVaapiImage object
*/
GstVaapiImage *
gst_vaapi_image_new(
GstVaapiDisplay *display,
GstVideoFormat format,
guint width,
guint height
)
GstVaapiImage *gst_vaapi_image_new (GstVaapiDisplay * display,
GstVideoFormat format, guint width, guint height)
{
GstVaapiImage *image;
@ -471,7 +458,8 @@ gst_vaapi_image_get_height(GstVaapiImage *image)
* Retrieves the dimensions of a #GstVaapiImage.
*/
void
gst_vaapi_image_get_size(GstVaapiImage *image, guint *pwidth, guint *pheight)
gst_vaapi_image_get_size (GstVaapiImage * image, guint * pwidth,
guint * pheight)
{
g_return_if_fail (image != NULL);
@ -554,11 +542,8 @@ _gst_vaapi_image_map(GstVaapiImage *image, GstVaapiImageRaw *raw_image)
return FALSE;
GST_VAAPI_DISPLAY_LOCK (display);
status = vaMapBuffer(
GST_VAAPI_DISPLAY_VADISPLAY(display),
image->image.buf,
(void **)&image->image_data
);
status = vaMapBuffer (GST_VAAPI_DISPLAY_VADISPLAY (display),
image->image.buf, (void **) &image->image_data);
GST_VAAPI_DISPLAY_UNLOCK (display);
if (!vaapi_check_status (status, "vaMapBuffer()"))
return FALSE;
@ -610,10 +595,8 @@ _gst_vaapi_image_unmap(GstVaapiImage *image)
return FALSE;
GST_VAAPI_DISPLAY_LOCK (display);
status = vaUnmapBuffer(
GST_VAAPI_DISPLAY_VADISPLAY(display),
image->image.buf
);
status = vaUnmapBuffer (GST_VAAPI_DISPLAY_VADISPLAY (display),
image->image.buf);
GST_VAAPI_DISPLAY_UNLOCK (display);
if (!vaapi_check_status (status, "vaUnmapBuffer()"))
return FALSE;
@ -717,14 +700,9 @@ init_image_from_buffer(GstVaapiImageRaw *raw_image, GstBuffer *buffer)
/* Copy N lines of an image */
static inline void
memcpy_pic(
guchar *dst,
memcpy_pic (guchar * dst,
guint dst_stride,
const guchar *src,
guint src_stride,
guint len,
guint height
)
const guchar * src, guint src_stride, guint len, guint height)
{
guint i;
@ -737,11 +715,8 @@ memcpy_pic(
/* Copy NV12 images */
static void
copy_image_NV12(
GstVaapiImageRaw *dst_image,
GstVaapiImageRaw *src_image,
const GstVaapiRectangle *rect
)
copy_image_NV12 (GstVaapiImageRaw * dst_image,
GstVaapiImageRaw * src_image, const GstVaapiRectangle * rect)
{
guchar *dst, *src;
guint dst_stride, src_stride;
@ -763,11 +738,8 @@ copy_image_NV12(
/* Copy YV12 images */
static void
copy_image_YV12(
GstVaapiImageRaw *dst_image,
GstVaapiImageRaw *src_image,
const GstVaapiRectangle *rect
)
copy_image_YV12 (GstVaapiImageRaw * dst_image,
GstVaapiImageRaw * src_image, const GstVaapiRectangle * rect)
{
guchar *dst, *src;
guint dst_stride, src_stride;
@ -796,11 +768,8 @@ copy_image_YV12(
/* Copy YUY2 images */
static void
copy_image_YUY2(
GstVaapiImageRaw *dst_image,
GstVaapiImageRaw *src_image,
const GstVaapiRectangle *rect
)
copy_image_YUY2 (GstVaapiImageRaw * dst_image,
GstVaapiImageRaw * src_image, const GstVaapiRectangle * rect)
{
guchar *dst, *src;
guint dst_stride, src_stride;
@ -815,11 +784,8 @@ copy_image_YUY2(
/* Copy RGBA images */
static void
copy_image_RGBA(
GstVaapiImageRaw *dst_image,
GstVaapiImageRaw *src_image,
const GstVaapiRectangle *rect
)
copy_image_RGBA (GstVaapiImageRaw * dst_image,
GstVaapiImageRaw * src_image, const GstVaapiRectangle * rect)
{
guchar *dst, *src;
guint dst_stride, src_stride;
@ -832,11 +798,8 @@ copy_image_RGBA(
}
static gboolean
copy_image(
GstVaapiImageRaw *dst_image,
GstVaapiImageRaw *src_image,
const GstVaapiRectangle *rect
)
copy_image (GstVaapiImageRaw * dst_image,
GstVaapiImageRaw * src_image, const GstVaapiRectangle * rect)
{
GstVaapiRectangle default_rect;
@ -851,8 +814,7 @@ copy_image(
rect->y >= src_image->height ||
rect->y + rect->height > src_image->height)
return FALSE;
}
else {
} else {
default_rect.x = 0;
default_rect.y = 0;
default_rect.width = src_image->width;
@ -898,11 +860,8 @@ copy_image(
* Return value: %TRUE on success
*/
gboolean
gst_vaapi_image_get_buffer(
GstVaapiImage *image,
GstBuffer *buffer,
GstVaapiRectangle *rect
)
gst_vaapi_image_get_buffer (GstVaapiImage * image,
GstBuffer * buffer, GstVaapiRectangle * rect)
{
GstVaapiImageRaw dst_image, src_image;
gboolean success;
@ -941,11 +900,8 @@ gst_vaapi_image_get_buffer(
* Return value: %TRUE on success
*/
gboolean
gst_vaapi_image_get_raw(
GstVaapiImage *image,
GstVaapiImageRaw *dst_image,
GstVaapiRectangle *rect
)
gst_vaapi_image_get_raw (GstVaapiImage * image,
GstVaapiImageRaw * dst_image, GstVaapiRectangle * rect)
{
GstVaapiImageRaw src_image;
gboolean success;
@ -976,11 +932,8 @@ gst_vaapi_image_get_raw(
* Return value: %TRUE on success
*/
gboolean
gst_vaapi_image_update_from_buffer(
GstVaapiImage *image,
GstBuffer *buffer,
GstVaapiRectangle *rect
)
gst_vaapi_image_update_from_buffer (GstVaapiImage * image,
GstBuffer * buffer, GstVaapiRectangle * rect)
{
GstVaapiImageRaw dst_image, src_image;
gboolean success;
@ -1020,11 +973,8 @@ gst_vaapi_image_update_from_buffer(
* Return value: %TRUE on success
*/
gboolean
gst_vaapi_image_update_from_raw(
GstVaapiImage *image,
GstVaapiImageRaw *src_image,
GstVaapiRectangle *rect
)
gst_vaapi_image_update_from_raw (GstVaapiImage * image,
GstVaapiImageRaw * src_image, GstVaapiRectangle * rect)
{
GstVaapiImageRaw dst_image;
gboolean success;

View file

@ -212,7 +212,8 @@ gst_vaapi_pixmap_get_height(GstVaapiPixmap *pixmap)
* Retrieves the dimensions of a #GstVaapiPixmap.
*/
void
gst_vaapi_pixmap_get_size(GstVaapiPixmap *pixmap, guint *width, guint *height)
gst_vaapi_pixmap_get_size (GstVaapiPixmap * pixmap, guint * width,
guint * height)
{
g_return_if_fail (pixmap != NULL);
@ -240,8 +241,8 @@ gst_vaapi_pixmap_get_size(GstVaapiPixmap *pixmap, guint *width, guint *height)
* Return value: %TRUE on success
*/
gboolean
gst_vaapi_pixmap_put_surface(GstVaapiPixmap *pixmap, GstVaapiSurface *surface,
const GstVaapiRectangle *crop_rect, guint flags)
gst_vaapi_pixmap_put_surface (GstVaapiPixmap * pixmap,
GstVaapiSurface * surface, const GstVaapiRectangle * crop_rect, guint flags)
{
GstVaapiRectangle src_rect;

View file

@ -40,11 +40,13 @@
typedef struct _GstVaapiPixmapX11Class GstVaapiPixmapX11Class;
struct _GstVaapiPixmapX11 {
struct _GstVaapiPixmapX11
{
GstVaapiPixmap parent_instance;
};
struct _GstVaapiPixmapX11Class {
struct _GstVaapiPixmapX11Class
{
GstVaapiPixmapClass parent_class;
};
@ -64,8 +66,9 @@ gst_vaapi_pixmap_x11_create_from_xid(GstVaapiPixmap *pixmap, Pixmap xid)
if (!success)
return FALSE;
pixmap->format = gst_vaapi_display_x11_get_pixmap_format(
GST_VAAPI_OBJECT_DISPLAY_X11(pixmap), depth);
pixmap->format =
gst_vaapi_display_x11_get_pixmap_format (GST_VAAPI_OBJECT_DISPLAY_X11
(pixmap), depth);
if (pixmap->format == GST_VIDEO_FORMAT_UNKNOWN)
return FALSE;
return TRUE;
@ -126,8 +129,7 @@ gst_vaapi_pixmap_x11_render(GstVaapiPixmap *pixmap, GstVaapiSurface *surface,
return FALSE;
GST_VAAPI_OBJECT_LOCK_DISPLAY (pixmap);
status = vaPutSurface(
GST_VAAPI_OBJECT_VADISPLAY(pixmap),
status = vaPutSurface (GST_VAAPI_OBJECT_VADISPLAY (pixmap),
surface_id,
GST_VAAPI_OBJECT_ID (pixmap),
crop_rect->x, crop_rect->y,
@ -135,8 +137,7 @@ gst_vaapi_pixmap_x11_render(GstVaapiPixmap *pixmap, GstVaapiSurface *surface,
0, 0,
GST_VAAPI_PIXMAP_WIDTH (pixmap),
GST_VAAPI_PIXMAP_HEIGHT (pixmap),
NULL, 0,
from_GstVaapiSurfaceRenderFlags(flags)
NULL, 0, from_GstVaapiSurfaceRenderFlags (flags)
);
GST_VAAPI_OBJECT_UNLOCK_DISPLAY (pixmap);
if (!vaapi_check_status (status, "vaPutSurface() [pixmap]"))
@ -147,10 +148,8 @@ gst_vaapi_pixmap_x11_render(GstVaapiPixmap *pixmap, GstVaapiSurface *surface,
void
gst_vaapi_pixmap_x11_class_init (GstVaapiPixmapX11Class * klass)
{
GstVaapiObjectClass * const object_class =
GST_VAAPI_OBJECT_CLASS(klass);
GstVaapiPixmapClass * const pixmap_class =
GST_VAAPI_PIXMAP_CLASS(klass);
GstVaapiObjectClass *const object_class = GST_VAAPI_OBJECT_CLASS (klass);
GstVaapiPixmapClass *const pixmap_class = GST_VAAPI_PIXMAP_CLASS (klass);
object_class->finalize = (GstVaapiObjectFinalizeFunc)
gst_vaapi_pixmap_x11_destroy;
@ -162,10 +161,8 @@ gst_vaapi_pixmap_x11_class_init(GstVaapiPixmapX11Class *klass)
#define gst_vaapi_pixmap_x11_finalize \
gst_vaapi_pixmap_x11_destroy
GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE(
GstVaapiPixmapX11,
gst_vaapi_pixmap_x11,
gst_vaapi_pixmap_x11_class_init(&g_class))
GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE (GstVaapiPixmapX11,
gst_vaapi_pixmap_x11, gst_vaapi_pixmap_x11_class_init (&g_class))
/**
* gst_vaapi_pixmap_x11_new:
@ -179,17 +176,17 @@ GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE(
*
* Return value: the newly allocated #GstVaapiPixmap object
*/
GstVaapiPixmap *
gst_vaapi_pixmap_x11_new(GstVaapiDisplay *display, GstVideoFormat format,
guint width, guint height)
GstVaapiPixmap *gst_vaapi_pixmap_x11_new (GstVaapiDisplay * display,
GstVideoFormat format, guint width, guint height)
{
GST_DEBUG ("new pixmap, format %s, size %ux%u",
gst_vaapi_video_format_to_string (format), width, height);
g_return_val_if_fail (GST_VAAPI_IS_DISPLAY_X11 (display), NULL);
return gst_vaapi_pixmap_new(GST_VAAPI_PIXMAP_CLASS(
gst_vaapi_pixmap_x11_class()), display, format, width, height);
return
gst_vaapi_pixmap_new (GST_VAAPI_PIXMAP_CLASS (gst_vaapi_pixmap_x11_class
()), display, format, width, height);
}
/**
@ -212,8 +209,9 @@ gst_vaapi_pixmap_x11_new_with_xid(GstVaapiDisplay *display, Pixmap xid)
g_return_val_if_fail (GST_VAAPI_IS_DISPLAY_X11 (display), NULL);
g_return_val_if_fail (xid != None, NULL);
return gst_vaapi_pixmap_new_from_native(GST_VAAPI_PIXMAP_CLASS(
gst_vaapi_pixmap_x11_class()), display, GSIZE_TO_POINTER(xid));
return
gst_vaapi_pixmap_new_from_native (GST_VAAPI_PIXMAP_CLASS
(gst_vaapi_pixmap_x11_class ()), display, GSIZE_TO_POINTER (xid));
}
/**

View file

@ -38,19 +38,22 @@ typedef struct _GstVaapiCodecMap GstVaapiCodecMap;
typedef struct _GstVaapiProfileMap GstVaapiProfileMap;
typedef struct _GstVaapiEntrypointMap GstVaapiEntrypointMap;
struct _GstVaapiCodecMap {
struct _GstVaapiCodecMap
{
GstVaapiCodec codec;
const gchar *name;
};
struct _GstVaapiProfileMap {
struct _GstVaapiProfileMap
{
GstVaapiProfile profile;
VAProfile va_profile;
const char *media_str;
const gchar *profile_str;
};
struct _GstVaapiEntrypointMap {
struct _GstVaapiEntrypointMap
{
GstVaapiEntrypoint entrypoint;
VAEntrypoint va_entrypoint;
};
@ -74,85 +77,63 @@ static const GstVaapiCodecMap gst_vaapi_codecs[] = {
/* Profiles */
static const GstVaapiProfileMap gst_vaapi_profiles[] = {
{GST_VAAPI_PROFILE_MPEG2_SIMPLE, VAProfileMPEG2Simple,
"video/mpeg, mpegversion=2", "simple"
},
"video/mpeg, mpegversion=2", "simple"},
{GST_VAAPI_PROFILE_MPEG2_MAIN, VAProfileMPEG2Main,
"video/mpeg, mpegversion=2", "main"
},
"video/mpeg, mpegversion=2", "main"},
{GST_VAAPI_PROFILE_MPEG4_SIMPLE, VAProfileMPEG4Simple,
"video/mpeg, mpegversion=4", "simple"
},
"video/mpeg, mpegversion=4", "simple"},
{GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE, VAProfileMPEG4AdvancedSimple,
"video/mpeg, mpegversion=4", "advanced-simple"
},
"video/mpeg, mpegversion=4", "advanced-simple"},
{GST_VAAPI_PROFILE_MPEG4_MAIN, VAProfileMPEG4Main,
"video/mpeg, mpegversion=4", "main"
},
"video/mpeg, mpegversion=4", "main"},
{GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE, VAProfileMPEG4AdvancedSimple,
"video/x-divx, divxversion=5", "advanced-simple"
},
"video/x-divx, divxversion=5", "advanced-simple"},
{GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE, VAProfileMPEG4AdvancedSimple,
"video/x-xvid", "advanced-simple"
},
"video/x-xvid", "advanced-simple"},
#if VA_CHECK_VERSION(0,30,0)
{GST_VAAPI_PROFILE_H263_BASELINE, VAProfileH263Baseline,
"video/x-h263, variant=itu, h263version=h263", "baseline"
},
"video/x-h263, variant=itu, h263version=h263", "baseline"},
#endif
{GST_VAAPI_PROFILE_H264_BASELINE, VAProfileH264Baseline,
"video/x-h264", "baseline"
},
"video/x-h264", "baseline"},
#if VA_CHECK_VERSION(0,31,1)
{GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE,
VAProfileH264ConstrainedBaseline,
"video/x-h264", "constrained-baseline"
},
"video/x-h264", "constrained-baseline"},
#endif
{GST_VAAPI_PROFILE_H264_MAIN, VAProfileH264Main,
"video/x-h264", "main"
},
"video/x-h264", "main"},
{GST_VAAPI_PROFILE_H264_HIGH, VAProfileH264High,
"video/x-h264", "high"
},
"video/x-h264", "high"},
#if VA_CHECK_VERSION(0,35,2)
{GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH, VAProfileH264MultiviewHigh,
"video/x-h264", "multiview-high"
},
"video/x-h264", "multiview-high"},
{GST_VAAPI_PROFILE_H264_STEREO_HIGH, VAProfileH264StereoHigh,
"video/x-h264", "stereo-high"
},
"video/x-h264", "stereo-high"},
#endif
{GST_VAAPI_PROFILE_VC1_SIMPLE, VAProfileVC1Simple,
"video/x-wmv, wmvversion=3", "simple"
},
"video/x-wmv, wmvversion=3", "simple"},
{GST_VAAPI_PROFILE_VC1_MAIN, VAProfileVC1Main,
"video/x-wmv, wmvversion=3", "main"
},
"video/x-wmv, wmvversion=3", "main"},
{GST_VAAPI_PROFILE_VC1_ADVANCED, VAProfileVC1Advanced,
"video/x-wmv, wmvversion=3, format=(string)WVC1", "advanced"
},
"video/x-wmv, wmvversion=3, format=(string)WVC1", "advanced"},
#if VA_CHECK_VERSION(0,32,0)
{GST_VAAPI_PROFILE_JPEG_BASELINE, VAProfileJPEGBaseline,
"image/jpeg", NULL
},
"image/jpeg", NULL},
#endif
#if VA_CHECK_VERSION(0,35,0)
{GST_VAAPI_PROFILE_VP8, VAProfileVP8Version0_3,
"video/x-vp8", NULL
},
"video/x-vp8", NULL},
#endif
#if VA_CHECK_VERSION(0,37,0)
{GST_VAAPI_PROFILE_H265_MAIN, VAProfileHEVCMain,
"video/x-h265", "main"
},
"video/x-h265", "main"},
{GST_VAAPI_PROFILE_H265_MAIN10, VAProfileHEVCMain10,
"video/x-h265", "main-10"
},
"video/x-h265", "main-10"},
#endif
#if VA_CHECK_VERSION(0,38,0)
{GST_VAAPI_PROFILE_VP9, VAProfileVP9Profile0,
"video/x-vp9", NULL
},
"video/x-vp9", NULL},
#endif
{0,}
};
@ -295,13 +276,18 @@ gst_vaapi_profile_from_codec_data_h264(GstBuffer *buffer)
return 0;
switch (buf[1]) { /* AVCProfileIndication */
case 66: return ((buf[2] & 0x40) ?
case 66:
return ((buf[2] & 0x40) ?
GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE :
GST_VAAPI_PROFILE_H264_BASELINE);
case 77: return GST_VAAPI_PROFILE_H264_MAIN;
case 100: return GST_VAAPI_PROFILE_H264_HIGH;
case 118: return GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH;
case 128: return GST_VAAPI_PROFILE_H264_STEREO_HIGH;
case 77:
return GST_VAAPI_PROFILE_H264_MAIN;
case 100:
return GST_VAAPI_PROFILE_H264_HIGH;
case 118:
return GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH;
case 128:
return GST_VAAPI_PROFILE_H264_STEREO_HIGH;
}
return 0;
@ -323,9 +309,12 @@ gst_vaapi_profile_from_codec_data_h265(GstBuffer *buffer)
return 0;
switch (buf[1] & 0x1f) { /* HEVCProfileIndication */
case 1: return GST_VAAPI_PROFILE_H265_MAIN;
case 2: return GST_VAAPI_PROFILE_H265_MAIN10;
case 3: return GST_VAAPI_PROFILE_H265_MAIN_STILL_PICTURE;
case 1:
return GST_VAAPI_PROFILE_H265_MAIN;
case 2:
return GST_VAAPI_PROFILE_H265_MAIN10;
case 3:
return GST_VAAPI_PROFILE_H265_MAIN_STILL_PICTURE;
}
return 0;
}
@ -405,13 +394,11 @@ gst_vaapi_profile_from_caps(const GstCaps *caps)
profile = best_profile;
}
if (!profile) {
profile = gst_vaapi_profile_from_codec_data(
gst_vaapi_profile_get_codec(m->profile),
codec_data
);
if (!profile &&
WORKAROUND_QTDEMUX_NO_H263_PROFILES &&
strncmp(name, "video/x-h263", namelen) == 0) {
profile =
gst_vaapi_profile_from_codec_data (gst_vaapi_profile_get_codec
(m->profile), codec_data);
if (!profile && WORKAROUND_QTDEMUX_NO_H263_PROFILES
&& strncmp (name, "video/x-h263", namelen) == 0) {
/* HACK: qtdemux does not report profiles for h263 */
profile = m->profile;
}
@ -464,11 +451,7 @@ gst_vaapi_profile_get_caps(GstVaapiProfile profile)
caps = gst_caps_from_string (m->media_str);
if (!caps)
continue;
gst_caps_set_simple(
caps,
"profile", G_TYPE_STRING, m->profile_str,
NULL
);
gst_caps_set_simple (caps, "profile", G_TYPE_STRING, m->profile_str, NULL);
out_caps = gst_caps_merge (out_caps, caps);
}
return out_caps;

View file

@ -45,7 +45,8 @@ typedef struct _GstVaapiSubpictureClass GstVaapiSubpictureClass;
*
* A VA subpicture wrapper
*/
struct _GstVaapiSubpicture {
struct _GstVaapiSubpicture
{
/*< private > */
GstVaapiObject parent_instance;
@ -59,7 +60,8 @@ struct _GstVaapiSubpicture {
*
* A VA subpicture wrapper class
*/
struct _GstVaapiSubpictureClass {
struct _GstVaapiSubpictureClass
{
/*< private > */
GstVaapiObjectClass parent_class;
};
@ -78,10 +80,8 @@ gst_vaapi_subpicture_destroy(GstVaapiSubpicture *subpicture)
if (subpicture_id != VA_INVALID_ID) {
if (display) {
GST_VAAPI_DISPLAY_LOCK (display);
status = vaDestroySubpicture(
GST_VAAPI_DISPLAY_VADISPLAY(display),
subpicture_id
);
status = vaDestroySubpicture (GST_VAAPI_DISPLAY_VADISPLAY (display),
subpicture_id);
GST_VAAPI_DISPLAY_UNLOCK (display);
if (!vaapi_check_status (status, "vaDestroySubpicture()"))
g_warning ("failed to destroy subpicture %" GST_VAAPI_ID_FORMAT,
@ -101,11 +101,8 @@ gst_vaapi_subpicture_create(GstVaapiSubpicture *subpicture,
VAStatus status;
GST_VAAPI_DISPLAY_LOCK (display);
status = vaCreateSubpicture(
GST_VAAPI_DISPLAY_VADISPLAY(display),
GST_VAAPI_OBJECT_ID(image),
&subpicture_id
);
status = vaCreateSubpicture (GST_VAAPI_DISPLAY_VADISPLAY (display),
GST_VAAPI_OBJECT_ID (image), &subpicture_id);
GST_VAAPI_DISPLAY_UNLOCK (display);
if (!vaapi_check_status (status, "vaCreateSubpicture()"))
return FALSE;
@ -130,8 +127,8 @@ GST_VAAPI_OBJECT_DEFINE_CLASS(GstVaapiSubpicture, gst_vaapi_subpicture)
*
* Return value: the newly allocated #GstVaapiSubpicture object
*/
GstVaapiSubpicture *
gst_vaapi_subpicture_new(GstVaapiImage *image, guint flags)
GstVaapiSubpicture *gst_vaapi_subpicture_new (GstVaapiImage * image,
guint flags)
{
GstVaapiSubpicture *subpicture;
GstVaapiDisplay *display;
@ -177,10 +174,8 @@ error:
* Return value: the newly allocated #GstVaapiSubpicture object
*/
GstVaapiSubpicture *
gst_vaapi_subpicture_new_from_overlay_rectangle(
GstVaapiDisplay *display,
GstVideoOverlayRectangle *rect
)
gst_vaapi_subpicture_new_from_overlay_rectangle (GstVaapiDisplay * display,
GstVideoOverlayRectangle * rect)
{
GstVaapiSubpicture *subpicture;
GstVideoFormat format;
@ -205,8 +200,10 @@ gst_vaapi_subpicture_new_from_overlay_rectangle(
if (!gst_vaapi_display_has_subpicture_format (display, format, &hw_flags))
return NULL;
flags = hw_flags & from_GstVideoOverlayFormatFlags(
gst_video_overlay_rectangle_get_flags(rect));
flags =
hw_flags &
from_GstVideoOverlayFormatFlags (gst_video_overlay_rectangle_get_flags
(rect));
buffer = gst_video_overlay_rectangle_get_pixels_unscaled_argb (rect,
to_GstVideoOverlayFormatFlags (flags));
@ -367,11 +364,8 @@ gst_vaapi_subpicture_set_global_alpha(GstVaapiSubpicture *subpicture,
display = GST_VAAPI_OBJECT_DISPLAY (subpicture);
GST_VAAPI_DISPLAY_LOCK (display);
status = vaSetSubpictureGlobalAlpha(
GST_VAAPI_DISPLAY_VADISPLAY(display),
GST_VAAPI_OBJECT_ID(subpicture),
global_alpha
);
status = vaSetSubpictureGlobalAlpha (GST_VAAPI_DISPLAY_VADISPLAY (display),
GST_VAAPI_OBJECT_ID (subpicture), global_alpha);
GST_VAAPI_DISPLAY_UNLOCK (display);
if (!vaapi_check_status (status, "vaSetSubpictureGlobalAlpha()"))
return FALSE;