vdpau: MPEG1 decoding now gives recognizable output

Authored by Carl-Anton Ingmarsson on 2009-03-31 22:53:40 +02:00; committed by Jan Schmidt
parent f70ddb6605
commit a727e6a022
7 changed files with 528 additions and 60 deletions
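
Note (not part of the commit): besides switching the YV12 readback to proper offsets and strides, this change stamps every output buffer from a running frame counter and the negotiated framerate. A minimal standalone sketch of that arithmetic, assuming a hypothetical 25/1 fps stream and built against GStreamer 0.10:

/* Sketch only: the frame-count based timestamping introduced in
 * gst_vdpau_decoder_push_video_surface, shown in isolation.
 * Build: gcc sketch.c $(pkg-config --cflags --libs gstreamer-0.10)
 */
#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  guint64 frame_nr = 50;        /* stands in for dec->frame_nr */
  gint fps_n = 25, fps_d = 1;   /* dec->framerate_numerator / _denominator */
  GstClockTime timestamp, duration;

  gst_init (&argc, &argv);

  /* GST_BUFFER_TIMESTAMP: frame_nr * fps_d / fps_n seconds */
  timestamp = gst_util_uint64_scale_int (GST_SECOND * frame_nr, fps_d, fps_n);
  /* GST_BUFFER_DURATION: one frame period */
  duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);

  g_print ("timestamp %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration));

  return 0;
}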

Makefile.am (View file)

@ -6,7 +6,8 @@ libgstvdpau_la_SOURCES = \
mpegutil.c
libgstvdpau_la_CFLAGS = $(GST_CFLAGS) $(X11_CFLAGS) -Ivdpau
libgstvdpau_la_LIBADD = $(GST_LIBS) $(X11_LIBS) -lvdpau
libgstvdpau_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR) -lvdpau
libgstvdpau_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvdpau_la_LIBTOOLFLAGS = --tag=disable-static

gstvdpaudecoder.c (View file)

@ -24,7 +24,7 @@
#endif
#include <gst/gst.h>
#include <gst/controller/gstcontroller.h>
#include <gst/video/video.h>
#include "gstvdpaudecoder.h"
#include <vdpau/vdpau_x11.h>
@ -64,11 +64,12 @@ static void gst_vdpaudecoder_set_property (GObject * object, guint prop_id,
static void gst_vdpaudecoder_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
gboolean
gst_vdpaudecoder_push_video_surface (GstVdpauDecoder * dec,
GstFlowReturn
gst_vdpau_decoder_push_video_surface (GstVdpauDecoder * dec,
VdpVideoSurface surface)
{
VdpauFunctions *f;
GstBuffer *buffer;
f = dec->functions;
@ -77,34 +78,98 @@ gst_vdpaudecoder_push_video_surface (GstVdpauDecoder * dec,
{
gint size;
GstFlowReturn result;
GstBuffer *buffer;
VdpStatus status;
guint8 *data[3];
guint32 stride[3];
size = dec->height * dec->width + dec->height * dec->width / 2;
size =
gst_video_format_get_size (GST_VIDEO_FORMAT_YV12, dec->width,
dec->height);
result =
gst_pad_alloc_buffer_and_set_caps (dec->src, GST_BUFFER_OFFSET_NONE,
size, GST_PAD_CAPS (dec->src), &buffer);
if (G_UNLIKELY (result != GST_FLOW_OK))
return FALSE;
return result;
data[0] = GST_BUFFER_DATA (buffer);
data[1] = data[0] + dec->height * dec->width;
data[2] = data[1] + dec->height * dec->width / 4;
data[0] = GST_BUFFER_DATA (buffer) +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
0, dec->width, dec->height);
data[1] = data[0] +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
2, dec->width, dec->height);
data[2] = data[0] +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
1, dec->width, dec->height);
stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
0, dec->width);
stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
2, dec->width);
stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
1, dec->width);
status =
f->vdp_video_surface_get_bits_ycbcr (surface, VDP_YCBCR_FORMAT_YV12,
(void *) data, NULL);
if (G_UNLIKELY (status != VDP_STATUS_OK))
return FALSE;
(void *) data, stride);
if (G_UNLIKELY (status != VDP_STATUS_OK)) {
GST_ELEMENT_ERROR (dec, RESOURCE, READ,
("Couldn't get data from vdpau"),
("Error returned from vdpau was: %s",
f->vdp_get_error_string (status)));
return GST_FLOW_ERROR;
}
break;
}
case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
{
gint size;
GstFlowReturn result;
VdpStatus status;
guint8 *data[2];
guint32 stride[2];
size = dec->width * dec->height + dec->width * dec->height / 2;
result =
gst_pad_alloc_buffer_and_set_caps (dec->src, GST_BUFFER_OFFSET_NONE,
size, GST_PAD_CAPS (dec->src), &buffer);
if (G_UNLIKELY (result != GST_FLOW_OK))
return result;
data[0] = GST_BUFFER_DATA (buffer);
data[1] = data[0] + dec->width * dec->height;
stride[0] = dec->width;
stride[1] = dec->width;
status =
f->vdp_video_surface_get_bits_ycbcr (surface, VDP_YCBCR_FORMAT_NV12,
(void *) data, stride);
if (G_UNLIKELY (status != VDP_STATUS_OK)) {
GST_ELEMENT_ERROR (dec, RESOURCE, READ,
("Couldn't get data from vdpau"),
("Error returned from vdpau was: %s",
f->vdp_get_error_string (status)));
return GST_FLOW_ERROR;
}
break;
}
default:
break;
}
return TRUE;
GST_BUFFER_TIMESTAMP (buffer) =
gst_util_uint64_scale_int (GST_SECOND * dec->frame_nr,
dec->framerate_denominator, dec->framerate_numerator);
GST_BUFFER_DURATION (buffer) =
gst_util_uint64_scale_int (GST_SECOND, dec->framerate_denominator,
dec->framerate_numerator);
GST_BUFFER_OFFSET (buffer) = dec->frame_nr;
dec->frame_nr++;
GST_BUFFER_OFFSET_END (buffer) = dec->frame_nr;
return gst_pad_push (dec->src, buffer);
}
typedef struct
@ -149,6 +214,38 @@ static VdpauFormats formats[6] = {
}
};
VdpVideoSurface
gst_vdpau_decoder_create_video_surface (GstVdpauDecoder * dec)
{
VdpauFunctions *f;
VdpChromaType chroma_type;
gint i;
VdpStatus status;
VdpVideoSurface surface;
f = dec->functions;
chroma_type = VDP_CHROMA_TYPE_422;
for (i = 0; i < 6; i++) {
if (formats[i].fourcc == dec->format) {
chroma_type = formats[i].chroma_type;
break;
}
}
status = f->vdp_video_surface_create (dec->device, chroma_type, dec->width,
dec->height, &surface);
if (status != VDP_STATUS_OK) {
GST_ELEMENT_ERROR (dec, RESOURCE, READ,
("Couldn't create a VdpVideoSurface"),
("Error returned from vdpau was: %s",
f->vdp_get_error_string (status)));
return VDP_INVALID_HANDLE;
}
return surface;
}
static GstCaps *
gst_vdpaudecoder_get_vdpau_support (GstVdpauDecoder * dec)
{
@ -355,6 +452,7 @@ gst_vdpaudecoder_sink_set_caps (GstPad * pad, GstCaps * caps)
GstStructure *structure;
gint width, height;
gint framerate_numerator, framerate_denominator;
gint par_numerator, par_denominator;
guint32 fourcc_format;
gboolean res;
@ -363,21 +461,25 @@ gst_vdpaudecoder_sink_set_caps (GstPad * pad, GstCaps * caps)
gst_structure_get_int (structure, "height", &height);
gst_structure_get_fraction (structure, "framerate",
&framerate_numerator, &framerate_denominator);
gst_structure_get_fraction (structure, "pixel-aspect-ratio",
&par_numerator, &par_denominator);
src_caps = gst_pad_get_allowed_caps (dec->src);
if (G_UNLIKELY (!src_caps))
return FALSE;
structure = gst_caps_get_structure (src_caps, 0);
new_caps = gst_caps_copy_nth (src_caps, 0);
gst_caps_unref (src_caps);
structure = gst_caps_get_structure (new_caps, 0);
gst_structure_get_fourcc (structure, "format", &fourcc_format);
gst_structure_set (structure,
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, framerate_numerator,
framerate_denominator, NULL);
framerate_denominator,
"pixel-aspect-ratio", GST_TYPE_FRACTION, par_numerator,
par_denominator, NULL);
new_caps = gst_caps_copy_nth (src_caps, 0);
gst_caps_unref (src_caps);
gst_pad_fixate_caps (dec->src, new_caps);
res = gst_pad_set_caps (dec->src, new_caps);
@ -388,6 +490,8 @@ gst_vdpaudecoder_sink_set_caps (GstPad * pad, GstCaps * caps)
dec->width = width;
dec->height = height;
dec->framerate_numerator = framerate_numerator;
dec->framerate_denominator = framerate_denominator;
dec->format = fourcc_format;
if (dec_class->set_caps && !dec_class->set_caps (dec, caps))
@ -464,8 +568,12 @@ gst_vdpaudecoder_init (GstVdpauDecoder * dec, GstVdpauDecoderClass * klass)
dec->height = 0;
dec->width = 0;
dec->framerate_numerator = 0;
dec->framerate_denominator = 0;
dec->format = 0;
dec->frame_nr = 0;
dec->functions = g_slice_new0 (VdpauFunctions);
dec->src = gst_pad_new_from_static_template (&src_template, "src");

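Note (not part of the commit): the gstvdpaudecoder.c hunks above stop computing the YV12 plane layout by hand and instead ask libgstvideo for the per-component offsets and row strides, which presumably is what turned the previously garbled output into something recognizable. A minimal sketch of that layout query, assuming a hypothetical 320x240 frame; component 0 is Y, and since YV12 stores the V plane ahead of the U plane the second pointer uses component 2 (V) and the third component 1 (U):

/* Sketch only: deriving YV12 plane pointers and strides from the
 * GStreamer 0.10 libgstvideo helpers instead of width/height math.
 * Build: gcc sketch.c $(pkg-config --cflags --libs gstreamer-video-0.10)
 */
#include <gst/gst.h>
#include <gst/video/video.h>

int
main (int argc, char *argv[])
{
  gint width = 320, height = 240;
  gint size;
  guint8 *base;
  guint8 *data[3];
  guint32 stride[3];

  gst_init (&argc, &argv);

  size = gst_video_format_get_size (GST_VIDEO_FORMAT_YV12, width, height);
  base = g_malloc0 (size);      /* stands in for GST_BUFFER_DATA (buffer) */

  data[0] = base + gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
      0, width, height);
  data[1] = base + gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
      2, width, height);
  data[2] = base + gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
      1, width, height);

  stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12, 0, width);
  stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12, 2, width);
  stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12, 1, width);

  g_print ("size %d, offsets %d %d %d, strides %u %u %u\n", size,
      (gint) (data[0] - base), (gint) (data[1] - base), (gint) (data[2] - base),
      stride[0], stride[1], stride[2]);

  g_free (base);
  return 0;
}

Whether the old width-based math and these helpers agree depends on libgstvideo's stride rounding, so deferring to the helpers is the safer choice.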
gstvdpaudecoder.h (View file)

@ -56,8 +56,11 @@ struct _GstVdpauDecoder {
GstCaps *src_caps;
gint width, height;
gint framerate_numerator, framerate_denominator;
guint32 format;
gint frame_nr;
gboolean silent;
};
@ -88,7 +91,9 @@ struct _VdpauFunctions {
GType gst_vdpaudecoder_get_type (void);
gboolean gst_vdpaudecoder_push_video_surface (GstVdpauDecoder * dec, VdpVideoSurface surface);
gboolean gst_vdpau_decoder_push_video_surface (GstVdpauDecoder * dec,
VdpVideoSurface surface);
VdpVideoSurface gst_vdpau_decoder_create_video_surface (GstVdpauDecoder *dec);
G_END_DECLS

gstvdpaumpegdecoder.c (View file)

@ -59,6 +59,7 @@
#endif
#include <gst/gst.h>
#include <string.h>
#include "mpegutil.h"
#include "gstvdpaumpegdecoder.h"
@ -103,7 +104,9 @@ gst_vdpau_mpeg_decoder_set_caps (GstVdpauDecoder * dec, GstCaps * caps)
{
GstVdpauMpegDecoder *mpeg_dec;
GstStructure *structure;
gint version;
const GValue *value;
GstBuffer *codec_data;
MPEGSeqHdr hdr = { 0, };
VdpDecoderProfile profile;
VdpauFunctions *f;
VdpStatus status;
@ -111,19 +114,15 @@ gst_vdpau_mpeg_decoder_set_caps (GstVdpauDecoder * dec, GstCaps * caps)
mpeg_dec = GST_VDPAU_MPEG_DECODER (dec);
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "mpegversion", &version);
if (version == 1)
gst_structure_get_int (structure, "mpegversion", &mpeg_dec->version);
if (mpeg_dec->version == 1)
profile = VDP_DECODER_PROFILE_MPEG1;
else {
const GValue *value;
GstBuffer *codec_data;
MPEGSeqHdr hdr = { 0, };
value = gst_structure_get_value (structure, "codec_data");
codec_data = gst_value_get_buffer (value);
mpeg_util_parse_sequence_hdr (&hdr, GST_BUFFER_DATA (codec_data),
GST_BUFFER_DATA (codec_data) + GST_BUFFER_SIZE (codec_data));
if (mpeg_dec->version != 1) {
switch (hdr.profile) {
case 5:
profile = VDP_DECODER_PROFILE_MPEG2_SIMPLE;
@ -133,6 +132,10 @@ gst_vdpau_mpeg_decoder_set_caps (GstVdpauDecoder * dec, GstCaps * caps)
break;
}
}
memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
&hdr.intra_quantizer_matrix, 64);
memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
&hdr.non_intra_quantizer_matrix, 64);
f = dec->functions;
status = f->vdp_decoder_create (dec->device, profile, dec->width,
@ -147,6 +150,228 @@ gst_vdpau_mpeg_decoder_set_caps (GstVdpauDecoder * dec, GstCaps * caps)
return TRUE;
}
static GstFlowReturn
gst_vdpau_mpeg_decoder_decode (GstVdpauMpegDecoder * mpeg_dec)
{
GstVdpauDecoder *dec;
GstBuffer *buffer;
VdpVideoSurface surface;
VdpauFunctions *f;
VdpBitstreamBuffer vbit[1];
VdpStatus status;
GstFlowReturn ret;
dec = GST_VDPAU_DECODER (mpeg_dec);
buffer = gst_adapter_take_buffer (mpeg_dec->adapter,
gst_adapter_available (mpeg_dec->adapter));
if (mpeg_dec->vdp_info.picture_coding_type == P_FRAME) {
mpeg_dec->p_buffer = buffer;
}
surface =
gst_vdpau_decoder_create_video_surface (GST_VDPAU_DECODER (mpeg_dec));
f = dec->functions;
vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
vbit[0].bitstream = GST_BUFFER_DATA (buffer);
vbit[0].bitstream_bytes = GST_BUFFER_SIZE (buffer);
status = f->vdp_decoder_render (mpeg_dec->decoder, surface,
(VdpPictureInfo *) & mpeg_dec->vdp_info, 1, vbit);
gst_buffer_unref (buffer);
mpeg_dec->vdp_info.slice_count = 0;
if (status != VDP_STATUS_OK) {
GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
("Could not decode"),
("Error returned from vdpau was: %s",
f->vdp_get_error_string (status)));
if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE)
f->vdp_video_surface_destroy (mpeg_dec->vdp_info.forward_reference);
f->vdp_video_surface_destroy (surface);
return GST_FLOW_ERROR;
}
ret =
gst_vdpau_decoder_push_video_surface (GST_VDPAU_DECODER (mpeg_dec),
surface);
if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE)
f->vdp_video_surface_destroy (mpeg_dec->vdp_info.forward_reference);
mpeg_dec->vdp_info.forward_reference = surface;
return ret;
}
static gboolean
gst_vdpau_mpeg_decoder_parse_picture_coding (GstVdpauMpegDecoder * mpeg_dec,
guint8 * data, guint8 * end)
{
GstVdpauDecoder *dec;
MPEGPictureExt pic_ext;
VdpPictureInfoMPEG1Or2 *info;
dec = GST_VDPAU_DECODER (mpeg_dec);
info = &mpeg_dec->vdp_info;
if (!mpeg_util_parse_picture_coding_extension (&pic_ext, data, end))
return FALSE;
memcpy (&mpeg_dec->vdp_info.f_code, &pic_ext.f_code, 4);
info->intra_dc_precision = pic_ext.intra_dc_precision;
info->picture_structure = pic_ext.picture_structure;
info->top_field_first = pic_ext.top_field_first;
info->frame_pred_frame_dct = pic_ext.frame_pred_frame_dct;
info->concealment_motion_vectors = pic_ext.concealment_motion_vectors;
info->q_scale_type = pic_ext.q_scale_type;
info->intra_vlc_format = pic_ext.intra_vlc_format;
return TRUE;
}
static gboolean
gst_vdpau_mpeg_decoder_parse_picture (GstVdpauMpegDecoder * mpeg_dec,
guint8 * data, guint8 * end)
{
GstVdpauDecoder *dec;
MPEGPictureHdr pic_hdr;
dec = GST_VDPAU_DECODER (mpeg_dec);
if (!mpeg_util_parse_picture_hdr (&pic_hdr, data, end))
return FALSE;
mpeg_dec->vdp_info.picture_coding_type = pic_hdr.pic_type;
if (pic_hdr.pic_type == I_FRAME &&
mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE) {
dec->functions->vdp_video_surface_destroy (mpeg_dec->vdp_info.
forward_reference);
mpeg_dec->vdp_info.forward_reference = VDP_INVALID_HANDLE;
}
if (mpeg_dec->version == 1) {
mpeg_dec->vdp_info.full_pel_forward_vector =
pic_hdr.full_pel_forward_vector;
mpeg_dec->vdp_info.full_pel_backward_vector =
pic_hdr.full_pel_backward_vector;
memcpy (&mpeg_dec->vdp_info.f_code, &pic_hdr.f_code, 4);
}
return TRUE;
}
static gboolean
gst_vdpau_mpeg_decoder_parse_gop (GstVdpauMpegDecoder * mpeg_dec, guint8 * data,
guint8 * end)
{
MPEGPictureGOP gop;
if (!mpeg_util_parse_picture_gop (&gop, data, end))
return FALSE;
return TRUE;
}
static gboolean
gst_vdpau_mpeg_decoder_parse_quant_matrix (GstVdpauMpegDecoder * mpeg_dec,
guint8 * data, guint8 * end)
{
MPEGQuantMatrix qm;
if (!mpeg_util_parse_quant_matrix (&qm, data, end))
return FALSE;
memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
&qm.intra_quantizer_matrix, 64);
memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
&qm.non_intra_quantizer_matrix, 64);
return TRUE;
}
static GstFlowReturn
gst_vdpau_mpeg_decoder_chain (GstPad * pad, GstBuffer * buffer)
{
GstVdpauMpegDecoder *mpeg_dec;
guint8 *data, *end;
guint32 sync_word = 0xffffffff;
mpeg_dec = GST_VDPAU_MPEG_DECODER (GST_OBJECT_PARENT (pad));
data = GST_BUFFER_DATA (buffer);
end = GST_BUFFER_DATA (buffer) + GST_BUFFER_SIZE (buffer);
while ((data = mpeg_util_find_start_code (&sync_word, data, end))) {
guint8 *packet_start;
guint8 *packet_end;
packet_start = data - 3;
packet_end = mpeg_util_find_start_code (&sync_word, data, end);
if (packet_end)
packet_end -= 3;
else
packet_end = end;
if (data[0] >= MPEG_PACKET_SLICE_MIN && data[0] <= MPEG_PACKET_SLICE_MAX) {
GstBuffer *subbuf;
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SLICE");
subbuf =
gst_buffer_create_sub (buffer,
packet_start - GST_BUFFER_DATA (buffer), packet_end - packet_start);
gst_adapter_push (mpeg_dec->adapter, subbuf);
mpeg_dec->vdp_info.slice_count++;
} else if (mpeg_dec->vdp_info.slice_count > 0) {
if (gst_vdpau_mpeg_decoder_decode (mpeg_dec) != GST_FLOW_OK)
return GST_FLOW_ERROR;
}
switch (data[0]) {
case MPEG_PACKET_PICTURE:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_PICTURE");
gst_vdpau_mpeg_decoder_parse_picture (mpeg_dec, packet_start,
packet_end);
break;
case MPEG_PACKET_SEQUENCE:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE");
break;
case MPEG_PACKET_EXTENSION:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXTENSION");
switch (read_bits (data + 1, 0, 4)) {
case MPEG_PACKET_EXT_PICTURE_CODING:
gst_vdpau_mpeg_decoder_parse_picture_coding (mpeg_dec, packet_start,
packet_end);
break;
default:
break;
}
break;
case MPEG_PACKET_EXT_QUANT_MATRIX:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_QUANT_MATRIX");
gst_vdpau_mpeg_decoder_parse_quant_matrix (mpeg_dec, packet_start,
packet_end);
break;
case MPEG_PACKET_GOP:
GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP");
gst_vdpau_mpeg_decoder_parse_gop (mpeg_dec, packet_start, packet_end);
break;
default:
break;
}
}
return GST_FLOW_OK;
}
/* GObject vmethod implementations */
static void
@ -186,13 +411,34 @@ gst_vdpau_mpeg_decoder_class_init (GstVdpauMpegDecoderClass * klass)
vdpaudec_class->set_caps = gst_vdpau_mpeg_decoder_set_caps;
}
static void
gst_vdpau_mpeg_decoder_init_info (VdpPictureInfoMPEG1Or2 * vdp_info)
{
vdp_info->forward_reference = VDP_INVALID_HANDLE;
vdp_info->backward_reference = VDP_INVALID_HANDLE;
vdp_info->slice_count = 0;
vdp_info->picture_structure = 0;
vdp_info->picture_coding_type = 0;
vdp_info->intra_dc_precision = 0;
vdp_info->frame_pred_frame_dct = 0;
vdp_info->concealment_motion_vectors = 0;
}
static void
gst_vdpau_mpeg_decoder_init (GstVdpauMpegDecoder * mpeg_dec,
GstVdpauMpegDecoderClass * gclass)
{
mpeg_dec->silent = FALSE;
GstVdpauDecoder *dec;
dec = GST_VDPAU_DECODER (mpeg_dec);
mpeg_dec->silent = FALSE;
mpeg_dec->decoder = VDP_INVALID_HANDLE;
gst_vdpau_mpeg_decoder_init_info (&mpeg_dec->vdp_info);
mpeg_dec->adapter = gst_adapter_new ();
gst_pad_set_chain_function (dec->sink, gst_vdpau_mpeg_decoder_chain);
}
static void

gstvdpaumpegdecoder.h (View file)

@ -45,6 +45,7 @@
#define __GST_VDPAU_MPEG_DECODER_H__
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include "gstvdpaudecoder.h"
@ -66,7 +67,16 @@ struct _GstVdpauMpegDecoder
gboolean silent;
gint version;
VdpDecoder decoder;
VdpPictureInfoMPEG1Or2 vdp_info;
GstAdapter *adapter;
gint slices;
GstBuffer *p_buffer;
VdpPictureInfoMPEG1Or2 p_vdp_info;
};
struct _GstVdpauMpegDecoderClass

mpegutil.c (View file)

@ -166,7 +166,6 @@ mpeg_util_parse_extension_packet (MPEGSeqHdr * hdr, guint8 * data, guint8 * end)
/* Parse a Sequence Extension */
guint8 horiz_size_ext, vert_size_ext;
guint8 fps_n_ext, fps_d_ext;
gint i, offset;
if (G_UNLIKELY ((end - data) < 6))
/* need at least 10 bytes, minus 4 for the start code 000001b5 */
@ -182,23 +181,6 @@ mpeg_util_parse_extension_packet (MPEGSeqHdr * hdr, guint8 * data, guint8 * end)
hdr->fps_d *= (fps_d_ext + 1);
hdr->width += (horiz_size_ext << 12);
hdr->height += (vert_size_ext << 12);
if (read_bits (data + 7, 6, 1)) {
for (i = 0; i < 64; i++)
hdr->intra_quantizer_matrix[mpeg2_scan[i]] =
read_bits (data + 7 + i, 7, 8);
offset = 64;
} else
memcpy (hdr->intra_quantizer_matrix, default_intra_quantizer_matrix,
64);
if (read_bits (data + 7 + offset, 7, 1)) {
for (i = 0; i < 64; i++)
hdr->non_intra_quantizer_matrix[mpeg2_scan[i]] =
read_bits (data + 8 + offset + i, 0, 8);
} else
memset (hdr->non_intra_quantizer_matrix, 0, 64);
break;
}
default:
@ -217,6 +199,7 @@ mpeg_util_parse_sequence_hdr (MPEGSeqHdr * hdr, guint8 * data, guint8 * end)
gboolean constrained_flag;
gboolean load_intra_flag;
gboolean load_non_intra_flag;
gint i;
if (G_UNLIKELY ((end - data) < 12))
return FALSE; /* Too small to be a sequence header */
@ -241,19 +224,29 @@ mpeg_util_parse_sequence_hdr (MPEGSeqHdr * hdr, guint8 * data, guint8 * end)
set_fps_from_code (hdr, fps_idx);
constrained_flag = (data[7] >> 2) & 0x01;
load_intra_flag = (data[7] >> 1) & 0x01;
load_intra_flag = read_bits (data + 7, 6, 1);
if (load_intra_flag) {
if (G_UNLIKELY ((end - data) < 64))
return FALSE;
data += 64;
for (i = 0; i < 64; i++) {
hdr->intra_quantizer_matrix[mpeg2_scan[i]] =
read_bits (data + 7 + i, 7, 8);
}
data += 64;
load_non_intra_flag = data[7] & 0x01;
} else
memcpy (hdr->intra_quantizer_matrix, default_intra_quantizer_matrix, 64);
load_non_intra_flag = read_bits (data + 7, 7 + load_intra_flag, 1);
if (load_non_intra_flag) {
if (G_UNLIKELY ((end - data) < 64))
return FALSE;
data += 64;
}
for (i = 0; i < 64; i++)
hdr->non_intra_quantizer_matrix[mpeg2_scan[i]] =
read_bits (data + 8 + i, 1 + load_intra_flag, 8);
} else
memset (hdr->non_intra_quantizer_matrix, 16, 64);
/* Advance past the rest of the MPEG-1 header */
data += 8;
@ -282,14 +275,14 @@ mpeg_util_parse_picture_hdr (MPEGPictureHdr * hdr, guint8 * data, guint8 * end)
{
guint32 code;
if (G_UNLIKELY ((end - data) < 6))
if (G_UNLIKELY ((end - data) < 8))
return FALSE; /* Packet too small */
code = GST_READ_UINT32_BE (data);
if (G_UNLIKELY (code != (0x00000100 | MPEG_PACKET_PICTURE)))
return FALSE;
/* Skip the start code */
/* Skip the sync word */
data += 4;
hdr->pic_type = (data[1] >> 3) & 0x07;
@ -297,7 +290,7 @@ mpeg_util_parse_picture_hdr (MPEGPictureHdr * hdr, guint8 * data, guint8 * end)
return FALSE; /* Corrupted picture packet */
if (hdr->pic_type == P_FRAME || hdr->pic_type == B_FRAME) {
if (G_UNLIKELY ((end - data) < 7))
if (G_UNLIKELY ((end - data) < 5))
return FALSE; /* packet too small */
hdr->full_pel_forward_vector = read_bits (data + 3, 5, 1);
@ -319,12 +312,19 @@ gboolean
mpeg_util_parse_picture_coding_extension (MPEGPictureExt * ext, guint8 * data,
guint8 * end)
{
if (G_UNLIKELY ((end - data) < 7))
guint32 code;
if (G_UNLIKELY ((end - data) < 10))
return FALSE; /* Packet too small */
if (G_UNLIKELY (read_bits (data, 0, 4) != MPEG_PACKET_EXT_PICTURE_CODING))
code = GST_READ_UINT32_BE (data);
if (G_UNLIKELY (G_UNLIKELY (code != (0x00000100 | MPEG_PACKET_EXTENSION))))
return FALSE;
/* Skip the sync word */
data += 4;
ext->f_code[0][0] = read_bits (data, 4, 4);
ext->f_code[0][1] = read_bits (data + 1, 0, 4);
ext->f_code[1][0] = read_bits (data + 1, 4, 4);
@ -340,3 +340,78 @@ mpeg_util_parse_picture_coding_extension (MPEGPictureExt * ext, guint8 * data,
return TRUE;
}
gboolean
mpeg_util_parse_picture_gop (MPEGPictureGOP * gop, guint8 * data, guint8 * end)
{
guint32 code;
gint hour, minute, second;
if (G_UNLIKELY ((end - data) < 8))
return FALSE; /* Packet too small */
code = GST_READ_UINT32_BE (data);
if (G_UNLIKELY (G_UNLIKELY (code != (0x00000100 | MPEG_PACKET_GOP))))
return FALSE;
/* Skip the sync word */
data += 4;
gop->drop_frame_flag = read_bits (data, 0, 1);
hour = read_bits (data, 1, 5);
minute = read_bits (data, 6, 6);
second = read_bits (data + 1, 4, 6);
gop->timestamp = hour * 3600 * GST_SECOND;
gop->timestamp += minute * 60 * GST_SECOND;
gop->timestamp += second * GST_SECOND;
gop->frame = read_bits (data + 2, 3, 6);
return TRUE;
}
gboolean
mpeg_util_parse_quant_matrix (MPEGQuantMatrix * qm, guint8 * data, guint8 * end)
{
guint32 code;
gboolean load_intra_flag, load_non_intra_flag;
gint i;
if (G_UNLIKELY ((end - data) < 5))
return FALSE; /* Packet too small */
code = GST_READ_UINT32_BE (data);
if (G_UNLIKELY (G_UNLIKELY (code != (0x00000100 | MPEG_PACKET_GOP))))
return FALSE;
/* Skip the sync word */
data += 4;
load_intra_flag = read_bits (data, 0, 1);
if (load_intra_flag) {
if (G_UNLIKELY ((end - data) < 64))
return FALSE;
for (i = 0; i < 64; i++) {
qm->intra_quantizer_matrix[mpeg2_scan[i]] = read_bits (data + i, 1, 8);
}
data += 64;
} else
memcpy (qm->intra_quantizer_matrix, default_intra_quantizer_matrix, 64);
load_non_intra_flag = read_bits (data, 1 + load_intra_flag, 1);
if (load_non_intra_flag) {
if (G_UNLIKELY ((end - data) < 64))
return FALSE;
for (i = 0; i < 64; i++)
qm->non_intra_quantizer_matrix[mpeg2_scan[i]] =
read_bits (data + i, 2 + load_intra_flag, 8);
} else
memset (qm->non_intra_quantizer_matrix, 16, 64);
return TRUE;
}

mpegutil.h (View file)

@ -26,6 +26,8 @@
typedef struct MPEGSeqHdr MPEGSeqHdr;
typedef struct MPEGPictureHdr MPEGPictureHdr;
typedef struct MPEGPictureExt MPEGPictureExt;
typedef struct MPEGPictureGOP MPEGPictureGOP;
typedef struct MPEGQuantMatrix MPEGQuantMatrix;
/* Packet ID codes for different packet types we
* care about */
@ -90,6 +92,20 @@ struct MPEGPictureExt
guint8 intra_vlc_format;
};
struct MPEGPictureGOP
{
guint8 drop_frame_flag;
guint8 frame;
GstClockTime timestamp;
};
struct MPEGQuantMatrix
{
guint8 intra_quantizer_matrix[64];
guint8 non_intra_quantizer_matrix[64];
};
gboolean mpeg_util_parse_sequence_hdr (MPEGSeqHdr *hdr,
guint8 *data, guint8 *end);
@ -97,4 +113,11 @@ gboolean mpeg_util_parse_picture_hdr (MPEGPictureHdr * hdr, guint8 * data, guint
gboolean mpeg_util_parse_picture_coding_extension (MPEGPictureExt *ext, guint8 *data, guint8 *end);
gboolean mpeg_util_parse_picture_gop (MPEGPictureGOP * gop, guint8 * data, guint8 * end);
gboolean mpeg_util_parse_quant_matrix (MPEGQuantMatrix * qm, guint8 * data, guint8 * end);
guint8 *mpeg_util_find_start_code (guint32 * sync_word, guint8 * cur, guint8 * end);
guint32 read_bits (guint8 * buf, gint start_bit, gint n_bits);
#endif
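
Note (not part of the commit): the parsers in mpegutil.c lean on the read_bits() helper declared just above, whose body lies outside this diff. Judging only from the call sites (MSB-first bit numbering, with start_bit allowed to run past the first byte), a plausible sketch of such a helper, under those assumptions and with a hypothetical name, might look like:

/* Sketch only: an assumed implementation of a read_bits-style helper.
 * Build: gcc sketch.c $(pkg-config --cflags --libs glib-2.0)
 */
#include <glib.h>

static guint32
read_bits_sketch (guint8 * buf, gint start_bit, gint n_bits)
{
  guint32 result = 0;
  gint i;

  /* advance to the byte that contains start_bit */
  buf += start_bit / 8;
  start_bit %= 8;

  /* collect n_bits bits, MSB first, spilling into following bytes */
  for (i = 0; i < n_bits; i++) {
    guint8 byte = buf[(start_bit + i) / 8];
    guint8 bit = (byte >> (7 - ((start_bit + i) % 8))) & 0x01;

    result = (result << 1) | bit;
  }

  return result;
}

int
main (void)
{
  guint8 example[2] = { 0xA5, 0x3C };   /* 1010 0101 0011 1100 */

  /* 4 bits starting at bit 6 of byte 0: 0 1 0 0 -> 4 */
  g_print ("%u\n", read_bits_sketch (example, 6, 4));
  return 0;
}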