vdpau: more work on h264 decoder

Author: Carl-Anton Ingmarsson, 2010-06-16 23:12:43 +02:00
Parent: 686d2d2303
Commit: f8846a463d
10 changed files with 762 additions and 80 deletions


@@ -12,6 +12,7 @@ libgstvdpau_la_SOURCES = \
h264/gstnalreader.c \
h264/gsth264parser.c \
h264/gstvdph264frame.c \
h264/gsth264dpb.c \
h264/gstvdph264dec.c
libgstvdpau_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS)
@@ -34,4 +35,5 @@ noinst_HEADERS = \
h264/gstnalreader.h \
h264/gsth264parser.h \
h264/gstvdph264frame.h \
h264/gsth264dpb.h \
h264/gstvdph264dec.h


@@ -124,8 +124,6 @@ gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder)
static void
gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder)
{
GList *g;
GST_DEBUG ("reset");
base_video_decoder->discont = TRUE;
@@ -152,13 +150,6 @@ gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder)
base_video_decoder->have_src_caps = FALSE;
for (g = g_list_first (base_video_decoder->frames); g; g = g_list_next (g)) {
GstVideoFrame *frame = g->data;
gst_video_frame_unref (frame);
}
g_list_free (base_video_decoder->frames);
base_video_decoder->frames = NULL;
GST_OBJECT_LOCK (base_video_decoder);
base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE;
base_video_decoder->proportion = 0.5;
@@ -1022,9 +1013,6 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG ("pushing frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp));
base_video_decoder->frames =
g_list_remove (base_video_decoder->frames, frame);
gst_base_video_decoder_set_src_caps (base_video_decoder);
if (base_video_decoder->sink_clipping) {
@@ -1137,9 +1125,6 @@ gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG ("skipping frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp));
base_video_decoder->frames =
g_list_remove (base_video_decoder->frames, frame);
gst_video_frame_unref (frame);
}
@@ -1171,9 +1156,6 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
GST_DEBUG ("dist %d", frame->distance_from_sync);
base_video_decoder->frames = g_list_append (base_video_decoder->frames,
frame);
running_time = gst_segment_to_running_time (&base_video_decoder->segment,
GST_FORMAT_TIME, frame->presentation_timestamp);
@@ -1234,37 +1216,6 @@ gst_base_video_decoder_get_current_frame (GstBaseVideoDecoder *
return base_video_decoder->current_frame;
}
GstVideoFrame *
gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder *
base_video_decoder)
{
GList *g;
g = g_list_first (base_video_decoder->frames);
if (g == NULL)
return NULL;
return (GstVideoFrame *) (g->data);
}
GstVideoFrame *
gst_base_video_decoder_get_frame (GstBaseVideoDecoder * base_video_decoder,
int frame_number)
{
GList *g;
for (g = g_list_first (base_video_decoder->frames); g; g = g_list_next (g)) {
GstVideoFrame *frame = g->data;
if (frame->system_frame_number == frame_number) {
return frame;
}
}
return NULL;
}
void
gst_base_video_decoder_update_src_caps (GstBaseVideoDecoder *
base_video_decoder)
@@ -1295,7 +1246,6 @@ gst_base_video_decoder_get_property (GObject * object, guint property_id,
}
}
/* GObject vmethod implementations */
static void
gst_base_video_decoder_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)


@@ -95,8 +95,6 @@ struct _GstBaseVideoDecoder
GstPad *srcpad;
GstAdapter *input_adapter;
GList *frames;
gboolean have_sync;
gboolean discont;

sys/vdpau/h264/gsth264dpb.c (new file, 263 lines added)

@@ -0,0 +1,263 @@
/* GStreamer
*
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "gsth264dpb.h"
/* Properties */
enum
{
PROP_0,
PROP_NUM_REF_FRAMES
};
GST_DEBUG_CATEGORY_STATIC (h264dpb_debug);
#define GST_CAT_DEFAULT h264dpb_debug
#define DEBUG_INIT \
GST_DEBUG_CATEGORY_INIT (h264dpb_debug, "h264dpb", 0, \
"H264 DPB");
G_DEFINE_TYPE_WITH_CODE (GstH264DPB, gst_h264_dpb, G_TYPE_OBJECT, DEBUG_INIT);
void
gst_h264_dpb_fill_reference_frames (GstH264DPB * dpb,
VdpReferenceFrameH264 reference_frames[16])
{
GstVdpH264Frame **frames;
guint i;
frames = dpb->frames;
for (i = 0; i < dpb->n_frames; i++) {
GstVdpH264Frame *frame = frames[i];
reference_frames[i].surface =
GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME_CAST (frame)->src_buffer)->
surface;
reference_frames[i].is_long_term = frame->is_long_term;
reference_frames[i].top_is_reference = frame->is_reference;
reference_frames[i].bottom_is_reference = frame->is_reference;
reference_frames[i].field_order_cnt[0] = frame->poc;
reference_frames[i].field_order_cnt[1] = frame->poc;
reference_frames[i].frame_idx = frame->frame_num;
}
}
static void
gst_h264_dpb_output (GstH264DPB * dpb, guint idx)
{
GstVdpH264Frame *frame = dpb->frames[idx];
gst_video_frame_ref (GST_VIDEO_FRAME_CAST (frame));
dpb->output (dpb, frame);
frame->output_needed = FALSE;
if (!frame->is_reference) {
GstVdpH264Frame **frames;
guint i;
gst_video_frame_unref (GST_VIDEO_FRAME_CAST (frame));
dpb->n_frames--;
frames = dpb->frames;
for (i = idx; i < dpb->n_frames; i++)
frames[i] = frames[i + 1];
}
}
static gboolean
gst_h264_dpb_bump (GstH264DPB * dpb, guint poc)
{
GstVdpH264Frame **frames;
guint i;
gint bump_idx;
frames = dpb->frames;
bump_idx = -1;
for (i = 0; i < dpb->n_frames; i++) {
if (frames[i]->output_needed) {
bump_idx = i;
break;
}
}
if (bump_idx != -1) {
for (i = bump_idx + 1; i < dpb->n_frames; i++) {
if (frames[i]->output_needed && (frames[i]->poc < frames[bump_idx]->poc)) {
bump_idx = i;
}
}
if (frames[bump_idx]->poc < poc) {
gst_h264_dpb_output (dpb, bump_idx);
return TRUE;
}
}
return FALSE;
}
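The bump policy above boils down to: pick the frame with the smallest POC that still needs output, and emit it only if that POC is below the supplied threshold (passing G_MAXUINT forces a bump). A minimal standalone sketch of the same selection; DemoFrame and pick_bump_candidate are illustrative names, not part of the patch:

#include <glib.h>

typedef struct { guint poc; gboolean output_needed; } DemoFrame;  /* stand-in for GstVdpH264Frame */

/* Returns the index of the frame that would be bumped for `poc`, or -1. */
static gint
pick_bump_candidate (DemoFrame * frames, guint n_frames, guint poc)
{
  gint bump_idx = -1;
  guint i;

  for (i = 0; i < n_frames; i++) {
    if (frames[i].output_needed &&
        (bump_idx == -1 || frames[i].poc < frames[bump_idx].poc))
      bump_idx = i;
  }

  if (bump_idx != -1 && frames[bump_idx].poc < poc)
    return bump_idx;

  return -1;
}

With frames at POC 4, 2 and 8 all needing output, the candidate is the POC 2 entry; it is bumped for any threshold above 2, and always when G_MAXUINT is passed, which is what gst_h264_dpb_add () does when it has to free a slot for a new reference frame.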
gboolean
gst_h264_dpb_add (GstH264DPB * dpb, GstVdpH264Frame * h264_frame)
{
GstVdpH264Frame **frames;
GST_DEBUG ("add frame with poc: %d", h264_frame->poc);
frames = dpb->frames;
if (h264_frame->is_reference) {
while (dpb->n_frames == dpb->max_frames) {
if (!gst_h264_dpb_bump (dpb, G_MAXUINT)) {
GST_ERROR_OBJECT (dpb, "Couldn't make room in DPB");
return FALSE;
}
}
dpb->frames[dpb->n_frames++] = h264_frame;
}
else {
if (dpb->n_frames == dpb->max_frames) {
while (gst_h264_dpb_bump (dpb, h264_frame->poc));
dpb->output (dpb, h264_frame);
}
else
dpb->frames[dpb->n_frames++] = h264_frame;
}
return TRUE;
}
void
gst_h264_dpb_flush (GstH264DPB * dpb, gboolean output)
{
GstVideoFrame **frames;
guint i;
if (output)
while (gst_h264_dpb_bump (dpb, G_MAXUINT));
frames = (GstVideoFrame **) dpb->frames;
for (i = 0; i < dpb->n_frames; i++)
gst_video_frame_unref (frames[i]);
dpb->n_frames = 0;
}
void
gst_h264_dpb_mark_sliding (GstH264DPB * dpb)
{
GstVdpH264Frame **frames;
guint i;
gint mark_idx = -1;
frames = dpb->frames;
for (i = 0; i < dpb->n_frames; i++) {
if (frames[i]->is_reference && !frames[i]->is_long_term) {
mark_idx = i;
break;
}
}
if (mark_idx != -1) {
for (i = mark_idx; i < dpb->n_frames; i++) {
if (frames[i]->is_reference && !frames[i]->is_long_term &&
frames[i]->frame_num < frames[mark_idx]->frame_num)
mark_idx = i;
}
frames[mark_idx]->is_reference = FALSE;
}
}
/* GObject vmethod implementations */
static void
gst_h264_dpb_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
GstH264DPB *dpb = GST_H264_DPB (object);
switch (property_id) {
case PROP_NUM_REF_FRAMES:
g_value_set_uint (value, dpb->max_frames);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
static void
gst_h264_dpb_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
GstH264DPB *dpb = GST_H264_DPB (object);
switch (property_id) {
case PROP_NUM_REF_FRAMES:
{
guint i;
dpb->max_frames = g_value_get_uint (value);
for (i = dpb->n_frames; i > dpb->max_frames; i--)
gst_h264_dpb_bump (dpb, G_MAXUINT);
break;
}
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
static void
gst_h264_dpb_finalize (GObject * object)
{
/* TODO: Add deinitialization code here */
G_OBJECT_CLASS (gst_h264_dpb_parent_class)->finalize (object);
}
static void
gst_h264_dpb_init (GstH264DPB * dpb)
{
dpb->n_frames = 0;
dpb->max_frames = MAX_DPB_SIZE;
}
static void
gst_h264_dpb_class_init (GstH264DPBClass * klass)
{
GObjectClass *object_class = G_OBJECT_CLASS (klass);
object_class->finalize = gst_h264_dpb_finalize;
object_class->set_property = gst_h264_dpb_set_property;
object_class->get_property = gst_h264_dpb_get_property;
g_object_class_install_property (object_class, PROP_NUM_REF_FRAMES,
g_param_spec_uint ("num-ref-frames", "Num Ref Frames",
"How many reference frames the DPB should hold ",
0, 16, 16, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}


@@ -0,0 +1,74 @@
/* GStreamer
*
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_H264_DPB_H_
#define _GST_H264_DPB_H_
#include <glib-object.h>
#include "../gstvdp/gstvdpvideobuffer.h"
#include "gstvdph264frame.h"
G_BEGIN_DECLS
#define MAX_DPB_SIZE 16
#define GST_TYPE_H264_DPB (gst_h264_dpb_get_type ())
#define GST_H264_DPB(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_H264_DPB, GstH264DPB))
#define GST_H264_DPB_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_H264_DPB, GstH264DPBClass))
#define GST_IS_H264_DPB(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_H264_DPB))
#define GST_IS_H264_DPB_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_H264_DPB))
#define GST_H264_DPB_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_H264_DPB, GstH264DPBClass))
typedef struct _GstH264DPB GstH264DPB;
typedef struct _GstH264DPBClass GstH264DPBClass;
struct _GstH264DPB
{
GObject parent_instance;
GstVdpH264Frame *frames[MAX_DPB_SIZE];
guint n_frames;
guint max_frames;
void (*output) (GstH264DPB *dpb, GstVdpH264Frame *h264_frame);
};
struct _GstH264DPBClass
{
GObjectClass parent_class;
};
void
gst_h264_dpb_fill_reference_frames (GstH264DPB *dpb, VdpReferenceFrameH264 reference_frames[16]);
gboolean gst_h264_dpb_add (GstH264DPB *dpb, GstVdpH264Frame *h264_frame);
void gst_h264_dpb_flush (GstH264DPB *dpb, gboolean output);
void gst_h264_dpb_mark_sliding (GstH264DPB *dpb);
GType gst_h264_dpb_get_type (void) G_GNUC_CONST;
G_END_DECLS
#endif /* _GST_H264_DPB_H_ */
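A rough usage sketch of the new DPB object, mirroring how gstvdph264dec.c below wires it up; my_output_cb and create_dpb are placeholder names, and the callback body is not the decoder's real output path:

#include "gsth264dpb.h"

static void
my_output_cb (GstH264DPB * dpb, GstVdpH264Frame * h264_frame)
{
  /* push the decoded picture downstream here, then drop the DPB's reference */
  gst_video_frame_unref (GST_VIDEO_FRAME_CAST (h264_frame));
}

static GstH264DPB *
create_dpb (void)
{
  GstH264DPB *dpb;

  dpb = g_object_new (GST_TYPE_H264_DPB, NULL);
  dpb->output = my_output_cb;

  /* resize whenever a new SPS changes num_ref_frames */
  g_object_set (dpb, "num-ref-frames", 4, NULL);

  /* per decoded picture:  gst_h264_dpb_add (dpb, h264_frame);  */
  /* on IDR or flush:      gst_h264_dpb_flush (dpb, TRUE);      */

  return dpb;
}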


@@ -374,6 +374,7 @@ gst_h264_parser_parse_sequence (GstH264Parser * parser, guint8 * data,
seq->bit_depth_chroma_minus8 = 0;
memset (seq->scaling_lists_4x4, 16, 96);
memset (seq->scaling_lists_8x8, 16, 384);
seq->mb_adaptive_frame_field_flag = 0;
seq->frame_crop_left_offset = 0;
seq->frame_crop_right_offset = 0;
seq->frame_crop_top_offset = 0;
@@ -417,7 +418,7 @@ gst_h264_parser_parse_sequence (GstH264Parser * parser, guint8 * data,
READ_UE_ALLOWED (&reader, seq->log2_max_frame_num_minus4, 0, 12);
/* calculate MaxFrameNum */
seq->MaxFrameNum = pow (2, seq->log2_max_frame_num_minus4 + 4);
seq->MaxFrameNum = 1 << (seq->log2_max_frame_num_minus4 + 4);
READ_UE_ALLOWED (&reader, seq->pic_order_cnt_type, 0, 2);
if (seq->pic_order_cnt_type == 0) {
@@ -753,40 +754,39 @@ gst_h264_slice_parse_dec_ref_pic_marking (GstH264Slice * slice,
  m = &slice->dec_ref_pic_marking;
  if (slice->nal_unit.IdrPicFlag) {
    READ_UINT8 (reader, m->no_output_of_prior_pics_flag, 1);
    READ_UINT8 (reader, m->long_term_reference_flag, 1);
  } else {
    READ_UINT8 (reader, m->adaptive_ref_pic_marking_mode_flag, 1);
    if (m->adaptive_ref_pic_marking_mode_flag) {
      guint8 memory_management_control_operation;
      guint i = 0;
      do {
        READ_UE_ALLOWED (reader, memory_management_control_operation, 0, 6);
        m->ref_pic_marking[i].memory_management_control_operation =
            memory_management_control_operation;
        if (memory_management_control_operation == 1 ||
            memory_management_control_operation == 3)
          READ_UE (reader, m->ref_pic_marking[i].difference_of_pic_nums_minus1);
        if (memory_management_control_operation == 2)
          READ_UE (reader, m->ref_pic_marking[i].long_term_pic_num);
        if (memory_management_control_operation == 3 ||
            memory_management_control_operation == 6)
          READ_UE (reader, m->ref_pic_marking[i].long_term_frame_idx);
        if (memory_management_control_operation == 4)
          READ_UE (reader, m->ref_pic_marking[i].max_long_term_frame_idx_plus1);
        i++;
      }
      while (memory_management_control_operation != 0);
      m->n_ref_pic_marking = i;
    }
  }


@ -183,7 +183,6 @@ struct _GstH264Sequence
guint32 pic_height_in_map_units_minus1;
guint8 frame_mbs_only_flag;
/* if !frame_mbs_only_flag */
guint8 mb_adaptive_frame_field_flag;
guint8 direct_8x8_inference_flag;
@@ -256,6 +255,18 @@ struct _GstH264DecRefPicMarking
/* else */
guint8 adaptive_ref_pic_marking_mode_flag;
struct {
guint8 memory_management_control_operation;
union {
guint32 difference_of_pic_nums_minus1;
guint32 long_term_pic_num;
guint32 long_term_frame_idx;
guint32 max_long_term_frame_idx_plus1;
};
} ref_pic_marking[10];
guint8 n_ref_pic_marking;
};
struct _GstH264PredWeightTable
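Nothing in this commit consumes the stored operations yet (the decoder still logs a FIXME for adaptive marking). A hypothetical consumer would walk the new array roughly as below; walk_ref_pic_marking is an illustrative name, the usual GstH264DecRefPicMarking typedef is assumed, and the operation meanings follow H.264 7.4.3.3:

static void
walk_ref_pic_marking (const GstH264DecRefPicMarking * m)
{
  guint i;

  for (i = 0; i < m->n_ref_pic_marking; i++) {
    switch (m->ref_pic_marking[i].memory_management_control_operation) {
      case 1:  /* unmark a short-term picture (difference_of_pic_nums_minus1) */
        break;
      case 2:  /* unmark a long-term picture (long_term_pic_num) */
        break;
      case 3:  /* turn a short-term picture into a long-term one (long_term_frame_idx) */
        break;
      case 4:  /* set MaxLongTermFrameIdx (max_long_term_frame_idx_plus1) */
        break;
      case 5:  /* unmark all reference pictures */
        break;
      case 6:  /* mark the current picture as long-term (long_term_frame_idx) */
        break;
      default: /* 0 terminates the list */
        break;
    }
  }
}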


@@ -72,6 +72,21 @@ GST_BOILERPLATE_FULL (GstVdpH264Dec, gst_vdp_h264_dec, GstBaseVideoDecoder,
} \
}
static GstFlowReturn
gst_vdp_h264_dec_alloc_buffer (GstVdpH264Dec * h264_dec,
GstVdpVideoBuffer ** outbuf)
{
GstVdpVideoSrcPad *vdp_pad;
GstFlowReturn ret = GST_FLOW_OK;
vdp_pad = (GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (h264_dec);
ret = gst_vdp_video_src_pad_alloc_buffer (vdp_pad, outbuf);
if (ret != GST_FLOW_OK)
return ret;
return GST_FLOW_OK;
}
static gboolean
gst_vdp_h264_dec_set_sink_caps (GstBaseVideoDecoder * base_video_decoder,
GstCaps * caps)
@@ -165,25 +180,354 @@ gst_vdp_h264_dec_shape_output (GstBaseVideoDecoder * base_video_decoder,
return gst_vdp_video_src_pad_push (vdp_pad, GST_VDP_VIDEO_BUFFER (buf));
}
static void
gst_vdp_h264_dec_output (GstH264DPB * dpb, GstVdpH264Frame * h264_frame)
{
GST_DEBUG ("poc: %d", h264_frame->poc);
gst_video_frame_unref (GST_VIDEO_FRAME_CAST (h264_frame));
}
static guint
gst_vdp_h264_dec_calculate_poc (GstVdpH264Dec * h264_dec, GstH264Slice * slice)
{
GstH264Picture *pic;
GstH264Sequence *seq;
guint poc = 0;
pic = slice->picture;
seq = pic->sequence;
if (seq->pic_order_cnt_type == 0) {
guint32 max_poc_cnt_lsb = 1 << (seq->log2_max_pic_order_cnt_lsb_minus4 + 4);
if ((slice->pic_order_cnt_lsb < h264_dec->prev_poc_lsb) &&
((h264_dec->prev_poc_lsb - slice->pic_order_cnt_lsb) >=
(max_poc_cnt_lsb / 2)))
h264_dec->poc_msb = h264_dec->poc_msb + max_poc_cnt_lsb;
else if ((slice->pic_order_cnt_lsb > h264_dec->prev_poc_lsb) &&
((slice->pic_order_cnt_lsb - h264_dec->prev_poc_lsb) >
(max_poc_cnt_lsb / 2)))
h264_dec->poc_msb = h264_dec->poc_msb - max_poc_cnt_lsb;
poc = h264_dec->poc_msb + slice->pic_order_cnt_lsb;
h264_dec->prev_poc_lsb = slice->pic_order_cnt_lsb;
}
return poc;
}
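The MSB bookkeeping above implements the pic_order_cnt_type == 0 rule from H.264 8.2.1.1; the other POC types are not handled yet. A small self-contained illustration of the wraparound behaviour, with simplified names (update_poc is not part of the patch):

#include <stdio.h>

/* PicOrderCntMsb update for pic_order_cnt_type == 0 (H.264 8.2.1.1) */
static int
update_poc (int *poc_msb, int *prev_lsb, int lsb, int max_lsb)
{
  if (lsb < *prev_lsb && (*prev_lsb - lsb) >= max_lsb / 2)
    *poc_msb += max_lsb;
  else if (lsb > *prev_lsb && (lsb - *prev_lsb) > max_lsb / 2)
    *poc_msb -= max_lsb;

  *prev_lsb = lsb;
  return *poc_msb + lsb;
}

int
main (void)
{
  int msb = 0, prev = 0;
  int lsbs[] = { 0, 128, 250, 2 };      /* lsb wraps from 250 back to 2 */
  unsigned i;

  /* prints POCs 0, 128, 250, 258: the count keeps increasing across the wrap */
  for (i = 0; i < 4; i++)
    printf ("lsb %3d -> poc %d\n", lsbs[i], update_poc (&msb, &prev, lsbs[i], 256));
  return 0;
}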
static void
gst_vdp_h264_dec_init_frame_info (GstVdpH264Dec * h264_dec,
GstVdpH264Frame * h264_frame)
{
GstH264Slice *slice;
slice = &h264_frame->slice_hdr;
h264_frame->poc = gst_vdp_h264_dec_calculate_poc (h264_dec, slice);
h264_frame->is_long_term = FALSE;
/* is reference */
if (slice->nal_unit.ref_idc == 0)
h264_frame->is_reference = FALSE;
else if (slice->nal_unit.IdrPicFlag) {
h264_frame->is_reference = TRUE;
h264_frame->is_long_term =
slice->dec_ref_pic_marking.long_term_reference_flag;
} else {
if (slice->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag)
GST_ERROR ("FIXME: implement adaptive ref pic marking");
else
h264_frame->is_reference = TRUE;
}
}
static gboolean
gst_vdp_h264_dec_idr (GstVdpH264Dec * h264_dec, GstVdpH264Frame * h264_frame)
{
GstH264Slice *slice;
GstH264Sequence *seq;
h264_dec->poc_msb = 0;
h264_dec->prev_poc_lsb = 0;
slice = &h264_frame->slice_hdr;
if (slice->dec_ref_pic_marking.no_output_of_prior_pics_flag)
gst_h264_dpb_flush (h264_dec->dpb, FALSE);
else
gst_h264_dpb_flush (h264_dec->dpb, TRUE);
seq = slice->picture->sequence;
if (seq != h264_dec->sequence) {
GstVdpDevice *device;
gst_base_video_decoder_update_src_caps (GST_BASE_VIDEO_DECODER (h264_dec));
device = gst_vdp_video_src_pad_get_device
(GST_VDP_VIDEO_SRC_PAD (GST_BASE_VIDEO_DECODER_SRC_PAD (h264_dec)));
if (device) {
GstVideoState *state;
VdpDecoderProfile profile;
VdpStatus status;
if (h264_dec->decoder != VDP_INVALID_HANDLE) {
device->vdp_decoder_destroy (h264_dec->decoder);
h264_dec->decoder = VDP_INVALID_HANDLE;
}
state =
gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (h264_dec));
switch (seq->profile_idc) {
case 66:
profile = VDP_DECODER_PROFILE_H264_BASELINE;
break;
case 77:
profile = VDP_DECODER_PROFILE_H264_MAIN;
break;
case 100:
profile = VDP_DECODER_PROFILE_H264_HIGH;
break;
default:
return FALSE;
}
status = device->vdp_decoder_create (device->device, profile,
state->width, state->height, seq->num_ref_frames, &h264_dec->decoder);
if (status != VDP_STATUS_OK) {
GST_ELEMENT_ERROR (h264_dec, RESOURCE, READ,
("Could not create vdpau decoder"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
return FALSE;
}
} else
return FALSE;
g_object_set (h264_dec->dpb, "num-ref-frames", seq->num_ref_frames, NULL);
h264_dec->sequence = seq;
}
return TRUE;
}
static VdpPictureInfoH264
gst_vdp_h264_dec_fill_info (GstVdpH264Dec * h264_dec,
GstVdpH264Frame * h264_frame)
{
GstH264Slice *slice;
GstH264Picture *pic;
GstH264Sequence *seq;
VdpPictureInfoH264 info;
slice = &h264_frame->slice_hdr;
pic = slice->picture;
seq = pic->sequence;
info.slice_count = h264_frame->slices->len;
/* FIXME: we only handle frames for now */
info.field_order_cnt[0] = h264_frame->poc;
info.field_order_cnt[1] = h264_frame->poc;
info.is_reference = h264_frame->is_reference;
info.field_pic_flag = slice->field_pic_flag;
info.bottom_field_flag = slice->bottom_field_flag;
info.num_ref_frames = seq->num_ref_frames;
info.frame_mbs_only_flag = seq->frame_mbs_only_flag;
info.mb_adaptive_frame_field_flag = seq->mb_adaptive_frame_field_flag;
info.log2_max_frame_num_minus4 = seq->log2_max_frame_num_minus4;
info.pic_order_cnt_type = seq->pic_order_cnt_type;
info.log2_max_pic_order_cnt_lsb_minus4 =
seq->log2_max_pic_order_cnt_lsb_minus4;
info.delta_pic_order_always_zero_flag = seq->delta_pic_order_always_zero_flag;
info.direct_8x8_inference_flag = seq->direct_8x8_inference_flag;
info.constrained_intra_pred_flag = pic->constrained_intra_pred_flag;
info.weighted_pred_flag = pic->weighted_pred_flag;
info.weighted_bipred_idc = pic->weighted_bipred_idc;
info.transform_8x8_mode_flag = pic->transform_8x8_mode_flag;
info.chroma_qp_index_offset = pic->chroma_qp_index_offset;
info.second_chroma_qp_index_offset = pic->second_chroma_qp_index_offset;
info.pic_init_qp_minus26 = pic->pic_init_qp_minus26;
info.num_ref_idx_l0_active_minus1 = pic->num_ref_idx_l0_active_minus1;
info.num_ref_idx_l1_active_minus1 = pic->num_ref_idx_l1_active_minus1;
info.entropy_coding_mode_flag = pic->entropy_coding_mode_flag;
info.pic_order_present_flag = pic->pic_order_present_flag;
info.deblocking_filter_control_present_flag =
pic->deblocking_filter_control_present_flag;
info.redundant_pic_cnt_present_flag = pic->redundant_pic_cnt_present_flag;
memcpy (&info.scaling_lists_4x4, &pic->scaling_lists_4x4, 96);
memcpy (&info.scaling_lists_8x8, &pic->scaling_lists_8x8, 128);
gst_h264_dpb_fill_reference_frames (h264_dec->dpb, info.referenceFrames);
return info;
}
static VdpBitstreamBuffer *
gst_vdp_h264_dec_create_bitstream_buffers (GstVdpH264Dec * h264_dec,
GstVdpH264Frame * h264_frame, guint * n_bufs)
{
VdpBitstreamBuffer *bufs;
if (h264_dec->packetized) {
guint i;
bufs = g_new (VdpBitstreamBuffer, h264_frame->slices->len * 2);
*n_bufs = h264_frame->slices->len * 2;
for (i = 0; i < h264_frame->slices->len; i++) {
static const guint8 start_code[] = { 0x00, 0x00, 0x01 };
guint idx;
GstBuffer *buf;
idx = i * 2;
bufs[idx].bitstream = start_code;
bufs[idx].bitstream_bytes = 3;
bufs[idx].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
idx = idx + 1;
buf = GST_BUFFER_CAST (g_ptr_array_index (h264_frame->slices, i));
bufs[idx].bitstream = GST_BUFFER_DATA (buf) + h264_dec->nal_length_size;
bufs[idx].bitstream_bytes = GST_BUFFER_SIZE (buf) -
h264_dec->nal_length_size;
bufs[idx].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
}
}
else {
guint i;
bufs = g_new (VdpBitstreamBuffer, h264_frame->slices->len);
*n_bufs = h264_frame->slices->len;
for (i = 0; i < h264_frame->slices->len; i++) {
GstBuffer *buf;
buf = GST_BUFFER_CAST (g_ptr_array_index (h264_frame->slices, i));
bufs[i].bitstream = GST_BUFFER_DATA (buf);
bufs[i].bitstream_bytes = GST_BUFFER_SIZE (buf);
bufs[i].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
}
}
return bufs;
}
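For packetized (avcC-style) input, each slice is handed to VDPAU as two VdpBitstreamBuffer entries: a shared three-byte Annex B start code, then the slice payload with the nal_length_size length prefix skipped; that is why the array holds two entries per slice in that branch. Byte-stream input already contains start codes and is passed through unchanged. A minimal sketch of the two-entry pattern for a single slice; fill_packetized_slice is a hypothetical helper working on plain pointers instead of GstBuffer:

#include <glib.h>
#include <vdpau/vdpau.h>

static void
fill_packetized_slice (VdpBitstreamBuffer bufs[2],
    const guint8 * slice, guint32 slice_size, guint8 nal_length_size)
{
  static const guint8 start_code[] = { 0x00, 0x00, 0x01 };

  bufs[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  bufs[0].bitstream = start_code;
  bufs[0].bitstream_bytes = sizeof (start_code);

  bufs[1].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  bufs[1].bitstream = slice + nal_length_size;        /* skip the length prefix */
  bufs[1].bitstream_bytes = slice_size - nal_length_size;
}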
static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame, GstClockTimeDiff deadline)
{
GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
GstVdpH264Frame *h264_frame;
GstH264Slice *slice;
GstH264Picture *pic;
GstH264Sequence *seq;
GstFlowReturn ret;
GstVdpVideoBuffer *outbuf;
VdpPictureInfoH264 info;
GstVdpDevice *device;
VdpVideoSurface surface;
VdpBitstreamBuffer *bufs;
guint n_bufs;
VdpStatus status;
GST_DEBUG ("handle_frame");
h264_frame = (GstVdpH264Frame *) frame;
GST_DEBUG ("frame_num: %d", h264_frame->slice_hdr.frame_num);
GST_DEBUG ("pic_order_cnt_type: %d",
h264_frame->slice_hdr.picture->sequence->pic_order_cnt_type);
GST_DEBUG ("pic_order_cnt_lsb: %d", h264_frame->slice_hdr.pic_order_cnt_lsb);
GST_DEBUG ("delta_pic_order_cnt_bottom: %d",
h264_frame->slice_hdr.delta_pic_order_cnt_bottom);
slice = &h264_frame->slice_hdr;
pic = slice->picture;
seq = pic->sequence;
if (slice->nal_unit.IdrPicFlag) {
if (gst_vdp_h264_dec_idr (h264_dec, h264_frame))
h264_dec->got_idr = TRUE;
else {
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK;
}
}
/* check if we've got a IDR frame yet */
if (!h264_dec->got_idr) {
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK;
}
gst_vdp_h264_dec_init_frame_info (h264_dec, h264_frame);
/* decoding */
if ((ret = gst_vdp_h264_dec_alloc_buffer (h264_dec, &outbuf)) != GST_FLOW_OK)
goto alloc_error;
device = GST_VDP_VIDEO_BUFFER (outbuf)->device;
surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;
info = gst_vdp_h264_dec_fill_info (h264_dec, h264_frame);
bufs = gst_vdp_h264_dec_create_bitstream_buffers (h264_dec, h264_frame,
&n_bufs);
status = device->vdp_decoder_render (h264_dec->decoder, surface,
(VdpPictureInfo *) & info, n_bufs, bufs);
g_free (bufs);
if (status != VDP_STATUS_OK)
goto decode_error;
frame->src_buffer = GST_BUFFER_CAST (outbuf);
/* DPB handling */
if (slice->nal_unit.ref_idc != 0 && !slice->nal_unit.IdrPicFlag) {
if (slice->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag)
GST_ERROR ("FIXME: implement adaptive ref pic marking");
else
gst_h264_dpb_mark_sliding (h264_dec->dpb);
}
gst_h264_dpb_add (h264_dec->dpb, h264_frame);
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_OK;
alloc_error:
GST_ERROR_OBJECT (h264_dec, "Could not allocate output buffer");
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return ret;
decode_error:
GST_ELEMENT_ERROR (h264_dec, RESOURCE, READ,
("Could not decode"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
gst_buffer_unref (GST_BUFFER_CAST (outbuf));
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return GST_FLOW_ERROR;
}
static gint
@@ -445,6 +789,11 @@ gst_vdp_h264_dec_create_srcpad (GstBaseVideoDecoder * base_video_decoder,
static gboolean
gst_vdp_h264_dec_flush (GstBaseVideoDecoder * base_video_decoder)
{
GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
h264_dec->got_idr = FALSE;
gst_h264_dpb_flush (h264_dec->dpb, FALSE);
return TRUE;
}
@@ -455,8 +804,15 @@ gst_vdp_h264_dec_start (GstBaseVideoDecoder * base_video_decoder)
h264_dec->packetized = FALSE;
h264_dec->nal_length_size = SYNC_CODE_SIZE;
h264_dec->got_idr = FALSE;
h264_dec->sequence = NULL;
h264_dec->parser = g_object_new (GST_TYPE_H264_PARSER, NULL);
h264_dec->dpb = g_object_new (GST_TYPE_H264_DPB, NULL);
h264_dec->dpb->output = gst_vdp_h264_dec_output;
return TRUE;
}
@@ -465,7 +821,21 @@ gst_vdp_h264_dec_stop (GstBaseVideoDecoder * base_video_decoder)
{
GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
GstVdpVideoSrcPad *vdp_pad;
GstVdpDevice *device;
g_object_unref (h264_dec->parser);
g_object_unref (h264_dec->dpb);
vdp_pad =
GST_VDP_VIDEO_SRC_PAD (GST_BASE_VIDEO_DECODER_SRC_PAD
(base_video_decoder));
if ((device = gst_vdp_video_src_pad_get_device (vdp_pad))) {
if (h264_dec->decoder != VDP_INVALID_HANDLE)
device->vdp_decoder_destroy (h264_dec->decoder);
}
return TRUE;
}


@@ -26,6 +26,7 @@
#include "../basevideodecoder/gstbasevideodecoder.h"
#include "gsth264parser.h"
#include "gsth264dpb.h"
G_BEGIN_DECLS
@@ -44,10 +45,18 @@ typedef struct _GstVdpH264DecClass GstVdpH264DecClass;
struct _GstVdpH264Dec {
GstBaseVideoDecoder base_video_decoder;
GstBuffer *codec_data;
gboolean packetized;
guint8 nal_length_size;
GstH264Parser *parser;
GstH264DPB *dpb;
GstH264Sequence *sequence;
gboolean got_idr;
VdpDecoder decoder;
guint poc_msb;
guint prev_poc_lsb;
};
struct _GstVdpH264DecClass {


@@ -42,8 +42,13 @@ struct _GstVdpH264Frame
GstVideoFrame video_frame;
GstH264Slice slice_hdr;
GPtrArray *slices;
guint poc;
guint16 frame_num;
gboolean is_reference;
gboolean is_long_term;
gboolean output_needed;
};
struct _GstVdpH264FrameClass