vdpau: remove gst_base_video_decoder_get_current_frame
We now pass the current frame to GstBaseVideoDecoder::parse_data. Also fix up some errors in vdpaumpeg4dec so that it now produces correct output.
parent 08e6a65f08
commit 885845a678
10 changed files with 51 additions and 63 deletions
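The change that matters for GstBaseVideoDecoder subclasses is the extra argument on the parse_data vmethod: the base class now hands in the frame it is currently filling, and the public gst_base_video_decoder_get_current_frame() helper goes away. A minimal sketch of how a subclass adapts, editorial and not part of the patch; "my_dec" and "MyDecFrame" are made-up names:

/* Sketch only: hypothetical subclass implementation of the new vmethod. */
static GstFlowReturn
my_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
    GstBuffer * buf, gboolean at_eos, GstVideoFrame * frame)
{
  /* Previously this body started with
   *   frame = gst_base_video_decoder_get_current_frame (base_video_decoder);
   * now the current frame arrives as the last argument. */
  MyDecFrame *my_frame = (MyDecFrame *) frame;

  /* ... collect buf into my_frame, then call
   * gst_base_video_decoder_have_frame () once a full picture is parsed ... */
  return GST_FLOW_OK;
}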
@@ -761,7 +761,7 @@ lost_sync:
   dec->current_buf_offset = dec->input_offset -
       gst_adapter_available (dec->input_adapter);
 
-  ret = klass->parse_data (dec, buf, at_eos);
+  ret = klass->parse_data (dec, buf, at_eos, dec->current_frame);
   if (ret != GST_FLOW_OK)
     return ret;
 
@@ -836,16 +836,14 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
     gst_base_video_decoder_flush (base_video_decoder);
   }
 
-  if (base_video_decoder->current_frame == NULL) {
-    base_video_decoder->current_frame =
-        gst_base_video_decoder_new_frame (base_video_decoder);
-  }
-
   base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);
   if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
     gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
   }
 
+  if (!base_video_decoder->current_frame)
+    base_video_decoder->current_frame =
+        gst_base_video_decoder_new_frame (base_video_decoder);
+
   if (base_video_decoder->packetized) {
     base_video_decoder->current_frame->sink_buffer = buf;
 
@@ -892,6 +890,8 @@ gst_base_video_decoder_start (GstBaseVideoDecoder * base_video_decoder)
 
   gst_base_video_decoder_reset (base_video_decoder);
 
+  gst_segment_init (&base_video_decoder->segment, GST_FORMAT_TIME);
+
   if (base_video_decoder_class->start)
     return base_video_decoder_class->start (base_video_decoder);
 
@@ -1175,7 +1175,7 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
 
   /* do something with frame */
   ret = klass->handle_frame (base_video_decoder, frame, deadline);
-  if (!GST_FLOW_IS_SUCCESS (ret)) {
+  if (ret != GST_FLOW_OK) {
     GST_DEBUG ("flow error!");
   }
 
@@ -1219,13 +1219,6 @@ gst_base_video_decoder_lost_sync (GstBaseVideoDecoder * base_video_decoder)
   base_video_decoder->have_sync = FALSE;
 }
 
-GstVideoFrame *
-gst_base_video_decoder_get_current_frame (GstBaseVideoDecoder *
-    base_video_decoder)
-{
-  return base_video_decoder->current_frame;
-}
-
 /* GObject vmethod implementations */
 static void
 gst_base_video_decoder_get_property (GObject * object, guint property_id,
 
@@ -1362,11 +1355,6 @@ gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder,
 
   base_video_decoder->input_adapter = gst_adapter_new ();
 
-  gst_segment_init (&base_video_decoder->segment, GST_FORMAT_TIME);
-
-  base_video_decoder->current_frame =
-      gst_base_video_decoder_new_frame (base_video_decoder);
-
   /* properties */
   base_video_decoder->packetized = FALSE;
   base_video_decoder->sink_clipping = TRUE;
 
@@ -140,7 +140,7 @@ struct _GstBaseVideoDecoderClass
       (GstBaseVideoDecoder *coder, GstAdapter *adapter, guint *size, gboolean at_eos);
 
   GstFlowReturn (*parse_data) (GstBaseVideoDecoder *decoder,
-      GstBuffer *buf, gboolean at_eos);
+      GstBuffer *buf, gboolean at_eos, GstVideoFrame *frame);
 
 
   GstVideoFrame *(*create_frame) (GstBaseVideoDecoder *coder);
 
@@ -153,9 +153,6 @@ struct _GstBaseVideoDecoderClass
 
 GType gst_base_video_decoder_get_type (void);
 
-GstVideoFrame *gst_base_video_decoder_get_current_frame (GstBaseVideoDecoder
-    *base_video_decoder);
-
 GstVideoFrame *gst_base_video_decoder_get_frame (GstBaseVideoDecoder *coder,
     gint frame_number);
 GstVideoFrame *gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder *coder);
 
@@ -419,8 +419,8 @@ gst_vdp_h264_dec_create_bitstream_buffers (GstVdpH264Dec * h264_dec,
   else {
     guint i;
 
-    bufs = g_new (VdpBitstreamBuffer, h264_frame->slices->len * 2);
-    *n_bufs = h264_frame->slices->len * 2;
+    bufs = g_new (VdpBitstreamBuffer, h264_frame->slices->len);
+    *n_bufs = h264_frame->slices->len;
 
     for (i = 0; i < h264_frame->slices->len; i++) {
       GstBuffer *buf;
 
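Dropping the "* 2" means exactly one VdpBitstreamBuffer entry is allocated per collected slice. Below is a hedged editorial sketch of what the loop that follows presumably fills in, assuming slices is a GPtrArray of GstBuffer; only the VdpBitstreamBuffer field names come from the public VDPAU API, the function and its shape are illustrative:

/* Illustrative only: one bitstream buffer per stored slice. */
static void
fill_bitstream_buffers_sketch (GstH264Frame * h264_frame,
    VdpBitstreamBuffer ** out_bufs, guint * n_bufs)
{
  VdpBitstreamBuffer *bufs;
  guint i;

  bufs = g_new (VdpBitstreamBuffer, h264_frame->slices->len);
  *n_bufs = h264_frame->slices->len;

  for (i = 0; i < h264_frame->slices->len; i++) {
    GstBuffer *buf = g_ptr_array_index (h264_frame->slices, i);

    bufs[i].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
    bufs[i].bitstream = GST_BUFFER_DATA (buf);
    bufs[i].bitstream_bytes = GST_BUFFER_SIZE (buf);
  }

  *out_bufs = bufs;
}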
@@ -454,7 +454,7 @@ gst_vdp_h264_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
 
   GST_DEBUG ("handle_frame");
 
-  h264_frame = (GstH264Frame *) frame;
+  h264_frame = GST_H264_FRAME_CAST (frame);
 
   slice = &h264_frame->slice_hdr;
   pic = slice->picture;
 
@@ -644,7 +644,7 @@ gst_vdp_h264_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder,
 
 static GstFlowReturn
 gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
-    GstBuffer * buf, gboolean at_eos)
+    GstBuffer * buf, gboolean at_eos, GstVideoFrame * frame)
 {
   GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
   GstBitReader reader;
 
@@ -655,7 +655,6 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
   guint size;
   gint i;
 
-  GstVideoFrame *frame;
   GstFlowReturn ret = GST_FLOW_OK;
 
   GST_MEMDUMP ("data", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
 
@@ -696,8 +695,6 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
     i--;
   }
 
-  frame = gst_base_video_decoder_get_current_frame (base_video_decoder);
-
   if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_H264_FRAME_GOT_PRIMARY)) {
     if (nal_unit.type == GST_NAL_SPS || nal_unit.type == GST_NAL_PPS ||
         nal_unit.type == GST_NAL_SEI || nal_unit.type == GST_NAL_AU_DELIMITER ||
 
@@ -32,7 +32,7 @@
  */
 
 #ifdef HAVE_CONFIG_H
-# include <config.h>
+#include <config.h>
 #endif
 
 #include <gst/gst.h>
 
@@ -401,7 +401,7 @@ gst_vdp_mpeg_dec_create_frame (GstBaseVideoDecoder * base_video_decoder)
 
 static GstFlowReturn
 gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
-    GstBuffer * buf, gboolean at_eos)
+    GstBuffer * buf, gboolean at_eos, GstVideoFrame * frame)
 {
   GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);
 
@@ -417,8 +417,7 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
   /* start_code */
   gst_bit_reader_get_bits_uint8 (&b_reader, &start_code, 8);
 
-  mpeg_frame = (GstVdpMpegFrame *)
-      gst_base_video_decoder_get_current_frame (base_video_decoder);
+  mpeg_frame = GST_VDP_MPEG_FRAME_CAST (frame);
 
   if (start_code >= MPEG_PACKET_SLICE_MIN
       && start_code <= MPEG_PACKET_SLICE_MAX) {
 
@@ -511,6 +510,9 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
     gst_buffer_unref (buf);
   }
 
+  if (at_eos && mpeg_frame->slices)
+    ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE, NULL);
+
 done:
   mpeg_dec->prev_packet = start_code;
 
@@ -45,7 +45,8 @@ gst_vdp_mpeg_frame_new (void)
 {
   GstVdpMpegFrame *frame;
 
-  frame = (GstVdpMpegFrame *) gst_mini_object_new (GST_TYPE_VDP_MPEG_FRAME);
+  frame =
+      GST_VDP_MPEG_FRAME_CAST (gst_mini_object_new (GST_TYPE_VDP_MPEG_FRAME));
 
   return frame;
 }
 
@@ -27,9 +27,10 @@
 
 #include "../basevideodecoder/gstvideoframe.h"
 
-#define GST_TYPE_VDP_MPEG_FRAME      (gst_vdp_mpeg_frame_get_type())
-#define GST_IS_VDP_MPEG_FRAME(obj)   (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_MPEG_FRAME))
-#define GST_VDP_MPEG_FRAME(obj)      (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_MPEG_FRAME, GstVdpMpegFrame))
+#define GST_TYPE_VDP_MPEG_FRAME         (gst_vdp_mpeg_frame_get_type())
+#define GST_IS_VDP_MPEG_FRAME(obj)      (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_MPEG_FRAME))
+#define GST_VDP_MPEG_FRAME(obj)         (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_MPEG_FRAME, GstVdpMpegFrame))
+#define GST_VDP_MPEG_FRAME_CAST(obj)    ((GstVdpMpegFrame *)obj)
 
 typedef struct _GstVdpMpegStreamInfo GstVdpMpegStreamInfo;
 
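The new GST_VDP_MPEG_FRAME_CAST is a plain C cast, unlike the type-checked GST_VDP_MPEG_FRAME macro above it, and the decoder uses it in the parse path where the frame type is already known. A quick editorial illustration of the difference:

/* "frame" stands for any pointer known to wrap a GstVdpMpegFrame. */
GstVdpMpegFrame *checked = GST_VDP_MPEG_FRAME (frame);     /* GObject-checked cast */
GstVdpMpegFrame *fast = GST_VDP_MPEG_FRAME_CAST (frame);   /* expands to ((GstVdpMpegFrame *) frame) */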
@@ -47,7 +47,7 @@ struct _GstMpeg4Frame
   GstBuffer *gov_buf;
   GstBuffer *vop_buf;
 
-  GstClockTime vop_time;
+  guint32 vop_time;
 };
 
 struct _GstMpeg4FrameClass
 
@@ -35,6 +35,8 @@
 #include <config.h>
 #endif
 
+#include <math.h>
+
 #include <gst/gst.h>
 #include <vdpau/vdpau.h>
 #include <string.h>
 
@@ -84,25 +86,22 @@ gst_vdp_mpeg4_dec_fill_info (GstVdpMpeg4Dec * mpeg4_dec,
   }
 
   if (vop->coding_type == B_VOP) {
-    guint32 p_field_period;
-    gint32 trd, trb;
+    guint32 trd_time, trb_time;
 
-    p_field_period = mpeg4_dec->f_frame->vop_time / mpeg4_dec->tframe;
+    trd_time = mpeg4_dec->b_frame->vop_time - mpeg4_dec->f_frame->vop_time;
+    trb_time = mpeg4_frame->vop_time - mpeg4_dec->f_frame->vop_time;
 
-    trd =
-        2 * (mpeg4_dec->b_frame->vop_time / mpeg4_dec->tframe - p_field_period);
-    trb = 2 * (mpeg4_frame->vop_time / mpeg4_dec->tframe - p_field_period);
+    info.trd[0] = trd_time;
+    info.trb[0] = trb_time;
 
-    info.trd[0] = trd;
-    info.trd[1] = trd;
-    info.trb[0] = trb;
-    info.trb[1] = trb;
+    info.trd[1] = round ((double) trd_time / (double) mpeg4_dec->tframe);
+    info.trb[1] = round ((double) trb_time / (double) mpeg4_dec->tframe);
 
     /* backward reference */
     if (mpeg4_dec->b_frame) {
-      info.forward_reference =
+      info.backward_reference =
           GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME (mpeg4_dec->
-              f_frame)->src_buffer)->surface;
+              b_frame)->src_buffer)->surface;
     }
   }
 
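With vop_time now kept in ticks of the VOL's vop_time_increment_resolution (see the handle_frame hunk below), the B-VOP temporal references reduce to plain tick differences, and trd[1]/trb[1] express the same distances in frame periods of tframe ticks. A worked editorial example with assumed numbers:

/* Assumed values, not taken from the patch: forward reference at
 * vop_time = 10, backward reference at 16, current B-VOP at 12,
 * tframe (distance between reference VOPs) = 2, all in ticks.
 *
 *   trd_time = 16 - 10 = 6;        trb_time = 12 - 10 = 2;
 *
 *   info.trd[0] = 6;               info.trb[0] = 2;
 *   info.trd[1] = round (6.0 / 2.0) = 3;
 *   info.trb[1] = round (2.0 / 2.0) = 1;
 */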
@@ -235,13 +234,11 @@ gst_vdp_mpeg4_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
   }
 
   /* calculate vop time */
-  mpeg4_frame->vop_time = vop.modulo_time_base * GST_SECOND +
-      gst_util_uint64_scale (vop.time_increment, GST_SECOND,
-      vol->vop_time_increment_resolution);
-  GST_DEBUG ("vop_time: %" GST_TIME_FORMAT,
-      GST_TIME_ARGS (mpeg4_frame->vop_time));
+  mpeg4_frame->vop_time =
+      vop.modulo_time_base * vol->vop_time_increment_resolution +
+      vop.time_increment;
 
-  if (mpeg4_dec->tframe == GST_CLOCK_TIME_NONE && vop.coding_type == B_VOP)
+  if (mpeg4_dec->tframe == -1 && vop.coding_type == B_VOP)
     mpeg4_dec->tframe = mpeg4_frame->vop_time - mpeg4_dec->f_frame->vop_time;
 
   if (vop.coding_type != B_VOP) {
 
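vop_time switches from a GstClockTime to a tick count in the VOL's time base, which is what makes the integer trd/trb arithmetic above (and the tframe difference) line up. A small editorial example with assumed values:

/* Assumed values: vop_time_increment_resolution = 25,
 * modulo_time_base = 2, time_increment = 7.
 *
 * old: vop_time = 2 * GST_SECOND
 *                 + gst_util_uint64_scale (7, GST_SECOND, 25)  = 2.28 s
 * new: vop_time = 2 * 25 + 7                                   = 57 ticks
 *
 * tframe is now a tick difference in the same unit, hence the -1
 * sentinel below instead of GST_CLOCK_TIME_NONE.
 */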
@@ -285,7 +282,7 @@ gst_vdp_mpeg4_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
 
 static GstFlowReturn
 gst_vdp_mpeg4_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
-    GstBuffer * buf, gboolean at_eos)
+    GstBuffer * buf, gboolean at_eos, GstVideoFrame * frame)
 {
   GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);
   guint8 start_code;
 
@@ -299,9 +296,7 @@ gst_vdp_mpeg4_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
   /* start_code */
   READ_UINT8 (&reader, start_code, 8);
 
-  mpeg4_frame =
-      GST_MPEG4_FRAME_CAST (gst_base_video_decoder_get_current_frame
-      (base_video_decoder));
+  mpeg4_frame = GST_MPEG4_FRAME_CAST (frame);
 
   /* collect packages */
   if (start_code == MPEG4_PACKET_VOS) {
 
@@ -424,7 +419,7 @@ gst_vdp_mpeg4_dec_start (GstBaseVideoDecoder * base_video_decoder)
   GstVdpMpeg4Dec *mpeg4_dec = GST_VDP_MPEG4_DEC (base_video_decoder);
 
   mpeg4_dec->is_configured = FALSE;
-  mpeg4_dec->tframe = GST_CLOCK_TIME_NONE;
+  mpeg4_dec->tframe = -1;
 
   mpeg4_dec->b_frame = NULL;
   mpeg4_dec->f_frame = NULL;
 
@@ -45,7 +45,7 @@ struct _GstVdpMpeg4Dec
 
   gboolean is_configured;
   Mpeg4VideoObjectLayer vol;
-  GstClockTime tframe;
+  guint32 tframe;
 
   GstMpeg4Frame *f_frame, *b_frame;
 };
 
@@ -74,6 +74,10 @@ mpeg4_util_parse_VOP (GstBuffer * buf, Mpeg4VideoObjectLayer * vol,
   /* set default values */
   vop->modulo_time_base = 0;
   vop->rounding_type = 0;
+  vop->top_field_first = 1;
+  vop->alternate_vertical_scan_flag = 0;
+  vop->fcode_forward = 1;
+  vop->fcode_backward = 1;
 
   /* start code prefix */
   SKIP (&reader, 24);
 
@@ -366,6 +370,9 @@ mpeg4_util_parse_VOL (GstBuffer * buf, Mpeg4VisualObject * vo,
     if (!mpeg4_util_parse_quant (&reader, vol->non_intra_quant_mat,
             default_non_intra_quant_mat))
       goto error;
+  } else {
+    memset (&vol->intra_quant_mat, 0, 64);
+    memset (&vol->non_intra_quant_mat, 0, 64);
   }
 
   if (vol->verid != 0x1)