d3d11decoder: Move common logic to decoder helper object

We can remove duplicated code across the codec-specific decoders by using the GstCodecPicture struct.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/5285>
Authored by Seungha Yang on 2023-09-05 23:06:19 +09:00; committed by GStreamer Marge Bot
parent 97fc02cfe3
commit 6cf6c73712
8 changed files with 190 additions and 601 deletions
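
In short: every codec subclass used to acquire an output view GstBuffer itself, attach it to its picture as user data, look the ID3D11VideoDecoderOutputView back up before decoding, and then negotiate, copy and finish the frame in its output_picture vfunc. That common logic now lives in GstD3D11Decoder, keyed off the GstCodecPicture base struct, so the per-codec vfuncs shrink to thin wrappers. A minimal sketch of the resulting pattern, using hypothetical gst_d3d11_foo_dec_* / GstFoo* placeholders for the real AV1/H.264/H.265/MPEG-2/VP8/VP9 decoders shown below; the helper functions are the ones introduced by this commit:

static GstFlowReturn
gst_d3d11_foo_dec_new_picture (GstFooDecoder * decoder,
    GstVideoCodecFrame * frame, GstFooPicture * picture)
{
  GstD3D11FooDec *self = GST_D3D11_FOO_DEC (decoder);

  /* Acquires a view buffer from the helper's internal pool and attaches it
   * to the picture via gst_codec_picture_set_user_data() */
  return gst_d3d11_decoder_new_picture (self->inner->d3d11_decoder,
      GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
}

static GstFlowReturn
gst_d3d11_foo_dec_end_picture (GstFooDecoder * decoder, GstFooPicture * picture)
{
  GstD3D11FooDec *self = GST_D3D11_FOO_DEC (decoder);
  GstD3D11DecodeInputStreamArgs input_args;

  memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
  /* ... fill input_args with DXVA picture parameters and bitstream data ... */

  /* The helper resolves the ID3D11VideoDecoderOutputView from the picture's
   * user data, so the subclass no longer needs its own lookup */
  return gst_d3d11_decoder_decode_picture (self->inner->d3d11_decoder,
      GST_CODEC_PICTURE (picture), &input_args);
}

static GstFlowReturn
gst_d3d11_foo_dec_output_picture (GstFooDecoder * decoder,
    GstVideoCodecFrame * frame, GstFooPicture * picture)
{
  GstD3D11FooDec *self = GST_D3D11_FOO_DEC (decoder);

  /* Re-negotiation on resolution change or discont_state, direct rendering
   * vs. copy, buffer flags and finish_frame()/release_frame() are all
   * handled by the helper; codecs without interlace flags pass 0 */
  return gst_d3d11_decoder_output_picture (self->inner->d3d11_decoder,
      GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
      0, self->inner->width, self->inner->height);
}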

File: gstd3d11av1dec.cpp

@ -704,23 +704,9 @@ gst_d3d11_av1_dec_new_picture (GstAV1Decoder * decoder,
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
GstD3D11AV1DecInner *inner = self->inner;
GstBuffer *view_buffer;
view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
return GST_FLOW_FLUSHING;
}
GST_LOG_OBJECT (self, "New output view buffer %" GST_PTR_FORMAT, view_buffer);
gst_av1_picture_set_user_data (picture,
view_buffer, (GDestroyNotify) gst_buffer_unref);
GST_LOG_OBJECT (self, "New AV1 picture %p", picture);
return GST_FLOW_OK;
return gst_d3d11_decoder_new_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
}
static GstAV1Picture *
@ -749,37 +735,13 @@ gst_d3d11_av1_dec_duplicate_picture (GstAV1Decoder * decoder,
return new_picture;
}
static ID3D11VideoDecoderOutputView *
gst_d3d11_av1_dec_get_output_view_from_picture (GstD3D11AV1Dec * self,
GstAV1Picture * picture, guint8 * view_id)
{
GstD3D11AV1DecInner *inner = self->inner;
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
view_buffer = (GstBuffer *) gst_av1_picture_get_user_data (picture);
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
return NULL;
}
view =
gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
return NULL;
}
return view;
}
static GstFlowReturn
gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
GstAV1Picture * picture, GstAV1Dpb * dpb)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
GstD3D11AV1DecInner *inner = self->inner;
GstD3D11Decoder *d3d11_decoder = inner->d3d11_decoder;
const GstAV1SequenceHeaderOBU *seq_hdr = &inner->seq_hdr;
const GstAV1FrameHeaderOBU *frame_hdr = &picture->frame_hdr;
ID3D11VideoDecoderOutputView *view;
@ -787,8 +749,8 @@ gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
guint8 view_id = 0xff;
guint i, j;
view = gst_d3d11_av1_dec_get_output_view_from_picture (self, picture,
&view_id);
view = gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
GST_CODEC_PICTURE (picture), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_OK;
@ -899,8 +861,9 @@ gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
ID3D11VideoDecoderOutputView *other_view;
guint8 other_view_id = 0xff;
other_view = gst_d3d11_av1_dec_get_output_view_from_picture (self,
other_pic, &other_view_id);
other_view =
gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
GST_CODEC_PICTURE (other_pic), &other_view_id);
if (!other_view) {
GST_ERROR_OBJECT (self,
"current picture does not have output view handle");
@ -1158,8 +1121,6 @@ gst_d3d11_av1_dec_end_picture (GstAV1Decoder * decoder, GstAV1Picture * picture)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
GstD3D11AV1DecInner *inner = self->inner;
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;
size_t bitstream_buffer_size;
size_t bitstream_pos;
GstD3D11DecodeInputStreamArgs input_args;
@ -1169,13 +1130,6 @@ gst_d3d11_av1_dec_end_picture (GstAV1Decoder * decoder, GstAV1Picture * picture)
return GST_FLOW_ERROR;
}
view = gst_d3d11_av1_dec_get_output_view_from_picture (self, picture,
&view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
}
memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
bitstream_pos = inner->bitstream_buffer.size ();
@ -1201,8 +1155,8 @@ gst_d3d11_av1_dec_end_picture (GstAV1Decoder * decoder, GstAV1Picture * picture)
input_args.bitstream = &inner->bitstream_buffer[0];
input_args.bitstream_size = inner->bitstream_buffer.size ();
return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
view, &input_args);
return gst_d3d11_decoder_decode_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &input_args);
}
static GstFlowReturn
@ -1211,36 +1165,10 @@ gst_d3d11_av1_dec_output_picture (GstAV1Decoder * decoder,
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
GstD3D11AV1DecInner *inner = self->inner;
GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
GstBuffer *view_buffer;
GST_LOG_OBJECT (self, "Outputting picture %p, %dx%d", picture,
picture->frame_hdr.render_width, picture->frame_hdr.render_height);
view_buffer = (GstBuffer *) gst_av1_picture_get_user_data (picture);
if (!view_buffer) {
GST_ERROR_OBJECT (self, "Could not get output view");
goto error;
}
if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
GST_CODEC_PICTURE (picture)->discont_state,
picture->frame_hdr.render_width, picture->frame_hdr.render_height,
view_buffer, &frame->output_buffer)) {
GST_ERROR_OBJECT (self, "Failed to copy buffer");
goto error;
}
gst_av1_picture_unref (picture);
return gst_video_decoder_finish_frame (vdec, frame);
error:
gst_av1_picture_unref (picture);
gst_video_decoder_release_frame (vdec, frame);
return GST_FLOW_ERROR;
return gst_d3d11_decoder_output_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
0, picture->frame_hdr.render_width, picture->frame_hdr.render_height);
}
void

File: gstd3d11decoder.cpp

@ -1226,10 +1226,10 @@ gst_d3d11_decoder_submit_decoder_buffers (GstD3D11Decoder * decoder,
}
GstFlowReturn
gst_d3d11_decoder_decode_frame (GstD3D11Decoder * decoder,
ID3D11VideoDecoderOutputView * output_view,
GstD3D11DecodeInputStreamArgs * input_args)
gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
GstCodecPicture * picture, GstD3D11DecodeInputStreamArgs * input_args)
{
ID3D11VideoDecoderOutputView *output_view;
guint d3d11_buffer_size;
gpointer d3d11_buffer;
D3D11_VIDEO_DECODER_BUFFER_DESC buffer_desc[4];
@ -1237,9 +1237,16 @@ gst_d3d11_decoder_decode_frame (GstD3D11Decoder * decoder,
GstFlowReturn ret = GST_FLOW_OK;
g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), GST_FLOW_ERROR);
g_return_val_if_fail (output_view != nullptr, GST_FLOW_ERROR);
g_return_val_if_fail (picture != nullptr, GST_FLOW_ERROR);
g_return_val_if_fail (input_args != nullptr, GST_FLOW_ERROR);
output_view = gst_d3d11_decoder_get_output_view_from_picture (decoder,
picture, nullptr);
if (!output_view) {
GST_ERROR_OBJECT (decoder, "No output view attached");
return GST_FLOW_ERROR;
}
memset (buffer_desc, 0, sizeof (buffer_desc));
buffer_desc[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
@ -1387,14 +1394,14 @@ error:
return GST_FLOW_ERROR;
}
GstBuffer *
gst_d3d11_decoder_get_output_view_buffer (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec)
GstFlowReturn
gst_d3d11_decoder_new_picture (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec, GstCodecPicture * picture)
{
GstBuffer *buf = NULL;
GstFlowReturn ret;
g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), GST_FLOW_ERROR);
if (!decoder->internal_pool) {
/* Try negotiate again whatever the previous negotiation result was.
@ -1407,15 +1414,14 @@ gst_d3d11_decoder_get_output_view_buffer (GstD3D11Decoder * decoder,
if (!gst_d3d11_decoder_prepare_output_view_pool (decoder)) {
GST_ERROR_OBJECT (videodec, "Failed to setup internal pool");
return NULL;
return GST_FLOW_ERROR;
}
} else if (!gst_buffer_pool_set_active (decoder->internal_pool, TRUE)) {
GST_ERROR_OBJECT (videodec, "Couldn't set active internal pool");
return NULL;
return GST_FLOW_ERROR;
}
ret = gst_buffer_pool_acquire_buffer (decoder->internal_pool, &buf, NULL);
if (ret != GST_FLOW_OK || !buf) {
if (ret != GST_FLOW_FLUSHING) {
GST_ERROR_OBJECT (videodec, "Couldn't get buffer from pool, ret %s",
@ -1424,34 +1430,47 @@ gst_d3d11_decoder_get_output_view_buffer (GstD3D11Decoder * decoder,
GST_DEBUG_OBJECT (videodec, "We are flushing");
}
return NULL;
return ret;
}
if (!gst_d3d11_decoder_ensure_output_view (decoder, buf)) {
GST_ERROR_OBJECT (videodec, "Output view unavailable");
gst_buffer_unref (buf);
return NULL;
return GST_FLOW_ERROR;
}
return buf;
gst_codec_picture_set_user_data (picture,
buf, (GDestroyNotify) gst_buffer_unref);
return GST_FLOW_OK;
}
ID3D11VideoDecoderOutputView *
gst_d3d11_decoder_get_output_view_from_buffer (GstD3D11Decoder * decoder,
GstBuffer * buffer, guint8 * index)
gst_d3d11_decoder_get_output_view_from_picture (GstD3D11Decoder * decoder,
GstCodecPicture * picture, guint8 * index)
{
GstMemory *mem;
GstD3D11Memory *dmem;
ID3D11VideoDecoderOutputView *view;
GstBuffer *buffer;
g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), NULL);
g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);
g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), nullptr);
g_return_val_if_fail (picture, nullptr);
if (index)
*index = 0xff;
buffer = (GstBuffer *) gst_codec_picture_get_user_data (picture);
if (!buffer) {
GST_DEBUG_OBJECT (decoder, "picture without attached user data");
return nullptr;
}
mem = gst_buffer_peek_memory (buffer, 0);
if (!gst_is_d3d11_memory (mem)) {
GST_WARNING_OBJECT (decoder, "Not a d3d11 memory");
return NULL;
return nullptr;
}
dmem = (GstD3D11Memory *) mem;
@ -1460,7 +1479,7 @@ gst_d3d11_decoder_get_output_view_from_buffer (GstD3D11Decoder * decoder,
if (!view) {
GST_ERROR_OBJECT (decoder, "Decoder output view is unavailable");
return NULL;
return nullptr;
}
if (index) {
@ -1595,20 +1614,25 @@ gst_d3d11_decoder_crop_and_copy_buffer (GstD3D11Decoder * self,
return TRUE;
}
gboolean
gst_d3d11_decoder_process_output (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec, GstVideoCodecState * input_state,
gint display_width, gint display_height,
GstBuffer * decoder_buffer, GstBuffer ** output)
GstFlowReturn
gst_d3d11_decoder_output_picture (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec, GstVideoCodecFrame * frame,
GstCodecPicture * picture, guint buffer_flags,
gint display_width, gint display_height)
{
g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
g_return_val_if_fail (GST_IS_VIDEO_DECODER (videodec), FALSE);
g_return_val_if_fail (GST_IS_BUFFER (decoder_buffer), FALSE);
g_return_val_if_fail (output != NULL, FALSE);
GstFlowReturn ret = GST_FLOW_OK;
GstBuffer *view_buffer;
if (input_state) {
if (picture->discont_state) {
g_clear_pointer (&decoder->input_state, gst_video_codec_state_unref);
decoder->input_state = gst_video_codec_state_ref (input_state);
decoder->input_state = gst_video_codec_state_ref (picture->discont_state);
}
view_buffer = (GstBuffer *) gst_codec_picture_get_user_data (picture);
if (!view_buffer) {
GST_ERROR_OBJECT (decoder, "Could not get output view");
ret = GST_FLOW_ERROR;
goto error;
}
if (display_width != GST_VIDEO_INFO_WIDTH (&decoder->output_info) ||
@ -1622,29 +1646,31 @@ gst_d3d11_decoder_process_output (GstD3D11Decoder * decoder,
if (!gst_video_decoder_negotiate (videodec)) {
GST_ERROR_OBJECT (videodec, "Failed to re-negotiate with new frame size");
return FALSE;
ret = GST_FLOW_NOT_NEGOTIATED;
goto error;
}
} else if (input_state) {
} else if (picture->discont_state) {
if (!gst_video_decoder_negotiate (videodec)) {
GST_ERROR_OBJECT (videodec, "Could not re-negotiate with updated state");
return FALSE;
ret = GST_FLOW_NOT_NEGOTIATED;
goto error;
}
}
if (gst_d3d11_decoder_can_direct_render (decoder, videodec, decoder_buffer,
if (gst_d3d11_decoder_can_direct_render (decoder, videodec, view_buffer,
display_width, display_height)) {
GstMemory *mem;
mem = gst_buffer_peek_memory (decoder_buffer, 0);
mem = gst_buffer_peek_memory (view_buffer, 0);
GST_MINI_OBJECT_FLAG_SET (mem, GST_D3D11_MEMORY_TRANSFER_NEED_DOWNLOAD);
if (decoder->need_crop) {
GstVideoCropMeta *crop_meta;
decoder_buffer = gst_buffer_make_writable (decoder_buffer);
crop_meta = gst_buffer_get_video_crop_meta (decoder_buffer);
view_buffer = gst_buffer_make_writable (view_buffer);
crop_meta = gst_buffer_get_video_crop_meta (view_buffer);
if (!crop_meta)
crop_meta = gst_buffer_add_video_crop_meta (decoder_buffer);
crop_meta = gst_buffer_add_video_crop_meta (view_buffer);
crop_meta->x = decoder->offset_x;
crop_meta->y = decoder->offset_y;
@ -1654,19 +1680,32 @@ gst_d3d11_decoder_process_output (GstD3D11Decoder * decoder,
GST_TRACE_OBJECT (decoder, "Attaching crop meta");
}
*output = gst_buffer_ref (decoder_buffer);
return TRUE;
}
*output = gst_video_decoder_allocate_output_buffer (videodec);
if (*output == NULL) {
frame->output_buffer = gst_buffer_ref (view_buffer);
} else {
frame->output_buffer = gst_video_decoder_allocate_output_buffer (videodec);
if (!frame->output_buffer) {
GST_ERROR_OBJECT (videodec, "Couldn't allocate output buffer");
return FALSE;
ret = GST_FLOW_ERROR;
goto error;
}
return gst_d3d11_decoder_crop_and_copy_buffer (decoder, decoder_buffer,
*output);
if (!gst_d3d11_decoder_crop_and_copy_buffer (decoder, view_buffer,
frame->output_buffer)) {
ret = GST_FLOW_ERROR;
goto error;
}
}
GST_BUFFER_FLAG_SET (frame->output_buffer, buffer_flags);
gst_codec_picture_unref (picture);
return gst_video_decoder_finish_frame (videodec, frame);
error:
gst_codec_picture_unref (picture);
gst_video_decoder_release_frame (videodec, frame);
return ret;
}
gboolean

File: gstd3d11decoder.h

@ -23,6 +23,7 @@
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/d3d11/gstd3d11.h>
#include <gst/codecs/gstcodecpicture.h>
G_BEGIN_DECLS
@ -83,25 +84,25 @@ gboolean gst_d3d11_decoder_configure (GstD3D11Decoder * decoder,
gint coded_height,
guint dpb_size);
GstFlowReturn gst_d3d11_decoder_decode_frame (GstD3D11Decoder * decoder,
ID3D11VideoDecoderOutputView * output_view,
GstFlowReturn gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
GstCodecPicture * picture,
GstD3D11DecodeInputStreamArgs * input_args);
GstFlowReturn gst_d3d11_decoder_new_picture (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec,
GstCodecPicture * picture);
GstBuffer * gst_d3d11_decoder_get_output_view_buffer (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec);
ID3D11VideoDecoderOutputView * gst_d3d11_decoder_get_output_view_from_buffer (GstD3D11Decoder * decoder,
GstBuffer * buffer,
ID3D11VideoDecoderOutputView * gst_d3d11_decoder_get_output_view_from_picture (GstD3D11Decoder * decoder,
GstCodecPicture * picture,
guint8 * view_id);
gboolean gst_d3d11_decoder_process_output (GstD3D11Decoder * decoder,
GstFlowReturn gst_d3d11_decoder_output_picture (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec,
GstVideoCodecState * in_state,
GstVideoCodecFrame * frame,
GstCodecPicture * picture,
guint buffer_flags,
gint display_width,
gint display_height,
GstBuffer * decoder_buffer,
GstBuffer ** output);
gint display_height);
gboolean gst_d3d11_decoder_negotiate (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec);
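
What remains per-codec is filling the DXVA reference picture lists in start_picture/decode_picture; for that, the subclasses below now call the shared gst_d3d11_decoder_get_output_view_from_picture() instead of their private copies. A short sketch, assuming an H.264-style DXVA_PicParams_H264 layout (ref_picture, i, inner and self stand in for the surrounding context; other codecs use their own RefPicList/RefFrameList fields):

guint8 view_id = 0xff;
ID3D11VideoDecoderOutputView *view;

/* ref_picture comes from the codec's DPB */
view = gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
    GST_CODEC_PICTURE (ref_picture), &view_id);
if (view)
  pic_params->RefFrameList[i].Index7Bits = view_id;
else
  GST_ERROR_OBJECT (self, "reference picture does not have output view handle");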

File: gstd3d11h264dec.cpp

@ -480,23 +480,9 @@ gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
GstD3D11H264DecInner *inner = self->inner;
GstBuffer *view_buffer;
view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
return GST_FLOW_FLUSHING;
}
GST_LOG_OBJECT (self, "New output view buffer %" GST_PTR_FORMAT, view_buffer);
gst_h264_picture_set_user_data (picture,
view_buffer, (GDestroyNotify) gst_buffer_unref);
GST_LOG_OBJECT (self, "New h264picture %p", picture);
return GST_FLOW_OK;
return gst_d3d11_decoder_new_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
}
static GstFlowReturn
@ -522,30 +508,6 @@ gst_d3d11_h264_dec_new_field_picture (GstH264Decoder * decoder,
return GST_FLOW_OK;
}
static ID3D11VideoDecoderOutputView *
gst_d3d11_h264_dec_get_output_view_from_picture (GstD3D11H264Dec * self,
GstH264Picture * picture, guint8 * view_id)
{
GstD3D11H264DecInner *inner = self->inner;
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
view_buffer = (GstBuffer *) gst_h264_picture_get_user_data (picture);
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
return NULL;
}
view = gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
return NULL;
}
return view;
}
static void
gst_d3d11_h264_dec_picture_params_from_sps (GstD3D11H264Dec * self,
const GstH264SPS * sps, gboolean field_pic, DXVA_PicParams_H264 * params)
@ -670,8 +632,8 @@ gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
pps = slice->header.pps;
view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture,
&view_id);
view = gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
@ -707,7 +669,8 @@ gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
if (other->second_field)
continue;
gst_d3d11_h264_dec_get_output_view_from_picture (self, other, &id);
gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (other), &id);
pic_params->RefFrameList[j].Index7Bits = id;
if (GST_H264_PICTURE_IS_LONG_TERM_REF (other)) {
@ -813,8 +776,6 @@ gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
GstD3D11H264DecInner *inner = self->inner;
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;
size_t bitstream_buffer_size;
size_t bitstream_pos;
GstD3D11DecodeInputStreamArgs input_args;
@ -827,13 +788,6 @@ gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
return GST_FLOW_ERROR;
}
view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture,
&view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
}
memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
bitstream_pos = inner->bitstream_buffer.size ();
@ -861,8 +815,8 @@ gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
input_args.inverse_quantization_matrix = &inner->iq_matrix;
input_args.inverse_quantization_matrix_size = sizeof (DXVA_Qmatrix_H264);
return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
view, &input_args);
return gst_d3d11_decoder_decode_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &input_args);
}
static GstFlowReturn
@ -871,46 +825,10 @@ gst_d3d11_h264_dec_output_picture (GstH264Decoder * decoder,
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
GstD3D11H264DecInner *inner = self->inner;
GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
GstBuffer *view_buffer;
GST_LOG_OBJECT (self,
"Outputting picture %p (poc %d)", picture, picture->pic_order_cnt);
view_buffer = (GstBuffer *) gst_h264_picture_get_user_data (picture);
if (!view_buffer) {
GST_ERROR_OBJECT (self, "Could not get output view");
goto error;
}
if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
GST_CODEC_PICTURE (picture)->discont_state, inner->width,
inner->height, view_buffer, &frame->output_buffer)) {
GST_ERROR_OBJECT (self, "Failed to copy buffer");
goto error;
}
if (picture->buffer_flags != 0) {
gboolean interlaced =
(picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_INTERLACED) != 0;
gboolean tff = (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_TFF) != 0;
GST_TRACE_OBJECT (self,
"apply buffer flags 0x%x (interlaced %d, top-field-first %d)",
picture->buffer_flags, interlaced, tff);
GST_BUFFER_FLAG_SET (frame->output_buffer, picture->buffer_flags);
}
gst_h264_picture_unref (picture);
return gst_video_decoder_finish_frame (vdec, frame);
error:
gst_h264_picture_unref (picture);
gst_video_decoder_release_frame (vdec, frame);
return GST_FLOW_ERROR;
return gst_d3d11_decoder_output_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
picture->buffer_flags, inner->width, inner->height);
}
void

File: gstd3d11h265dec.cpp

@ -432,23 +432,9 @@ gst_d3d11_h265_dec_new_picture (GstH265Decoder * decoder,
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
GstD3D11H265DecInner *inner = self->inner;
GstBuffer *view_buffer;
view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
return GST_FLOW_FLUSHING;
}
GST_LOG_OBJECT (self, "New output view buffer %" GST_PTR_FORMAT, view_buffer);
gst_h265_picture_set_user_data (picture,
view_buffer, (GDestroyNotify) gst_buffer_unref);
GST_LOG_OBJECT (self, "New h265picture %p", picture);
return GST_FLOW_OK;
return gst_d3d11_decoder_new_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
}
static void
@ -604,30 +590,6 @@ gst_d3d11_h265_dec_fill_picture_params (GstD3D11H265Dec * self,
return TRUE;
}
static ID3D11VideoDecoderOutputView *
gst_d3d11_h265_dec_get_output_view_from_picture (GstD3D11H265Dec * self,
GstH265Picture * picture, guint8 * view_id)
{
GstD3D11H265DecInner *inner = self->inner;
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
view_buffer = (GstBuffer *) gst_h265_picture_get_user_data (picture);
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
return NULL;
}
view = gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
return NULL;
}
return view;
}
static UCHAR
gst_d3d11_h265_dec_get_ref_index (const DXVA_PicParams_HEVC * pic_params,
guint8 view_id)
@ -673,8 +635,8 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
pps = slice->header.pps;
sps = pps->sps;
view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture,
&view_id);
view = gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
@ -700,7 +662,8 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
continue;
}
gst_d3d11_h265_dec_get_output_view_from_picture (self, other, &id);
gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (other), &id);
pic_params->RefPicList[j].Index7Bits = id;
pic_params->RefPicList[j].AssociatedFlag = other->long_term;
pic_params->PicOrderCntValList[j] = other->pic_order_cnt;
@ -719,8 +682,9 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
if (other) {
ID3D11VideoDecoderOutputView *other_view;
other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
other, &other_view_id);
other_view =
gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (other), &other_view_id);
if (other_view)
id = gst_d3d11_h265_dec_get_ref_index (pic_params, other_view_id);
@ -740,8 +704,9 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
if (other) {
ID3D11VideoDecoderOutputView *other_view;
other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
other, &other_view_id);
other_view =
gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (other), &other_view_id);
if (other_view)
id = gst_d3d11_h265_dec_get_ref_index (pic_params, other_view_id);
@ -761,8 +726,10 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
if (other) {
ID3D11VideoDecoderOutputView *other_view;
other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
other, &other_view_id);
other_view =
gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (other), &other_view_id);
if (other_view)
id = gst_d3d11_h265_dec_get_ref_index (pic_params, other_view_id);
@ -857,8 +824,6 @@ gst_d3d11_h265_dec_end_picture (GstH265Decoder * decoder,
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
GstD3D11H265DecInner *inner = self->inner;
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;
size_t bitstream_buffer_size;
size_t bitstream_pos;
GstD3D11DecodeInputStreamArgs input_args;
@ -871,13 +836,6 @@ gst_d3d11_h265_dec_end_picture (GstH265Decoder * decoder,
return GST_FLOW_ERROR;
}
view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture,
&view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
}
memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
bitstream_pos = inner->bitstream_buffer.size ();
@ -908,8 +866,8 @@ gst_d3d11_h265_dec_end_picture (GstH265Decoder * decoder,
input_args.inverse_quantization_matrix_size = sizeof (DXVA_Qmatrix_HEVC);
}
return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
view, &input_args);
return gst_d3d11_decoder_decode_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &input_args);
}
static GstFlowReturn
@ -918,37 +876,10 @@ gst_d3d11_h265_dec_output_picture (GstH265Decoder * decoder,
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
GstD3D11H265DecInner *inner = self->inner;
GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
GstBuffer *view_buffer;
GST_LOG_OBJECT (self, "Outputting picture %p, poc %d, picture_struct %d, "
"buffer flags 0x%x", picture, picture->pic_order_cnt, picture->pic_struct,
picture->buffer_flags);
view_buffer = (GstBuffer *) gst_h265_picture_get_user_data (picture);
if (!view_buffer) {
GST_ERROR_OBJECT (self, "Could not get output view");
goto error;
}
if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
GST_CODEC_PICTURE (picture)->discont_state, inner->width,
inner->height, view_buffer, &frame->output_buffer)) {
GST_ERROR_OBJECT (self, "Failed to copy buffer");
goto error;
}
GST_BUFFER_FLAG_SET (frame->output_buffer, picture->buffer_flags);
gst_h265_picture_unref (picture);
return gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
error:
gst_h265_picture_unref (picture);
gst_video_decoder_release_frame (vdec, frame);
return GST_FLOW_ERROR;
return gst_d3d11_decoder_output_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
picture->buffer_flags, inner->width, inner->height);
}
void

File: gstd3d11mpeg2dec.cpp

@ -415,23 +415,9 @@ gst_d3d11_mpeg2_dec_new_picture (GstMpeg2Decoder * decoder,
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
GstD3D11Mpeg2DecInner *inner = self->inner;
GstBuffer *view_buffer;
view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
return GST_FLOW_ERROR;
}
GST_LOG_OBJECT (self, "New output view buffer %" GST_PTR_FORMAT, view_buffer);
gst_mpeg2_picture_set_user_data (picture,
view_buffer, (GDestroyNotify) gst_buffer_unref);
GST_LOG_OBJECT (self, "New MPEG2 picture %p", picture);
return GST_FLOW_OK;
return gst_d3d11_decoder_new_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
}
static GstFlowReturn
@ -458,34 +444,6 @@ gst_d3d11_mpeg2_dec_new_field_picture (GstMpeg2Decoder * decoder,
return GST_FLOW_OK;
}
static ID3D11VideoDecoderOutputView *
gst_d3d11_mpeg2_dec_get_output_view_from_picture (GstD3D11Mpeg2Dec * self,
GstMpeg2Picture * picture, guint8 * view_id)
{
GstD3D11Mpeg2DecInner *inner = self->inner;
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
if (!picture)
return NULL;
view_buffer = (GstBuffer *) gst_mpeg2_picture_get_user_data (picture);
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
return NULL;
}
view =
gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
return NULL;
}
return view;
}
static inline WORD
_pack_f_codes (guint8 f_code[2][2])
{
@ -518,6 +476,7 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
GstD3D11Mpeg2DecInner *inner = self->inner;
GstD3D11Decoder *d3d11_decoder = inner->d3d11_decoder;
DXVA_PictureParameters *pic_params = &inner->pic_params;
DXVA_QmatrixData *iq_matrix = &inner->iq_matrix;
ID3D11VideoDecoderOutputView *view;
@ -527,8 +486,8 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
gboolean is_field =
picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
view = gst_d3d11_mpeg2_dec_get_output_view_from_picture (self, picture,
&view_id);
view = gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
GST_CODEC_PICTURE (picture), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
@ -546,8 +505,8 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
case GST_MPEG_VIDEO_PICTURE_TYPE_B:{
if (next_picture) {
other_view =
gst_d3d11_mpeg2_dec_get_output_view_from_picture (self,
next_picture, &other_view_id);
gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
GST_CODEC_PICTURE (next_picture), &other_view_id);
if (other_view)
pic_params->wBackwardRefPictureIndex = other_view_id;
}
@ -556,8 +515,8 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
case GST_MPEG_VIDEO_PICTURE_TYPE_P:{
if (prev_picture) {
other_view =
gst_d3d11_mpeg2_dec_get_output_view_from_picture (self,
prev_picture, &other_view_id);
gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
GST_CODEC_PICTURE (prev_picture), &other_view_id);
if (other_view)
pic_params->wForwardRefPictureIndex = other_view_id;
}
@ -677,8 +636,6 @@ gst_d3d11_mpeg2_dec_end_picture (GstMpeg2Decoder * decoder,
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
GstD3D11Mpeg2DecInner *inner = self->inner;
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;
GstD3D11DecodeInputStreamArgs input_args;
gboolean is_field =
picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
@ -689,13 +646,6 @@ gst_d3d11_mpeg2_dec_end_picture (GstMpeg2Decoder * decoder,
return GST_FLOW_ERROR;
}
view = gst_d3d11_mpeg2_dec_get_output_view_from_picture (self, picture,
&view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
}
memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
DXVA_SliceInfo *first = &inner->slice_list[0];
@ -724,8 +674,8 @@ gst_d3d11_mpeg2_dec_end_picture (GstMpeg2Decoder * decoder,
input_args.inverse_quantization_matrix_size = sizeof (DXVA_QmatrixData);
}
return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
view, &input_args);
return gst_d3d11_decoder_decode_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &input_args);
}
static GstFlowReturn
@ -734,45 +684,10 @@ gst_d3d11_mpeg2_dec_output_picture (GstMpeg2Decoder * decoder,
{
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
GstD3D11Mpeg2DecInner *inner = self->inner;
GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
GstBuffer *view_buffer;
GST_LOG_OBJECT (self, "Outputting picture %p", picture);
view_buffer = (GstBuffer *) gst_mpeg2_picture_get_user_data (picture);
if (!view_buffer) {
GST_ERROR_OBJECT (self, "Could not get output view");
goto error;
}
if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
GST_CODEC_PICTURE (picture)->discont_state, inner->width,
inner->height, view_buffer, &frame->output_buffer)) {
GST_ERROR_OBJECT (self, "Failed to copy buffer");
goto error;
}
if (picture->buffer_flags != 0) {
gboolean interlaced =
(picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_INTERLACED) != 0;
gboolean tff = (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_TFF) != 0;
GST_TRACE_OBJECT (self,
"apply buffer flags 0x%x (interlaced %d, top-field-first %d)",
picture->buffer_flags, interlaced, tff);
GST_BUFFER_FLAG_SET (frame->output_buffer, picture->buffer_flags);
}
gst_mpeg2_picture_unref (picture);
return gst_video_decoder_finish_frame (vdec, frame);
error:
gst_mpeg2_picture_unref (picture);
gst_video_decoder_release_frame (vdec, frame);
return GST_FLOW_ERROR;
return gst_d3d11_decoder_output_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
picture->buffer_flags, inner->width, inner->height);
}
void

File: gstd3d11vp8dec.cpp

@ -347,23 +347,9 @@ gst_d3d11_vp8_dec_new_picture (GstVp8Decoder * decoder,
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
GstD3D11Vp8DecInner *inner = self->inner;
GstBuffer *view_buffer;
view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
return GST_FLOW_FLUSHING;
}
GST_LOG_OBJECT (self, "New output view buffer %" GST_PTR_FORMAT, view_buffer);
gst_vp8_picture_set_user_data (picture,
view_buffer, (GDestroyNotify) gst_buffer_unref);
GST_LOG_OBJECT (self, "New VP8 picture %p", picture);
return GST_FLOW_OK;
return gst_d3d11_decoder_new_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
}
static GstFlowReturn
@ -378,31 +364,6 @@ gst_d3d11_vp8_dec_start_picture (GstVp8Decoder * decoder,
return GST_FLOW_OK;
}
static ID3D11VideoDecoderOutputView *
gst_d3d11_vp8_dec_get_output_view_from_picture (GstD3D11Vp8Dec * self,
GstVp8Picture * picture, guint8 * view_id)
{
GstD3D11Vp8DecInner *inner = self->inner;
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
view_buffer = (GstBuffer *) gst_vp8_picture_get_user_data (picture);
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
return NULL;
}
view =
gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
return NULL;
}
return view;
}
static void
gst_d3d11_vp8_dec_copy_frame_params (GstD3D11Vp8Dec * self,
GstVp8Picture * picture, GstVp8Parser * parser, DXVA_PicParams_VP8 * params)
@ -462,12 +423,13 @@ gst_d3d11_vp8_dec_copy_reference_frames (GstD3D11Vp8Dec * self,
DXVA_PicParams_VP8 * params)
{
GstVp8Decoder *decoder = GST_VP8_DECODER (self);
GstD3D11Decoder *d3d11_decoder = self->inner->d3d11_decoder;
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;
if (decoder->alt_ref_picture) {
view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
decoder->alt_ref_picture, &view_id);
view = gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
GST_CODEC_PICTURE (decoder->alt_ref_picture), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "picture does not have output view handle");
return;
@ -479,8 +441,8 @@ gst_d3d11_vp8_dec_copy_reference_frames (GstD3D11Vp8Dec * self,
}
if (decoder->golden_ref_picture) {
view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
decoder->golden_ref_picture, &view_id);
view = gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
GST_CODEC_PICTURE (decoder->golden_ref_picture), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "picture does not have output view handle");
return;
@ -492,8 +454,8 @@ gst_d3d11_vp8_dec_copy_reference_frames (GstD3D11Vp8Dec * self,
}
if (decoder->last_picture) {
view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
decoder->last_picture, &view_id);
view = gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
GST_CODEC_PICTURE (decoder->last_picture), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "picture does not have output view handle");
return;
@ -545,8 +507,9 @@ gst_d3d11_vp8_dec_decode_picture (GstVp8Decoder * decoder,
guint8 view_id = 0xff;
const GstVp8FrameHdr *frame_hdr = &picture->frame_hdr;
view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
picture, &view_id);
view = gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
@ -579,8 +542,6 @@ gst_d3d11_vp8_dec_end_picture (GstVp8Decoder * decoder, GstVp8Picture * picture)
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
GstD3D11Vp8DecInner *inner = self->inner;
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;
size_t bitstream_buffer_size;
size_t bitstream_pos;
GstD3D11DecodeInputStreamArgs input_args;
@ -590,13 +551,6 @@ gst_d3d11_vp8_dec_end_picture (GstVp8Decoder * decoder, GstVp8Picture * picture)
return GST_FLOW_ERROR;
}
view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
picture, &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
}
memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
bitstream_pos = inner->bitstream_buffer.size ();
@ -620,8 +574,8 @@ gst_d3d11_vp8_dec_end_picture (GstVp8Decoder * decoder, GstVp8Picture * picture)
input_args.bitstream = &inner->bitstream_buffer[0];
input_args.bitstream_size = inner->bitstream_buffer.size ();
return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
view, &input_args);
return gst_d3d11_decoder_decode_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &input_args);
}
static GstFlowReturn
@ -630,36 +584,10 @@ gst_d3d11_vp8_dec_output_picture (GstVp8Decoder * decoder,
{
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
GstD3D11Vp8DecInner *inner = self->inner;
GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
GstBuffer *view_buffer;
g_assert (picture->frame_hdr.show_frame);
GST_LOG_OBJECT (self, "Outputting picture %p", picture);
view_buffer = (GstBuffer *) gst_vp8_picture_get_user_data (picture);
if (!view_buffer) {
GST_ERROR_OBJECT (self, "Could not get output view");
goto error;
}
if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
GST_CODEC_PICTURE (picture)->discont_state, inner->width,
inner->height, view_buffer, &frame->output_buffer)) {
GST_ERROR_OBJECT (self, "Failed to copy buffer");
goto error;
}
gst_vp8_picture_unref (picture);
return gst_video_decoder_finish_frame (vdec, frame);
error:
gst_vp8_picture_unref (picture);
gst_video_decoder_release_frame (vdec, frame);
return GST_FLOW_ERROR;
return gst_d3d11_decoder_output_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
0, inner->width, inner->height);
}
void

File: gstd3d11vp9dec.cpp

@ -405,23 +405,9 @@ gst_d3d11_vp9_dec_new_picture (GstVp9Decoder * decoder,
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;
GstBuffer *view_buffer;
view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
return GST_FLOW_FLUSHING;
}
GST_LOG_OBJECT (self, "New output view buffer %" GST_PTR_FORMAT, view_buffer);
gst_vp9_picture_set_user_data (picture,
view_buffer, (GDestroyNotify) gst_buffer_unref);
GST_LOG_OBJECT (self, "New VP9 picture %p", picture);
return GST_FLOW_OK;
return gst_d3d11_decoder_new_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
}
static GstVp9Picture *
@ -463,31 +449,6 @@ gst_d3d11_vp9_dec_start_picture (GstVp9Decoder * decoder,
return GST_FLOW_OK;
}
static ID3D11VideoDecoderOutputView *
gst_d3d11_vp9_dec_get_output_view_from_picture (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, guint8 * view_id)
{
GstD3D11Vp9DecInner *inner = self->inner;
GstBuffer *view_buffer;
ID3D11VideoDecoderOutputView *view;
view_buffer = (GstBuffer *) gst_vp9_picture_get_user_data (picture);
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
return NULL;
}
view =
gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
view_buffer, view_id);
if (!view) {
GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
return NULL;
}
return view;
}
static void
gst_d3d11_vp9_dec_copy_frame_params (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, DXVA_PicParams_VP9 * params)
@ -525,6 +486,7 @@ static void
gst_d3d11_vp9_dec_copy_reference_frames (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, GstVp9Dpb * dpb, DXVA_PicParams_VP9 * params)
{
GstD3D11Decoder *decoder = self->inner->d3d11_decoder;
gint i;
for (i = 0; i < GST_VP9_REF_FRAMES; i++) {
@ -533,8 +495,8 @@ gst_d3d11_vp9_dec_copy_reference_frames (GstD3D11Vp9Dec * self,
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;
view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, other_pic,
&view_id);
view = gst_d3d11_decoder_get_output_view_from_picture (decoder,
GST_CODEC_PICTURE (other_pic), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "picture does not have output view handle");
return;
@ -676,8 +638,8 @@ gst_d3d11_vp9_dec_decode_picture (GstVp9Decoder * decoder,
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;
view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture,
&view_id);
view = gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
@ -717,8 +679,6 @@ gst_d3d11_vp9_dec_end_picture (GstVp9Decoder * decoder, GstVp9Picture * picture)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;
size_t bitstream_buffer_size;
size_t bitstream_pos;
GstD3D11DecodeInputStreamArgs input_args;
@ -728,13 +688,6 @@ gst_d3d11_vp9_dec_end_picture (GstVp9Decoder * decoder, GstVp9Picture * picture)
return GST_FLOW_ERROR;
}
view = gst_d3d11_vp9_dec_get_output_view_from_picture (self,
picture, &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
}
memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
bitstream_pos = inner->bitstream_buffer.size ();
@ -758,8 +711,8 @@ gst_d3d11_vp9_dec_end_picture (GstVp9Decoder * decoder, GstVp9Picture * picture)
input_args.bitstream = &inner->bitstream_buffer[0];
input_args.bitstream_size = inner->bitstream_buffer.size ();
return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
view, &input_args);
return gst_d3d11_decoder_decode_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &input_args);
}
static GstFlowReturn
@ -768,34 +721,10 @@ gst_d3d11_vp9_dec_output_picture (GstVp9Decoder * decoder,
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;
GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
GstBuffer *view_buffer;
GST_LOG_OBJECT (self, "Outputting picture %p", picture);
view_buffer = (GstBuffer *) gst_vp9_picture_get_user_data (picture);
if (!view_buffer) {
GST_ERROR_OBJECT (self, "Could not get output view");
goto error;
}
if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
GST_CODEC_PICTURE (picture)->discont_state, picture->frame_hdr.width,
picture->frame_hdr.height, view_buffer, &frame->output_buffer)) {
GST_ERROR_OBJECT (self, "Failed to copy buffer");
goto error;
}
gst_vp9_picture_unref (picture);
return gst_video_decoder_finish_frame (vdec, frame);
error:
gst_vp9_picture_unref (picture);
gst_video_decoder_release_frame (vdec, frame);
return GST_FLOW_ERROR;
return gst_d3d11_decoder_output_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
0, picture->frame_hdr.width, picture->frame_hdr.height);
}
void