d3d11decoder: Implement array-of-texture DPB again

Re-implementation of the array-of-texture DPB on top of the d3d11 memory pool.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/2097>
Seungha Yang 2021-03-20 19:52:16 +09:00 committed by GStreamer Marge Bot
parent 39b9f79e11
commit b392ed82ba
7 changed files with 148 additions and 112 deletions
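The core mechanism in this change is how a per-texture view id is attached to each GstD3D11Memory when the array-of-texture path is used: because qdata stores a pointer and NULL means "no data", id 0 cannot be stored directly, so ids are handed out in the [1, 127] range and shifted back to the DXVA 7-bit Index7Bits range [0, 126] when filling picture parameters. A minimal standalone sketch of that pattern, using plain GObject qdata instead of GstD3D11Memory and hypothetical helper names, could look like:

    #include <glib-object.h>

    /* Hypothetical helpers mirroring the decoder's qdata trick:
     * ids live in [1, 127] so that 0 (== NULL) still means "no id set". */

    static GQuark
    view_id_quark (void)
    {
      static gsize quark = 0;

      if (g_once_init_enter (&quark)) {
        GQuark q = g_quark_from_static_string ("example-view-id");
        g_once_init_leave (&quark, q);
      }

      return (GQuark) quark;
    }

    /* Assign the next free id (wrapping 127 -> 1) on first use, then return
     * the id converted to the DXVA Index7Bits range [0, 126] */
    static guint8
    ensure_view_id (GObject * obj, guint8 * next_id)
    {
      gpointer val = g_object_get_qdata (obj, view_id_quark ());

      if (!val) {
        val = GUINT_TO_POINTER (*next_id);
        g_object_set_qdata (obj, view_id_quark (), val);
        *next_id = (*next_id % 127) + 1;    /* stay inside [1, 127] */
      }

      return (guint8) (GPOINTER_TO_UINT (val) - 1);
    }

    int
    main (void)
    {
      guint8 next_id = 1;
      GObject *mem = g_object_new (G_TYPE_OBJECT, NULL);

      /* Both lookups print 0: the id is assigned once and then reused */
      g_print ("first lookup:  %u\n", ensure_view_id (mem, &next_id));
      g_print ("second lookup: %u\n", ensure_view_id (mem, &next_id));

      g_object_unref (mem);

      return 0;
    }

When array-of-texture is not in use, the subresource index of the texture array is returned directly instead, as seen in gst_d3d11_decoder_get_output_view_from_buffer below.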


@@ -151,6 +151,9 @@ struct _GstD3D11Decoder
   guint dpb_size;
   guint downstream_min_buffers;

+  /* Used for array-of-texture */
+  guint8 next_view_id;
+
   /* for staging */
   ID3D11Texture2D *staging;
   gsize staging_texture_offset[GST_VIDEO_MAX_PLANES];
@@ -356,11 +359,25 @@ gst_d3d11_decoder_is_configured (GstD3D11Decoder * decoder)
   return decoder->configured;
 }

+static GQuark
+gst_d3d11_decoder_view_id_quark (void)
+{
+  static gsize id_quark = 0;
+
+  if (g_once_init_enter (&id_quark)) {
+    GQuark quark = g_quark_from_string ("GstD3D11DecoderViewId");
+    g_once_init_leave (&id_quark, quark);
+  }
+
+  return (GQuark) id_quark;
+}
+
 static gboolean
 gst_d3d11_decoder_ensure_output_view (GstD3D11Decoder * self,
     GstBuffer * buffer)
 {
   GstD3D11Memory *mem;
+  gpointer val = NULL;

   mem = (GstD3D11Memory *) gst_buffer_peek_memory (buffer, 0);
   if (!gst_d3d11_memory_get_decoder_output_view (mem, self->video_device,
@@ -369,6 +386,28 @@ gst_d3d11_decoder_ensure_output_view (GstD3D11Decoder * self,
     return FALSE;
   }

+  if (!self->use_array_of_texture)
+    return TRUE;
+
+  val = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
+      gst_d3d11_decoder_view_id_quark ());
+  if (!val) {
+    g_assert (self->next_view_id < 128);
+    g_assert (self->next_view_id > 0);
+
+    gst_mini_object_set_qdata (GST_MINI_OBJECT (mem),
+        gst_d3d11_decoder_view_id_quark (),
+        GUINT_TO_POINTER (self->next_view_id), NULL);
+
+    self->next_view_id++;
+    /* valid view range is [0, 126], but 0 is not used here
+     * (it's NULL as well) */
+    self->next_view_id %= 128;
+    if (self->next_view_id == 0)
+      self->next_view_id = 1;
+  }
+
   return TRUE;
 }
@@ -410,8 +449,17 @@ gst_d3d11_decoder_prepare_output_view_pool (GstD3D11Decoder * self)
       "(dpb size: %d, downstream min buffers: %d)", pool_size, self->dpb_size,
       self->downstream_min_buffers);

-  if (!self->use_array_of_texture)
+  if (!self->use_array_of_texture) {
     alloc_params->desc[0].ArraySize = pool_size;
+  } else {
+    /* Valid view id is [0, 126], but we will use the [1, 127] range so that
+     * it can be used by qdata, because zero is equal to null */
+    self->next_view_id = 1;
+
+    /* our pool size can be increased as much as possible */
+    pool_size = 0;
+  }

   gst_video_alignment_reset (&align);
   align.padding_right = self->aligned_width - GST_VIDEO_INFO_WIDTH (info);
@@ -843,18 +891,12 @@ gst_d3d11_decoder_open (GstD3D11Decoder * self)
   GST_DEBUG_OBJECT (self, "ConfigDecoderSpecific 0x%x",
       best_config->ConfigDecoderSpecific);

-  /* FIXME: Revisit this at some point.
-   * Some 4K VP9 + super frame enabled streams would be broken with
-   * this configuration (driver crash) on Intel and Nvidia
-   */
-#if 0
   /* bit 14 is equal to 1b means this config support array of texture and
    * it's recommended type as per DXVA spec */
   if ((best_config->ConfigDecoderSpecific & 0x4000) == 0x4000) {
     GST_DEBUG_OBJECT (self, "Config support array of texture");
     self->use_array_of_texture = TRUE;
   }
-#endif

   hr = video_device->CreateVideoDecoder (&decoder_desc,
       best_config, &self->decoder_handle);
@@ -1101,7 +1143,7 @@ gst_d3d11_decoder_get_output_view_buffer (GstD3D11Decoder * decoder,

 ID3D11VideoDecoderOutputView *
 gst_d3d11_decoder_get_output_view_from_buffer (GstD3D11Decoder * decoder,
-    GstBuffer * buffer)
+    GstBuffer * buffer, guint8 * index)
 {
   GstMemory *mem;
   GstD3D11Memory *dmem;
@@ -1125,22 +1167,28 @@ gst_d3d11_decoder_get_output_view_from_buffer (GstD3D11Decoder * decoder,
     return NULL;
   }

+  if (index) {
+    if (decoder->use_array_of_texture) {
+      guint8 id;
+      gpointer val = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
+          gst_d3d11_decoder_view_id_quark ());
+
+      if (!val) {
+        GST_ERROR_OBJECT (decoder, "memory has no qdata");
+        return NULL;
+      }
+
+      id = (guint8) GPOINTER_TO_UINT (val);
+      g_assert (id < 128);
+
+      *index = (id - 1);
+    } else {
+      *index = gst_d3d11_memory_get_subresource_index (dmem);
+    }
+  }
+
   return view;
 }

-guint8
-gst_d3d11_decoder_get_output_view_index (ID3D11VideoDecoderOutputView *
-    view_handle)
-{
-  D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC view_desc;
-
-  g_return_val_if_fail (view_handle != NULL, 0xff);
-
-  view_handle->GetDesc (&view_desc);
-
-  return view_desc.Texture2D.ArraySlice;
-}
-
 static gboolean
 copy_to_system (GstD3D11Decoder * self, GstVideoInfo * info, gint display_width,
     gint display_height, GstBuffer * decoder_buffer, GstBuffer * output)
@@ -1551,7 +1599,13 @@ gboolean
 gst_d3d11_decoder_can_direct_render (GstD3D11Decoder * decoder,
     GstBuffer * view_buffer, GstMiniObject * picture)
 {
-  return FALSE;
+  g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
+
+  if (!decoder->can_direct_rendering || !decoder->downstream_supports_d3d11 ||
+      !decoder->use_array_of_texture)
+    return FALSE;
+
+  return TRUE;
 }

 /* Keep sync with chromium and keep in sorted order.


@@ -89,9 +89,8 @@ GstBuffer * gst_d3d11_decoder_get_output_view_buffer (GstD3D11Decoder * de
                                GstVideoDecoder * videodec);

 ID3D11VideoDecoderOutputView * gst_d3d11_decoder_get_output_view_from_buffer (GstD3D11Decoder * decoder,
-                               GstBuffer * buffer);
+                               GstBuffer * buffer,
+                               guint8 * view_id);

-guint8   gst_d3d11_decoder_get_output_view_index (ID3D11VideoDecoderOutputView * view_handle);
-
 gboolean gst_d3d11_decoder_process_output (GstD3D11Decoder * decoder,
                                GstVideoInfo * info,


@@ -549,7 +549,7 @@ gst_d3d11_h264_dec_get_bitstream_buffer (GstD3D11H264Dec * self)
 static ID3D11VideoDecoderOutputView *
 gst_d3d11_h264_dec_get_output_view_from_picture (GstD3D11H264Dec * self,
-    GstH264Picture * picture)
+    GstH264Picture * picture, guint8 * view_id)
 {
   GstBuffer *view_buffer;
   ID3D11VideoDecoderOutputView *view;
@@ -561,7 +561,7 @@ gst_d3d11_h264_dec_get_output_view_from_picture (GstD3D11H264Dec * self,
   }

   view = gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
-      view_buffer);
+      view_buffer, view_id);
   if (!view) {
     GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
     return NULL;
@@ -576,6 +576,7 @@ gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
 {
   GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
   ID3D11VideoDecoderOutputView *view;
+  guint8 view_id = 0xff;
   GArray *dpb_array;
   GstH264SPS *sps;
   GstH264PPS *pps;
@@ -591,7 +592,8 @@ gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
   sps = pps->sequence;
   g_assert (sps != NULL);

-  view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture);
+  view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture,
+      &view_id);
   if (!view) {
     GST_ERROR_OBJECT (self, "current picture does not have output view handle");
     return FALSE;
@@ -616,8 +618,7 @@ gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
   for (i = dpb_array->len - 1, j = 0; i >= 0 && j < 16; i--) {
     GstH264Picture *other = g_array_index (dpb_array, GstH264Picture *, i);
-    ID3D11VideoDecoderOutputView *other_view;
-    gint id = 0xff;
+    guint8 id = 0xff;

     if (!GST_H264_PICTURE_IS_REF (other))
       continue;
@@ -626,11 +627,7 @@ gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
     if (other->second_field)
       continue;

-    other_view = gst_d3d11_h264_dec_get_output_view_from_picture (self, other);
-
-    if (other_view)
-      id = gst_d3d11_decoder_get_output_view_index (other_view);
-
+    gst_d3d11_h264_dec_get_output_view_from_picture (self, other, &id);
     self->ref_frame_list[j].Index7Bits = id;

     if (GST_H264_PICTURE_IS_LONG_TERM_REF (other)) {
@@ -681,8 +678,7 @@ gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
   gst_d3d11_h264_dec_fill_picture_params (self, &slice->header, &pic_params);

-  pic_params.CurrPic.Index7Bits =
-      gst_d3d11_decoder_get_output_view_index (view);
+  pic_params.CurrPic.Index7Bits = view_id;
   pic_params.RefPicFlag = GST_H264_PICTURE_IS_REF (picture);
   pic_params.frame_num = picture->frame_num;


@@ -509,7 +509,7 @@ gst_d3d11_h265_dec_get_bitstream_buffer (GstD3D11H265Dec * self)
 static ID3D11VideoDecoderOutputView *
 gst_d3d11_h265_dec_get_output_view_from_picture (GstD3D11H265Dec * self,
-    GstH265Picture * picture)
+    GstH265Picture * picture, guint8 * view_id)
 {
   GstBuffer *view_buffer;
   ID3D11VideoDecoderOutputView *view;
@@ -521,7 +521,7 @@ gst_d3d11_h265_dec_get_output_view_from_picture (GstD3D11H265Dec * self,
   }

   view = gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
-      view_buffer);
+      view_buffer, view_id);
   if (!view) {
     GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
     return NULL;
@@ -548,6 +548,7 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
 {
   GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
   ID3D11VideoDecoderOutputView *view;
+  guint8 view_id = 0xff;
   guint i, j;
   GArray *dpb_array;
   GstH265SPS *sps;
@@ -564,7 +565,8 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
   sps = pps->sps;
   g_assert (sps != NULL);

-  view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture);
+  view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture,
+      &view_id);
   if (!view) {
     GST_ERROR_OBJECT (self, "current picture does not have output view handle");
     return FALSE;
@@ -593,19 +595,14 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
   for (i = 0; i < dpb_array->len && i < G_N_ELEMENTS (self->ref_pic_list); i++) {
     GstH265Picture *other = g_array_index (dpb_array, GstH265Picture *, i);
-    ID3D11VideoDecoderOutputView *other_view;
-    gint id = 0xff;
+    guint8 id = 0xff;

     if (!other->ref) {
       GST_LOG_OBJECT (self, "%dth picture in dpb is not reference, skip", i);
       continue;
     }

-    other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self, other);
-
-    if (other_view)
-      id = gst_d3d11_decoder_get_output_view_index (other_view);
-
+    gst_d3d11_h265_dec_get_output_view_from_picture (self, other, &id);
     self->ref_pic_list[i].Index7Bits = id;
     self->ref_pic_list[i].AssociatedFlag = other->long_term;
     self->pic_order_cnt_val_list[i] = other->pic_order_cnt;
@@ -613,7 +610,8 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
   for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_st_curr_before); i++) {
     GstH265Picture *other = NULL;
-    gint id = 0xff;
+    guint8 other_view_id = 0xff;
+    guint8 id = 0xff;

     while (other == NULL && j < decoder->NumPocStCurrBefore)
       other = decoder->RefPicSetStCurrBefore[j++];
@@ -621,13 +619,11 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
     if (other) {
       ID3D11VideoDecoderOutputView *other_view;

-      other_view =
-          gst_d3d11_h265_dec_get_output_view_from_picture (self, other);
+      other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
+          other, &other_view_id);

-      if (other_view) {
-        id = gst_d3d11_h265_dec_get_ref_index (self,
-            gst_d3d11_decoder_get_output_view_index (other_view));
-      }
+      if (other_view)
+        id = gst_d3d11_h265_dec_get_ref_index (self, other_view_id);
     }

     self->ref_pic_set_st_curr_before[i] = id;
@@ -635,7 +631,8 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
   for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_st_curr_after); i++) {
     GstH265Picture *other = NULL;
-    gint id = 0xff;
+    guint8 other_view_id = 0xff;
+    guint8 id = 0xff;

     while (other == NULL && j < decoder->NumPocStCurrAfter)
       other = decoder->RefPicSetStCurrAfter[j++];
@@ -643,13 +640,11 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
     if (other) {
       ID3D11VideoDecoderOutputView *other_view;

-      other_view =
-          gst_d3d11_h265_dec_get_output_view_from_picture (self, other);
+      other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
+          other, &other_view_id);

-      if (other_view) {
-        id = gst_d3d11_h265_dec_get_ref_index (self,
-            gst_d3d11_decoder_get_output_view_index (other_view));
-      }
+      if (other_view)
+        id = gst_d3d11_h265_dec_get_ref_index (self, other_view_id);
     }

     self->ref_pic_set_st_curr_after[i] = id;
@@ -657,7 +652,8 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
   for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_lt_curr); i++) {
     GstH265Picture *other = NULL;
-    gint id = 0xff;
+    guint8 other_view_id = 0xff;
+    guint8 id = 0xff;

     while (other == NULL && j < decoder->NumPocLtCurr)
       other = decoder->RefPicSetLtCurr[j++];
@@ -665,29 +661,19 @@ gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
     if (other) {
       ID3D11VideoDecoderOutputView *other_view;

-      other_view =
-          gst_d3d11_h265_dec_get_output_view_from_picture (self, other);
+      other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self,
+          other, &other_view_id);

-      if (other_view) {
-        id = gst_d3d11_h265_dec_get_ref_index (self,
-            gst_d3d11_decoder_get_output_view_index (other_view));
-      }
+      if (other_view)
+        id = gst_d3d11_h265_dec_get_ref_index (self, other_view_id);
     }

     self->ref_pic_set_lt_curr[i] = id;
   }

-  view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture);
-  if (!view) {
-    GST_ERROR_OBJECT (self, "current picture does not have output view");
-    return FALSE;
-  }
-
   gst_d3d11_h265_dec_fill_picture_params (self, &slice->header, &pic_params);

-  pic_params.CurrPic.Index7Bits =
-      gst_d3d11_decoder_get_output_view_index (view);
+  pic_params.CurrPic.Index7Bits = view_id;
   pic_params.IrapPicFlag = GST_H265_IS_NAL_TYPE_IRAP (slice->nalu.type);
   pic_params.IdrPicFlag = GST_H265_IS_NAL_TYPE_IDR (slice->nalu.type);
   pic_params.IntraPicFlag = GST_H265_IS_NAL_TYPE_IRAP (slice->nalu.type);


@@ -519,7 +519,7 @@ gst_d3d11_mpeg2_dec_get_bitstream_buffer (GstD3D11Mpeg2Dec * self)
 static ID3D11VideoDecoderOutputView *
 gst_d3d11_mpeg2_dec_get_output_view_from_picture (GstD3D11Mpeg2Dec * self,
-    GstMpeg2Picture * picture)
+    GstMpeg2Picture * picture, guint8 * view_id)
 {
   GstBuffer *view_buffer;
   ID3D11VideoDecoderOutputView *view;
@@ -535,7 +535,7 @@ gst_d3d11_mpeg2_dec_get_output_view_from_picture (GstD3D11Mpeg2Dec * self,
   view =
       gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
-      view_buffer);
+      view_buffer, view_id);
   if (!view) {
     GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
     return NULL;
@@ -577,6 +577,8 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
   GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
   ID3D11VideoDecoderOutputView *view;
   ID3D11VideoDecoderOutputView *other_view;
+  guint8 view_id = 0xff;
+  guint8 other_view_id = 0xff;
   DXVA_PictureParameters pic_params = { 0, };
   DXVA_QmatrixData iq_matrix = { 0, };
   guint d3d11_buffer_size = 0;
@@ -584,7 +586,8 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
   gboolean is_field =
       picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;

-  view = gst_d3d11_mpeg2_dec_get_output_view_from_picture (self, picture);
+  view = gst_d3d11_mpeg2_dec_get_output_view_from_picture (self, picture,
+      &view_id);
   if (!view) {
     GST_ERROR_OBJECT (self, "current picture does not have output view handle");
     return FALSE;
@@ -597,8 +600,7 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
   }

   /* Fill DXVA_PictureParameters */
-  pic_params.wDecodedPictureIndex =
-      gst_d3d11_decoder_get_output_view_index (view);
+  pic_params.wDecodedPictureIndex = view_id;
   pic_params.wForwardRefPictureIndex = 0xffff;
   pic_params.wBackwardRefPictureIndex = 0xffff;
@@ -607,10 +609,9 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
       if (next_picture) {
         other_view =
             gst_d3d11_mpeg2_dec_get_output_view_from_picture (self,
-            next_picture);
+            next_picture, &other_view_id);
         if (other_view)
-          pic_params.wBackwardRefPictureIndex =
-              gst_d3d11_decoder_get_output_view_index (other_view);
+          pic_params.wBackwardRefPictureIndex = other_view_id;
       }
     }
     /* fall-through */
@@ -618,10 +619,9 @@ gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
       if (prev_picture) {
         other_view =
             gst_d3d11_mpeg2_dec_get_output_view_from_picture (self,
-            prev_picture);
+            prev_picture, &other_view_id);
         if (other_view)
-          pic_params.wForwardRefPictureIndex =
-              gst_d3d11_decoder_get_output_view_index (other_view);
+          pic_params.wForwardRefPictureIndex = other_view_id;
       }
     }
     default:


@@ -451,7 +451,7 @@ error:
 static ID3D11VideoDecoderOutputView *
 gst_d3d11_vp8_dec_get_output_view_from_picture (GstD3D11Vp8Dec * self,
-    GstVp8Picture * picture)
+    GstVp8Picture * picture, guint8 * view_id)
 {
   GstBuffer *view_buffer;
   ID3D11VideoDecoderOutputView *view;
@@ -464,7 +464,7 @@ gst_d3d11_vp8_dec_get_output_view_from_picture (GstD3D11Vp8Dec * self,
   view =
       gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
-      view_buffer);
+      view_buffer, view_id);
   if (!view) {
     GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
     return NULL;
@@ -480,7 +480,7 @@ gst_d3d11_vp8_dec_start_picture (GstVp8Decoder * decoder,
   GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
   ID3D11VideoDecoderOutputView *view;

-  view = gst_d3d11_vp8_dec_get_output_view_from_picture (self, picture);
+  view = gst_d3d11_vp8_dec_get_output_view_from_picture (self, picture, NULL);
   if (!view) {
     GST_ERROR_OBJECT (self, "current picture does not have output view handle");
     return FALSE;
@@ -556,45 +556,43 @@ gst_d3d11_vp8_dec_copy_reference_frames (GstD3D11Vp8Dec * self,
 {
   GstVp8Decoder *decoder = GST_VP8_DECODER (self);
   ID3D11VideoDecoderOutputView *view;
+  guint8 view_id = 0xff;

   if (decoder->alt_ref_picture) {
     view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
-        decoder->alt_ref_picture);
+        decoder->alt_ref_picture, &view_id);
     if (!view) {
       GST_ERROR_OBJECT (self, "picture does not have output view handle");
       return;
     }

-    params->alt_fb_idx.Index7Bits =
-        gst_d3d11_decoder_get_output_view_index (view);
+    params->alt_fb_idx.Index7Bits = view_id;
   } else {
     params->alt_fb_idx.bPicEntry = 0xff;
   }

   if (decoder->golden_ref_picture) {
     view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
-        decoder->golden_ref_picture);
+        decoder->golden_ref_picture, &view_id);
     if (!view) {
       GST_ERROR_OBJECT (self, "picture does not have output view handle");
       return;
     }

-    params->gld_fb_idx.Index7Bits =
-        gst_d3d11_decoder_get_output_view_index (view);
+    params->gld_fb_idx.Index7Bits = view_id;
   } else {
     params->gld_fb_idx.bPicEntry = 0xff;
   }

   if (decoder->last_picture) {
     view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
-        decoder->last_picture);
+        decoder->last_picture, &view_id);
     if (!view) {
       GST_ERROR_OBJECT (self, "picture does not have output view handle");
       return;
     }

-    params->lst_fb_idx.Index7Bits =
-        gst_d3d11_decoder_get_output_view_index (view);
+    params->lst_fb_idx.Index7Bits = view_id;
   } else {
     params->lst_fb_idx.bPicEntry = 0xff;
   }
@@ -811,9 +809,11 @@ gst_d3d11_vp8_dec_decode_picture (GstVp8Decoder * decoder,
   GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
   DXVA_PicParams_VP8 pic_params = { 0, };
   ID3D11VideoDecoderOutputView *view;
+  guint8 view_id = 0xff;
   const GstVp8FrameHdr *frame_hdr = &picture->frame_hdr;

-  view = gst_d3d11_vp8_dec_get_output_view_from_picture (self, picture);
+  view = gst_d3d11_vp8_dec_get_output_view_from_picture (self,
+      picture, &view_id);
   if (!view) {
     GST_ERROR_OBJECT (self, "current picture does not have output view handle");
     return FALSE;
@@ -822,8 +822,7 @@ gst_d3d11_vp8_dec_decode_picture (GstVp8Decoder * decoder,
   pic_params.first_part_size = frame_hdr->first_part_size;
   pic_params.width = self->width;
   pic_params.height = self->height;
-  pic_params.CurrPic.Index7Bits =
-      gst_d3d11_decoder_get_output_view_index (view);
+  pic_params.CurrPic.Index7Bits = view_id;
   pic_params.StatusReportFeedbackNumber = 1;

   gst_d3d11_vp8_dec_copy_frame_params (self, picture, parser, &pic_params);


@@ -542,7 +542,7 @@ error:
 static ID3D11VideoDecoderOutputView *
 gst_d3d11_vp9_dec_get_output_view_from_picture (GstD3D11Vp9Dec * self,
-    GstVp9Picture * picture)
+    GstVp9Picture * picture, guint8 * view_id)
 {
   GstBuffer *view_buffer;
   ID3D11VideoDecoderOutputView *view;
@@ -555,7 +555,7 @@ gst_d3d11_vp9_dec_get_output_view_from_picture (GstD3D11Vp9Dec * self,
   view =
       gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder,
-      view_buffer);
+      view_buffer, view_id);
   if (!view) {
     GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
     return NULL;
@@ -571,7 +571,7 @@ gst_d3d11_vp9_dec_start_picture (GstVp9Decoder * decoder,
   GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
   ID3D11VideoDecoderOutputView *view;

-  view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture);
+  view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture, NULL);
   if (!view) {
     GST_ERROR_OBJECT (self, "current picture does not have output view handle");
     return FALSE;
@@ -669,15 +669,16 @@ gst_d3d11_vp9_dec_copy_reference_frames (GstD3D11Vp9Dec * self,
     if (dpb->pic_list[i]) {
       GstVp9Picture *other_pic = dpb->pic_list[i];
       ID3D11VideoDecoderOutputView *view;
+      guint8 view_id = 0xff;

-      view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, other_pic);
+      view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, other_pic,
+          &view_id);
       if (!view) {
         GST_ERROR_OBJECT (self, "picture does not have output view handle");
         return;
       }

-      params->ref_frame_map[i].Index7Bits =
-          gst_d3d11_decoder_get_output_view_index (view);
+      params->ref_frame_map[i].Index7Bits = view_id;
       params->ref_frame_coded_width[i] = picture->frame_hdr.width;
       params->ref_frame_coded_height[i] = picture->frame_hdr.height;
     } else {
@@ -1034,15 +1035,16 @@ gst_d3d11_vp9_dec_decode_picture (GstVp9Decoder * decoder,
   GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
   DXVA_PicParams_VP9 pic_params = { 0, };
   ID3D11VideoDecoderOutputView *view;
+  guint8 view_id = 0xff;

-  view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture);
+  view = gst_d3d11_vp9_dec_get_output_view_from_picture (self, picture,
+      &view_id);
   if (!view) {
     GST_ERROR_OBJECT (self, "current picture does not have output view handle");
     return FALSE;
   }

-  pic_params.CurrPic.Index7Bits =
-      gst_d3d11_decoder_get_output_view_index (view);
+  pic_params.CurrPic.Index7Bits = view_id;
   pic_params.uncompressed_header_size_byte_aligned =
       picture->frame_hdr.frame_header_length_in_bytes;
   pic_params.first_partition_size = picture->frame_hdr.first_partition_size;