Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git (synced 2024-11-22 09:41:07 +00:00)
d3d11decoder: Port to GstDxva
Use new DXVA baseclass

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/4161>
This commit is contained in:
parent 729c11cb0c
commit 2c058b3b99
15 changed files with 771 additions and 4023 deletions
File diff suppressed because it is too large
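The port replaces the GstH264Decoder/GstVideoDecoder-level glue previously open-coded in the D3D11 elements with the codec-agnostic GstDxva virtual functions (configure, new_picture, duplicate_picture, get_picture_id, start_picture, end_picture, output_picture), each of which now forwards to a GstD3D11Decoder helper. The following is only an editorial sketch of that forwarding pattern, assuming the subclass fields (device, decoder) generated by the new GST_D3D11_DECODER_DEFINE_TYPE macro shown later in this diff; it is not part of the commit itself.

/* Sketch: how GstDxva vfuncs are forwarded to the GstD3D11Decoder helper
 * after this port (H.264 element used as the example). */
static GstFlowReturn
gst_d3d11_h264_dec_start_picture (GstDxvaH264Decoder * decoder,
    GstCodecPicture * picture, guint8 * picture_id)
{
  GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);

  /* The helper looks up the decoder output view attached to the picture
   * and reports its view index back through picture_id. */
  return gst_d3d11_decoder_start_picture (self->decoder, picture, picture_id);
}

static GstFlowReturn
gst_d3d11_h264_dec_end_picture (GstDxvaH264Decoder * decoder,
    GstCodecPicture * picture, GPtrArray * ref_pics,
    const GstDxvaDecodingArgs * args)
{
  GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);

  /* Picture parameters, slice control, bitstream and the optional IQ
   * matrix all arrive packed in GstDxvaDecodingArgs now, replacing the
   * removed GstD3D11DecodeInputStreamArgs. */
  return gst_d3d11_decoder_end_picture (self->decoder, picture, args);
}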
@@ -17,8 +17,7 @@
* Boston, MA 02110-1301, USA.
*/

#ifndef __GST_D3D11_AV1_DEC_H__
#define __GST_D3D11_AV1_DEC_H__
#pragma once

#include "gstd3d11decoder.h"

@@ -29,5 +28,3 @@ void gst_d3d11_av1_dec_register (GstPlugin * plugin,
guint rank);

G_END_DECLS

#endif /* __GST_D3D11_AV1_DEC_H__ */

@@ -207,12 +207,6 @@ private:
};
/* *INDENT-ON* */

enum
{
PROP_0,
PROP_DEVICE,
};

struct _GstD3D11Decoder
{
GstObject parent;

@@ -229,7 +223,7 @@ struct _GstD3D11Decoder

GstVideoInfo info;
GstVideoInfo output_info;
GstDXVACodec codec;
GstDxvaCodec codec;
gint offset_x;
gint offset_y;
gint coded_width;

@@ -272,11 +266,6 @@ struct _GstD3D11Decoder
guint timer_resolution;
};

static void gst_d3d11_decoder_constructed (GObject * object);
static void gst_d3d11_decoder_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_d3d11_decoder_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_d3d11_decoder_dispose (GObject * obj);
static void gst_d3d11_decoder_finalize (GObject * obj);
static gboolean gst_d3d11_decoder_can_direct_render (GstD3D11Decoder * decoder,

@@ -291,90 +280,14 @@ gst_d3d11_decoder_class_init (GstD3D11DecoderClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);

gobject_class->constructed = gst_d3d11_decoder_constructed;
gobject_class->set_property = gst_d3d11_decoder_set_property;
gobject_class->get_property = gst_d3d11_decoder_get_property;
gobject_class->dispose = gst_d3d11_decoder_dispose;
gobject_class->finalize = gst_d3d11_decoder_finalize;

g_object_class_install_property (gobject_class, PROP_DEVICE,
g_param_spec_object ("device", "Device",
"D3D11 Devicd to use", GST_TYPE_D3D11_DEVICE,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY |
G_PARAM_STATIC_STRINGS)));
}

static void
gst_d3d11_decoder_init (GstD3D11Decoder * self)
{
}

static void
gst_d3d11_decoder_constructed (GObject * object)
{
GstD3D11Decoder *self = GST_D3D11_DECODER (object);
ID3D11VideoDevice *video_device;
ID3D11VideoContext *video_context;

if (!self->device) {
GST_ERROR_OBJECT (self, "No D3D11Device available");
return;
}

video_device = gst_d3d11_device_get_video_device_handle (self->device);
if (!video_device) {
GST_WARNING_OBJECT (self, "ID3D11VideoDevice is not available");
return;
}

video_context = gst_d3d11_device_get_video_context_handle (self->device);
if (!video_context) {
GST_WARNING_OBJECT (self, "ID3D11VideoContext is not available");
return;
}

self->video_device = video_device;
video_device->AddRef ();

self->video_context = video_context;
video_context->AddRef ();

BOOL ret = QueryPerformanceFrequency (&self->frequency);
g_assert (ret);

return;
}

static void
gst_d3d11_decoder_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstD3D11Decoder *self = GST_D3D11_DECODER (object);

switch (prop_id) {
case PROP_DEVICE:
self->device = (GstD3D11Device *) g_value_dup_object (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}

static void
gst_d3d11_decoder_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstD3D11Decoder *self = GST_D3D11_DECODER (object);

switch (prop_id) {
case PROP_DEVICE:
g_value_set_object (value, self->device);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
QueryPerformanceFrequency (&self->frequency);
}

static void

@@ -439,37 +352,43 @@ gst_d3d11_decoder_finalize (GObject * obj)
}

GstD3D11Decoder *
gst_d3d11_decoder_new (GstD3D11Device * device, GstDXVACodec codec)
gst_d3d11_decoder_new (GstD3D11Device * device, GstDxvaCodec codec)
{
GstD3D11Decoder *self;
ID3D11VideoDevice *video_device;
ID3D11VideoContext *video_context;

g_return_val_if_fail (GST_IS_D3D11_DEVICE (device), nullptr);
g_return_val_if_fail (codec > GST_DXVA_CODEC_NONE, nullptr);
g_return_val_if_fail (codec < GST_DXVA_CODEC_LAST, nullptr);

self = (GstD3D11Decoder *)
g_object_new (GST_TYPE_D3D11_DECODER, "device", device, NULL);

if (!self->video_device || !self->video_context) {
gst_object_unref (self);
return NULL;
video_device = gst_d3d11_device_get_video_device_handle (device);
if (!video_device) {
GST_WARNING_OBJECT (device, "ID3D11VideoDevice is not available");
return nullptr;
}

video_context = gst_d3d11_device_get_video_context_handle (device);
if (!video_context) {
GST_WARNING_OBJECT (device, "ID3D11VideoContext is not available");
return nullptr;
}

self = (GstD3D11Decoder *) g_object_new (GST_TYPE_D3D11_DECODER, nullptr);

self->device = (GstD3D11Device *) gst_object_ref (device);
self->codec = codec;
self->video_device = video_device;
video_device->AddRef ();

self->video_context = video_context;
video_context->AddRef ();

gst_object_ref_sink (self);

return self;
}

gboolean
gst_d3d11_decoder_is_configured (GstD3D11Decoder * decoder)
{
g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);

return decoder->configured;
}

static gboolean
gst_d3d11_decoder_ensure_output_view (GstD3D11Decoder * self,
GstBuffer * buffer)

@@ -606,35 +525,9 @@ error:
return FALSE;
}

static const gchar *
gst_dxva_codec_to_string (GstDXVACodec codec)
{
switch (codec) {
case GST_DXVA_CODEC_NONE:
return "none";
case GST_DXVA_CODEC_H264:
return "H.264";
case GST_DXVA_CODEC_VP9:
return "VP9";
case GST_DXVA_CODEC_H265:
return "H.265";
case GST_DXVA_CODEC_VP8:
return "VP8";
case GST_DXVA_CODEC_MPEG2:
return "MPEG2";
case GST_DXVA_CODEC_AV1:
return "AV1";
default:
g_assert_not_reached ();
break;
}

return "Unknown";
}

gboolean
gst_d3d11_decoder_get_supported_decoder_profile (GstD3D11Device * device,
GstDXVACodec codec, GstVideoFormat format, const GUID ** selected_profile)
GstDxvaCodec codec, GstVideoFormat format, const GUID ** selected_profile)
{
GUID *guid_list = nullptr;
const GUID *profile = nullptr;

@@ -780,7 +673,7 @@ out:
return ret;
}

gboolean
GstFlowReturn
gst_d3d11_decoder_configure (GstD3D11Decoder * decoder,
GstVideoCodecState * input_state, const GstVideoInfo * out_info,
gint offset_x, gint offset_y, gint coded_width, gint coded_height,

@@ -788,13 +681,14 @@ gst_d3d11_decoder_configure (GstD3D11Decoder * decoder,
{
GstD3D11Format d3d11_format;

g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
g_return_val_if_fail (out_info != NULL, FALSE);
g_return_val_if_fail (input_state != NULL, FALSE);
g_return_val_if_fail (coded_width >= GST_VIDEO_INFO_WIDTH (out_info), FALSE);
g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), GST_FLOW_ERROR);
g_return_val_if_fail (out_info != NULL, GST_FLOW_ERROR);
g_return_val_if_fail (input_state != NULL, GST_FLOW_ERROR);
g_return_val_if_fail (coded_width >= GST_VIDEO_INFO_WIDTH (out_info),
GST_FLOW_ERROR);
g_return_val_if_fail (coded_height >= GST_VIDEO_INFO_HEIGHT (out_info),
FALSE);
g_return_val_if_fail (dpb_size > 0, FALSE);
GST_FLOW_ERROR);
g_return_val_if_fail (dpb_size > 0, GST_FLOW_ERROR);

gst_d3d11_decoder_reset (decoder);

@@ -803,7 +697,7 @@ gst_d3d11_decoder_configure (GstD3D11Decoder * decoder,
d3d11_format.dxgi_format == DXGI_FORMAT_UNKNOWN) {
GST_ERROR_OBJECT (decoder, "Could not determine dxgi format from %s",
gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (out_info)));
return FALSE;
return GST_FLOW_ERROR;
}

/* Additional 2 frames to help zero-copying */

@@ -825,7 +719,7 @@ gst_d3d11_decoder_configure (GstD3D11Decoder * decoder,
else
decoder->need_crop = FALSE;

return TRUE;
return GST_FLOW_OK;
}

static gboolean

@@ -1225,9 +1119,95 @@ gst_d3d11_decoder_submit_decoder_buffers (GstD3D11Decoder * decoder,
return TRUE;
}

static ID3D11VideoDecoderOutputView *
gst_d3d11_decoder_get_output_view_from_picture (GstD3D11Decoder * self,
GstCodecPicture * picture, guint8 * index)
{
GstMemory *mem;
GstD3D11Memory *dmem;
ID3D11VideoDecoderOutputView *view;
GstBuffer *buffer;

if (index)
*index = 0xff;

buffer = (GstBuffer *) gst_codec_picture_get_user_data (picture);
if (!buffer) {
GST_DEBUG_OBJECT (self, "picture without attached user data");
return nullptr;
}

mem = gst_buffer_peek_memory (buffer, 0);
if (!gst_is_d3d11_memory (mem)) {
GST_WARNING_OBJECT (self, "Not a d3d11 memory");
return nullptr;
}

dmem = (GstD3D11Memory *) mem;
view = gst_d3d11_memory_get_decoder_output_view (dmem, self->video_device,
self->decoder_handle, &self->decoder_profile);

if (!view) {
GST_ERROR_OBJECT (self, "Decoder output view is unavailable");
return nullptr;
}

if (index) {
if (self->use_array_of_texture) {
ID3D11Resource *texture;
ComPtr < IGstD3D11DecoderViewData > data;
UINT size;

texture = gst_d3d11_memory_get_resource_handle (dmem);
size = sizeof (IGstD3D11DecoderViewData *);

texture->GetPrivateData (IID_GST_D3D11_DECODER_VIEW_DATA,
&size, data.GetAddressOf ());

if (!data) {
GST_ERROR_OBJECT (self, "memory has no private data");
return nullptr;
}

*index = data->GetViewIndex ();
} else {
*index = gst_d3d11_memory_get_subresource_index (dmem);
}
}

return view;
}

guint8
gst_d3d11_decoder_get_picture_id (GstD3D11Decoder * decoder,
GstCodecPicture * picture)
{
guint8 id = 0xff;

if (!picture)
return 0xff;

if (!gst_d3d11_decoder_get_output_view_from_picture (decoder, picture, &id))
return 0xff;

return id;
}

GstFlowReturn
gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
GstCodecPicture * picture, GstD3D11DecodeInputStreamArgs * input_args)
gst_d3d11_decoder_start_picture (GstD3D11Decoder * decoder,
GstCodecPicture * picture, guint8 * picture_id)
{
if (!gst_d3d11_decoder_get_output_view_from_picture (decoder,
picture, picture_id)) {
return GST_FLOW_ERROR;
}

return GST_FLOW_OK;
}

GstFlowReturn
gst_d3d11_decoder_end_picture (GstD3D11Decoder * decoder,
GstCodecPicture * picture, const GstDxvaDecodingArgs * args)
{
ID3D11VideoDecoderOutputView *output_view;
guint d3d11_buffer_size;

@@ -1238,7 +1218,7 @@ gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,

g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), GST_FLOW_ERROR);
g_return_val_if_fail (picture != nullptr, GST_FLOW_ERROR);
g_return_val_if_fail (input_args != nullptr, GST_FLOW_ERROR);
g_return_val_if_fail (args != nullptr, GST_FLOW_ERROR);

output_view = gst_d3d11_decoder_get_output_view_from_picture (decoder,
picture, nullptr);

@@ -1250,21 +1230,21 @@ gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
memset (buffer_desc, 0, sizeof (buffer_desc));

buffer_desc[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
buffer_desc[0].DataSize = input_args->picture_params_size;
buffer_desc[0].DataSize = args->picture_params_size;

buffer_desc[1].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
buffer_desc[1].DataSize = input_args->slice_control_size;
buffer_desc[1].DataSize = args->slice_control_size;

buffer_desc[2].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
buffer_desc[2].DataOffset = 0;
buffer_desc[2].DataSize = input_args->bitstream_size;
buffer_desc[2].DataSize = args->bitstream_size;

buffer_desc_size = 3;
if (input_args->inverse_quantization_matrix &&
input_args->inverse_quantization_matrix_size > 0) {
if (args->inverse_quantization_matrix &&
args->inverse_quantization_matrix_size > 0) {
buffer_desc[3].BufferType =
D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX;
buffer_desc[3].DataSize = input_args->inverse_quantization_matrix_size;
buffer_desc[3].DataSize = args->inverse_quantization_matrix_size;
buffer_desc_size++;
}

@@ -1281,7 +1261,7 @@ gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
goto error;
}

if (d3d11_buffer_size < input_args->picture_params_size) {
if (d3d11_buffer_size < args->picture_params_size) {
GST_ERROR_OBJECT (decoder,
"Too small picture param buffer size %d", d3d11_buffer_size);

@@ -1290,8 +1270,7 @@ gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
goto error;
}

memcpy (d3d11_buffer, input_args->picture_params,
input_args->picture_params_size);
memcpy (d3d11_buffer, args->picture_params, args->picture_params_size);

if (!gst_d3d11_decoder_release_decoder_buffer (decoder,
D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS)) {

@@ -1306,7 +1285,7 @@ gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
goto error;
}

if (d3d11_buffer_size < input_args->slice_control_size) {
if (d3d11_buffer_size < args->slice_control_size) {
GST_ERROR_OBJECT (decoder,
"Too small slice control buffer size %d", d3d11_buffer_size);

@@ -1315,8 +1294,7 @@ gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
goto error;
}

memcpy (d3d11_buffer,
input_args->slice_control, input_args->slice_control_size);
memcpy (d3d11_buffer, args->slice_control, args->slice_control_size);

if (!gst_d3d11_decoder_release_decoder_buffer (decoder,
D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL)) {

@@ -1331,7 +1309,7 @@ gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
goto error;
}

if (d3d11_buffer_size < input_args->bitstream_size) {
if (d3d11_buffer_size < args->bitstream_size) {
GST_ERROR_OBJECT (decoder, "Too small bitstream buffer size %d",
d3d11_buffer_size);

@@ -1340,7 +1318,7 @@ gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
goto error;
}

memcpy (d3d11_buffer, input_args->bitstream, input_args->bitstream_size);
memcpy (d3d11_buffer, args->bitstream, args->bitstream_size);

if (!gst_d3d11_decoder_release_decoder_buffer (decoder,
D3D11_VIDEO_DECODER_BUFFER_BITSTREAM)) {

@@ -1348,7 +1326,7 @@ gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
goto error;
}

if (input_args->inverse_quantization_matrix_size > 0) {
if (args->inverse_quantization_matrix_size > 0) {
if (!gst_d3d11_decoder_get_decoder_buffer (decoder,
D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX,
&d3d11_buffer_size, &d3d11_buffer)) {

@@ -1357,7 +1335,7 @@ gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
goto error;
}

if (d3d11_buffer_size < input_args->inverse_quantization_matrix_size) {
if (d3d11_buffer_size < args->inverse_quantization_matrix_size) {
GST_ERROR_OBJECT (decoder,
"Too small inverse quantization matrix buffer buffer %d",
d3d11_buffer_size);

@@ -1367,8 +1345,8 @@ gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
goto error;
}

memcpy (d3d11_buffer, input_args->inverse_quantization_matrix,
input_args->inverse_quantization_matrix_size);
memcpy (d3d11_buffer, args->inverse_quantization_matrix,
args->inverse_quantization_matrix_size);

if (!gst_d3d11_decoder_release_decoder_buffer (decoder,
D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX)) {

@@ -1446,66 +1424,19 @@ gst_d3d11_decoder_new_picture (GstD3D11Decoder * decoder,
return GST_FLOW_OK;
}

ID3D11VideoDecoderOutputView *
gst_d3d11_decoder_get_output_view_from_picture (GstD3D11Decoder * decoder,
GstCodecPicture * picture, guint8 * index)
GstFlowReturn
gst_d3d11_decoder_duplicate_picture (GstD3D11Decoder * decoder,
GstCodecPicture * src, GstCodecPicture * dst)
{
GstMemory *mem;
GstD3D11Memory *dmem;
ID3D11VideoDecoderOutputView *view;
GstBuffer *buffer;
GstBuffer *buf = (GstBuffer *) gst_codec_picture_get_user_data (src);

g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), nullptr);
g_return_val_if_fail (picture, nullptr);
if (!buf)
return GST_FLOW_ERROR;

if (index)
*index = 0xff;
gst_codec_picture_set_user_data (dst, gst_buffer_ref (buf),
(GDestroyNotify) gst_buffer_unref);

buffer = (GstBuffer *) gst_codec_picture_get_user_data (picture);
if (!buffer) {
GST_DEBUG_OBJECT (decoder, "picture without attached user data");
return nullptr;
}

mem = gst_buffer_peek_memory (buffer, 0);
if (!gst_is_d3d11_memory (mem)) {
GST_WARNING_OBJECT (decoder, "Not a d3d11 memory");
return nullptr;
}

dmem = (GstD3D11Memory *) mem;
view = gst_d3d11_memory_get_decoder_output_view (dmem, decoder->video_device,
decoder->decoder_handle, &decoder->decoder_profile);

if (!view) {
GST_ERROR_OBJECT (decoder, "Decoder output view is unavailable");
return nullptr;
}

if (index) {
if (decoder->use_array_of_texture) {
ID3D11Resource *texture;
ComPtr < IGstD3D11DecoderViewData > data;
UINT size;

texture = gst_d3d11_memory_get_resource_handle (dmem);
size = sizeof (IGstD3D11DecoderViewData *);

texture->GetPrivateData (IID_GST_D3D11_DECODER_VIEW_DATA,
&size, data.GetAddressOf ());

if (!data) {
GST_ERROR_OBJECT (decoder, "memory has no private data");
return nullptr;
}

*index = data->GetViewIndex ();
} else {
*index = gst_d3d11_memory_get_subresource_index (dmem);
}
}

return view;
return GST_FLOW_OK;
}

static void

@@ -1617,7 +1548,7 @@ gst_d3d11_decoder_crop_and_copy_buffer (GstD3D11Decoder * self,
GstFlowReturn
gst_d3d11_decoder_output_picture (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec, GstVideoCodecFrame * frame,
GstCodecPicture * picture, guint buffer_flags,
GstCodecPicture * picture, GstVideoBufferFlags buffer_flags,
gint display_width, gint display_height)
{
GstFlowReturn ret = GST_FLOW_OK;

@@ -2001,18 +1932,28 @@ gst_d3d11_decoder_decide_allocation (GstD3D11Decoder * decoder,
return TRUE;
}

gboolean
gst_d3d11_decoder_set_flushing (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec, gboolean flushing)
static void
gst_d3d11_decoder_set_flushing (GstD3D11Decoder * self, gboolean flushing)
{
g_return_val_if_fail (GST_IS_D3D11_DECODER (decoder), FALSE);
GstD3D11SRWLockGuard lk (&self->lock);
if (self->internal_pool)
gst_buffer_pool_set_flushing (self->internal_pool, flushing);
self->flushing = flushing;
}

GstD3D11SRWLockGuard lk (&decoder->lock);
if (decoder->internal_pool)
gst_buffer_pool_set_flushing (decoder->internal_pool, flushing);
decoder->flushing = flushing;

return TRUE;
void
gst_d3d11_decoder_sink_event (GstD3D11Decoder * decoder, GstEvent * event)
{
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
gst_d3d11_decoder_set_flushing (decoder, TRUE);
break;
case GST_EVENT_FLUSH_STOP:
gst_d3d11_decoder_set_flushing (decoder, FALSE);
break;
default:
break;
}
}

static gboolean

@@ -2257,7 +2198,7 @@ struct _GstD3D11DecoderClassData
* Returns: (transfer full): the new #GstD3D11DecoderClassData
*/
GstD3D11DecoderClassData *
gst_d3d11_decoder_class_data_new (GstD3D11Device * device, GstDXVACodec codec,
gst_d3d11_decoder_class_data_new (GstD3D11Device * device, GstDxvaCodec codec,
GstCaps * sink_caps, GstCaps * src_caps, guint max_resolution)
{
GstD3D11DecoderClassData *ret;

@@ -2342,7 +2283,7 @@ gst_d3d11_decoder_class_data_free (GstD3D11DecoderClassData * data)

typedef struct _GstD3D11DecoderDocCaps
{
GstDXVACodec codec;
GstDxvaCodec codec;
const gchar *sink_caps;
const gchar *src_caps;
} GstD3D11DecoderDocCaps;

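One consequence of the change above: gst_d3d11_decoder_set_flushing() became a private helper driven by the new gst_d3d11_decoder_sink_event(), so elements no longer toggle flushing themselves. A hedged sketch of the resulting subclass sink_event handler (mirroring the H.264 hunk later in this diff) looks like this:

static gboolean
gst_d3d11_h264_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
  GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);

  /* Forward FLUSH_START / FLUSH_STOP to the helper, which flushes or
   * un-flushes the internal buffer pool under its SRW lock. */
  if (self->decoder)
    gst_d3d11_decoder_sink_event (self->decoder, event);

  return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
}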
@@ -24,6 +24,7 @@
#include <gst/video/video.h>
#include <gst/d3d11/gstd3d11.h>
#include <gst/codecs/gstcodecpicture.h>
#include <gst/dxva/gstdxva.h>

G_BEGIN_DECLS

@@ -33,49 +34,73 @@ G_DECLARE_FINAL_TYPE (GstD3D11Decoder,

typedef struct _GstD3D11DecoderClassData GstD3D11DecoderClassData;

typedef enum
struct GstD3D11DecoderSubClassData
{
GST_DXVA_CODEC_NONE,
GST_DXVA_CODEC_MPEG2,
GST_DXVA_CODEC_H264,
GST_DXVA_CODEC_H265,
GST_DXVA_CODEC_VP8,
GST_DXVA_CODEC_VP9,
GST_DXVA_CODEC_AV1,

/* the last of supported codec */
GST_DXVA_CODEC_LAST
} GstDXVACodec;

typedef struct
{
GstDXVACodec codec;
GstDxvaCodec codec;
gint64 adapter_luid;
guint device_id;
guint vendor_id;
} GstD3D11DecoderSubClassData;
};

typedef struct _GstD3D11DecodeInputStreamArgs
{
gpointer picture_params;
gsize picture_params_size;
#define GST_D3D11_DECODER_DEFINE_TYPE(ModuleObjName,module_obj_name,MODULE,OBJ_NAME,ParentName) \
static GstElementClass *parent_class = NULL; \
typedef struct _##ModuleObjName { \
ParentName parent; \
GstD3D11Device *device; \
GstD3D11Decoder *decoder; \
} ModuleObjName;\
typedef struct _##ModuleObjName##Class { \
ParentName##Class parent_class; \
GstD3D11DecoderSubClassData class_data; \
} ModuleObjName##Class; \
static inline ModuleObjName * MODULE##_##OBJ_NAME (gpointer ptr) { \
return (ModuleObjName *) (ptr); \
} \
static inline ModuleObjName##Class * MODULE##_##OBJ_NAME##_GET_CLASS (gpointer ptr) { \
return G_TYPE_INSTANCE_GET_CLASS ((ptr),G_TYPE_FROM_INSTANCE(ptr),ModuleObjName##Class); \
} \
static void module_obj_name##_get_property (GObject * object, \
guint prop_id, GValue * value, GParamSpec * pspec); \
static void module_obj_name##_set_context (GstElement * element, \
GstContext * context); \
static gboolean module_obj_name##_open (GstVideoDecoder * decoder); \
static gboolean module_obj_name##_close (GstVideoDecoder * decoder); \
static gboolean module_obj_name##_negotiate (GstVideoDecoder * decoder); \
static gboolean module_obj_name##_decide_allocation (GstVideoDecoder * decoder, \
GstQuery * query); \
static gboolean module_obj_name##_sink_query (GstVideoDecoder * decoder, \
GstQuery * query); \
static gboolean module_obj_name##_src_query (GstVideoDecoder * decoder, \
GstQuery * query); \
static gboolean module_obj_name##_sink_event (GstVideoDecoder * decoder, \
GstEvent * event); \
static GstFlowReturn module_obj_name##_configure (ParentName * decoder, \
GstVideoCodecState * input_state, const GstVideoInfo * info, \
gint crop_x, gint crop_y, \
gint coded_width, gint coded_height, gint max_dpb_size); \
static GstFlowReturn module_obj_name##_new_picture (ParentName * decoder, \
GstCodecPicture * picture); \
static guint8 module_obj_name##_get_picture_id (ParentName * decoder, \
GstCodecPicture * picture); \
static GstFlowReturn module_obj_name##_start_picture (ParentName * decoder, \
GstCodecPicture * picture, guint8 * picture_id); \
static GstFlowReturn module_obj_name##_end_picture (ParentName * decoder, \
GstCodecPicture * picture, GPtrArray * ref_pics, \
const GstDxvaDecodingArgs * args); \
static GstFlowReturn module_obj_name##_output_picture (ParentName * decoder, \
GstVideoCodecFrame * frame, GstCodecPicture * picture, \
GstVideoBufferFlags buffer_flags, \
gint display_width, gint display_height);

gpointer slice_control;
gsize slice_control_size;

gpointer bitstream;
gsize bitstream_size;

gpointer inverse_quantization_matrix;
gsize inverse_quantization_matrix_size;
} GstD3D11DecodeInputStreamArgs;
#define GST_D3D11_DECODER_DEFINE_TYPE_FULL(ModuleObjName,module_obj_name,MODULE,OBJ_NAME,ParentName) \
GST_D3D11_DECODER_DEFINE_TYPE(ModuleObjName,module_obj_name,MODULE,OBJ_NAME,ParentName); \
static GstFlowReturn module_obj_name##_duplicate_picture (ParentName * decoder, \
GstCodecPicture * src, GstCodecPicture * dst);

GstD3D11Decoder * gst_d3d11_decoder_new (GstD3D11Device * device,
GstDXVACodec codec);
GstDxvaCodec codec);

gboolean gst_d3d11_decoder_is_configured (GstD3D11Decoder * decoder);

gboolean gst_d3d11_decoder_configure (GstD3D11Decoder * decoder,
GstFlowReturn gst_d3d11_decoder_configure (GstD3D11Decoder * decoder,
GstVideoCodecState * input_state,
const GstVideoInfo * out_info,
gint offset_x,

@@ -84,23 +109,30 @@ gboolean gst_d3d11_decoder_configure (GstD3D11Decoder * decoder,
gint coded_height,
guint dpb_size);

GstFlowReturn gst_d3d11_decoder_decode_picture (GstD3D11Decoder * decoder,
GstCodecPicture * picture,
GstD3D11DecodeInputStreamArgs * input_args);

GstFlowReturn gst_d3d11_decoder_new_picture (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec,
GstCodecPicture * picture);

ID3D11VideoDecoderOutputView * gst_d3d11_decoder_get_output_view_from_picture (GstD3D11Decoder * decoder,
GstFlowReturn gst_d3d11_decoder_duplicate_picture (GstD3D11Decoder * decoder,
GstCodecPicture * src,
GstCodecPicture * dst);

guint8 gst_d3d11_decoder_get_picture_id (GstD3D11Decoder * decoder,
GstCodecPicture * picture);

GstFlowReturn gst_d3d11_decoder_start_picture (GstD3D11Decoder * decoder,
GstCodecPicture * picture,
guint8 * view_id);
guint8 * picture_id);

GstFlowReturn gst_d3d11_decoder_end_picture (GstD3D11Decoder * decoder,
GstCodecPicture * picture,
const GstDxvaDecodingArgs * args);

GstFlowReturn gst_d3d11_decoder_output_picture (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec,
GstVideoCodecFrame * frame,
GstCodecPicture * picture,
guint buffer_flags,
GstVideoBufferFlags buffer_flags,
gint display_width,
gint display_height);

@@ -111,26 +143,13 @@ gboolean gst_d3d11_decoder_decide_allocation (GstD3D11Decoder * decod
GstVideoDecoder * videodec,
GstQuery * query);

gboolean gst_d3d11_decoder_set_flushing (GstD3D11Decoder * decoder,
GstVideoDecoder * videodec,
gboolean flushing);

/* Utils for class registration */
typedef struct _GstDXVAResolution
{
guint width;
guint height;
} GstDXVAResolution;

static const GstDXVAResolution gst_dxva_resolutions[] = {
{1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160},
{7680, 4320}, {8192, 4320}, {15360, 8640}, {16384, 8640}
};
void gst_d3d11_decoder_sink_event (GstD3D11Decoder * decoder,
GstEvent * event);

gboolean gst_d3d11_decoder_util_is_legacy_device (GstD3D11Device * device);

gboolean gst_d3d11_decoder_get_supported_decoder_profile (GstD3D11Device * device,
GstDXVACodec codec,
GstDxvaCodec codec,
GstVideoFormat format,
const GUID ** selected_profile);

@@ -145,7 +164,7 @@ gboolean gst_d3d11_decoder_supports_resolution (GstD3D11Device * device
guint height);

GstD3D11DecoderClassData * gst_d3d11_decoder_class_data_new (GstD3D11Device * device,
GstDXVACodec codec,
GstDxvaCodec codec,
GstCaps * sink_caps,
GstCaps * src_caps,
guint max_resolution);

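The GST_D3D11_DECODER_DEFINE_TYPE_FULL macro declared above replaces the hand-written per-element boilerplate; for reference, the H.264 decoder further below in this diff invokes it as:

/* Declares the GstD3D11H264Dec instance/class structs, the cast helpers and
 * the static prototypes for every GstDxva vfunc, including duplicate_picture
 * (the _FULL variant). */
GST_D3D11_DECODER_DEFINE_TYPE_FULL (GstD3D11H264Dec, gst_d3d11_h264_dec,
    GST, D3D11_H264_DEC, GstDxvaH264Decoder);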
@@ -15,36 +15,6 @@
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
* NOTE: some of implementations are copied/modified from Chromium code
*
* Copyright 2015 The Chromium Authors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

/**
@ -67,99 +37,13 @@
|
|||
#endif
|
||||
|
||||
#include "gstd3d11h264dec.h"
|
||||
|
||||
#include <gst/codecs/gsth264decoder.h>
|
||||
#include <string.h>
|
||||
#include <vector>
|
||||
|
||||
/* HACK: to expose dxva data structure on UWP */
|
||||
#ifdef WINAPI_PARTITION_DESKTOP
|
||||
#undef WINAPI_PARTITION_DESKTOP
|
||||
#endif
|
||||
#define WINAPI_PARTITION_DESKTOP 1
|
||||
#include <d3d9.h>
|
||||
#include <dxva.h>
|
||||
#include <gst/dxva/gstdxvah264decoder.h>
|
||||
|
||||
GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_h264_dec_debug);
|
||||
#define GST_CAT_DEFAULT gst_d3d11_h264_dec_debug
|
||||
|
||||
/* *INDENT-OFF* */
|
||||
typedef struct _GstD3D11H264DecInner
|
||||
{
|
||||
GstD3D11Device *device = nullptr;
|
||||
GstD3D11Decoder *d3d11_decoder = nullptr;
|
||||
|
||||
DXVA_PicParams_H264 pic_params;
|
||||
DXVA_Qmatrix_H264 iq_matrix;
|
||||
|
||||
std::vector<DXVA_Slice_H264_Short> slice_list;
|
||||
std::vector<guint8> bitstream_buffer;
|
||||
|
||||
gint crop_x = 0;
|
||||
gint crop_y = 0;
|
||||
gint width = 0;
|
||||
gint height = 0;
|
||||
gint coded_width = 0;
|
||||
gint coded_height = 0;
|
||||
gint bitdepth = 0;
|
||||
guint8 chroma_format_idc = 0;
|
||||
GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
|
||||
gboolean interlaced = FALSE;
|
||||
gint max_dpb_size = 0;
|
||||
} GstD3D11H264DecInner;
|
||||
|
||||
/* *INDENT-ON* */
|
||||
typedef struct _GstD3D11H264Dec
|
||||
{
|
||||
GstH264Decoder parent;
|
||||
GstD3D11H264DecInner *inner;
|
||||
} GstD3D11H264Dec;
|
||||
|
||||
typedef struct _GstD3D11H264DecClass
|
||||
{
|
||||
GstH264DecoderClass parent_class;
|
||||
GstD3D11DecoderSubClassData class_data;
|
||||
} GstD3D11H264DecClass;
|
||||
|
||||
static GstElementClass *parent_class = NULL;
|
||||
|
||||
#define GST_D3D11_H264_DEC(object) ((GstD3D11H264Dec *) (object))
|
||||
#define GST_D3D11_H264_DEC_GET_CLASS(object) \
|
||||
(G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstD3D11H264DecClass))
|
||||
|
||||
static void gst_d3d11_h264_dec_get_property (GObject * object,
|
||||
guint prop_id, GValue * value, GParamSpec * pspec);
|
||||
static void gst_d3d11_h264_dec_finalize (GObject * object);
|
||||
static void gst_d3d11_h264_dec_set_context (GstElement * element,
|
||||
GstContext * context);
|
||||
|
||||
static gboolean gst_d3d11_h264_dec_open (GstVideoDecoder * decoder);
|
||||
static gboolean gst_d3d11_h264_dec_close (GstVideoDecoder * decoder);
|
||||
static gboolean gst_d3d11_h264_dec_negotiate (GstVideoDecoder * decoder);
|
||||
static gboolean gst_d3d11_h264_dec_decide_allocation (GstVideoDecoder *
|
||||
decoder, GstQuery * query);
|
||||
static gboolean gst_d3d11_h264_dec_src_query (GstVideoDecoder * decoder,
|
||||
GstQuery * query);
|
||||
static gboolean gst_d3d11_h264_dec_sink_event (GstVideoDecoder * decoder,
|
||||
GstEvent * event);
|
||||
|
||||
/* GstH264Decoder */
|
||||
static GstFlowReturn gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
|
||||
const GstH264SPS * sps, gint max_dpb_size);
|
||||
static GstFlowReturn gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
|
||||
GstVideoCodecFrame * frame, GstH264Picture * picture);
|
||||
static GstFlowReturn
|
||||
gst_d3d11_h264_dec_new_field_picture (GstH264Decoder * decoder,
|
||||
GstH264Picture * first_field, GstH264Picture * second_field);
|
||||
static GstFlowReturn gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
|
||||
GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb);
|
||||
static GstFlowReturn gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
|
||||
GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0,
|
||||
GArray * ref_pic_list1);
|
||||
static GstFlowReturn gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
|
||||
GstH264Picture * picture);
|
||||
static GstFlowReturn gst_d3d11_h264_dec_output_picture (GstH264Decoder *
|
||||
decoder, GstVideoCodecFrame * frame, GstH264Picture * picture);
|
||||
GST_D3D11_DECODER_DEFINE_TYPE_FULL (GstD3D11H264Dec, gst_d3d11_h264_dec,
|
||||
GST, D3D11_H264_DEC, GstDxvaH264Decoder);
|
||||
|
||||
static void
|
||||
gst_d3d11_h264_dec_class_init (GstD3D11H264DecClass * klass, gpointer data)
|
||||
|
@ -167,11 +51,10 @@ gst_d3d11_h264_dec_class_init (GstD3D11H264DecClass * klass, gpointer data)
|
|||
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
|
||||
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
|
||||
GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
|
||||
GstH264DecoderClass *h264decoder_class = GST_H264_DECODER_CLASS (klass);
|
||||
GstDxvaH264DecoderClass *dxva_class = GST_DXVA_H264_DECODER_CLASS (klass);
|
||||
GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;
|
||||
|
||||
gobject_class->get_property = gst_d3d11_h264_dec_get_property;
|
||||
gobject_class->finalize = gst_d3d11_h264_dec_finalize;
|
||||
|
||||
element_class->set_context =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_set_context);
|
||||
|
@ -194,29 +77,26 @@ gst_d3d11_h264_dec_class_init (GstD3D11H264DecClass * klass, gpointer data)
|
|||
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_negotiate);
|
||||
decoder_class->decide_allocation =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_decide_allocation);
|
||||
decoder_class->sink_query = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_sink_query);
|
||||
decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_src_query);
|
||||
decoder_class->sink_event = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_sink_event);
|
||||
|
||||
h264decoder_class->new_sequence =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_new_sequence);
|
||||
h264decoder_class->new_picture =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_new_picture);
|
||||
h264decoder_class->new_field_picture =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_new_field_picture);
|
||||
h264decoder_class->start_picture =
|
||||
dxva_class->configure = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_configure);
|
||||
dxva_class->new_picture = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_new_picture);
|
||||
dxva_class->duplicate_picture =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_duplicate_picture);
|
||||
dxva_class->get_picture_id =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_get_picture_id);
|
||||
dxva_class->start_picture =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_start_picture);
|
||||
h264decoder_class->decode_slice =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_decode_slice);
|
||||
h264decoder_class->end_picture =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_end_picture);
|
||||
h264decoder_class->output_picture =
|
||||
dxva_class->end_picture = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_end_picture);
|
||||
dxva_class->output_picture =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_output_picture);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h264_dec_init (GstD3D11H264Dec * self)
|
||||
{
|
||||
self->inner = new GstD3D11H264DecInner ();
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -229,74 +109,37 @@ gst_d3d11_h264_dec_get_property (GObject * object, guint prop_id,
|
|||
gst_d3d11_decoder_proxy_get_property (object, prop_id, value, pspec, cdata);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h264_dec_finalize (GObject * object)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (object);
|
||||
|
||||
delete self->inner;
|
||||
|
||||
G_OBJECT_CLASS (parent_class)->finalize (object);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h264_dec_set_context (GstElement * element, GstContext * context)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (element);
|
||||
GstD3D11H264DecInner *inner = self->inner;
|
||||
GstD3D11H264DecClass *klass = GST_D3D11_H264_DEC_GET_CLASS (self);
|
||||
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
|
||||
|
||||
gst_d3d11_handle_set_context_for_adapter_luid (element,
|
||||
context, cdata->adapter_luid, &inner->device);
|
||||
context, cdata->adapter_luid, &self->device);
|
||||
|
||||
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
|
||||
}
|
||||
|
||||
/* Clear all codec specific (e.g., SPS) data */
|
||||
static void
|
||||
gst_d3d11_h264_dec_reset (GstD3D11H264Dec * self)
|
||||
{
|
||||
GstD3D11H264DecInner *inner = self->inner;
|
||||
|
||||
inner->width = 0;
|
||||
inner->height = 0;
|
||||
inner->coded_width = 0;
|
||||
inner->coded_height = 0;
|
||||
inner->bitdepth = 0;
|
||||
inner->chroma_format_idc = 0;
|
||||
inner->out_format = GST_VIDEO_FORMAT_UNKNOWN;
|
||||
inner->interlaced = FALSE;
|
||||
inner->max_dpb_size = 0;
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_h264_dec_open (GstVideoDecoder * decoder)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
|
||||
GstD3D11H264DecInner *inner = self->inner;
|
||||
GstD3D11H264DecClass *klass = GST_D3D11_H264_DEC_GET_CLASS (self);
|
||||
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
|
||||
|
||||
if (!gst_d3d11_decoder_proxy_open (decoder,
|
||||
cdata, &inner->device, &inner->d3d11_decoder)) {
|
||||
GST_ERROR_OBJECT (self, "Failed to open decoder");
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
gst_d3d11_h264_dec_reset (self);
|
||||
|
||||
return TRUE;
|
||||
return gst_d3d11_decoder_proxy_open (decoder,
|
||||
cdata, &self->device, &self->decoder);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_h264_dec_close (GstVideoDecoder * decoder)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
|
||||
GstD3D11H264DecInner *inner = self->inner;
|
||||
|
||||
gst_clear_object (&inner->d3d11_decoder);
|
||||
gst_clear_object (&inner->device);
|
||||
gst_clear_object (&self->decoder);
|
||||
gst_clear_object (&self->device);
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
@ -305,9 +148,8 @@ static gboolean
|
|||
gst_d3d11_h264_dec_negotiate (GstVideoDecoder * decoder)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
|
||||
GstD3D11H264DecInner *inner = self->inner;
|
||||
|
||||
if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
|
||||
if (!gst_d3d11_decoder_negotiate (self->decoder, decoder))
|
||||
return FALSE;
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
|
||||
|
@ -318,27 +160,42 @@ gst_d3d11_h264_dec_decide_allocation (GstVideoDecoder * decoder,
|
|||
GstQuery * query)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
|
||||
GstD3D11H264DecInner *inner = self->inner;
|
||||
|
||||
if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder,
|
||||
decoder, query)) {
|
||||
if (!gst_d3d11_decoder_decide_allocation (self->decoder, decoder, query))
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation
|
||||
(decoder, query);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_h264_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
|
||||
gst_d3d11_h264_dec_sink_query (GstVideoDecoder * decoder, GstQuery * query)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
|
||||
GstD3D11H264DecInner *inner = self->inner;
|
||||
|
||||
switch (GST_QUERY_TYPE (query)) {
|
||||
case GST_QUERY_CONTEXT:
|
||||
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
|
||||
query, inner->device)) {
|
||||
query, self->device)) {
|
||||
return TRUE;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_h264_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
|
||||
|
||||
switch (GST_QUERY_TYPE (query)) {
|
||||
case GST_QUERY_CONTEXT:
|
||||
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
|
||||
query, self->device)) {
|
||||
return TRUE;
|
||||
}
|
||||
break;
|
||||
|
@ -353,482 +210,82 @@ static gboolean
|
|||
gst_d3d11_h264_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
|
||||
GstD3D11H264DecInner *inner = self->inner;
|
||||
|
||||
switch (GST_EVENT_TYPE (event)) {
|
||||
case GST_EVENT_FLUSH_START:
|
||||
if (inner->d3d11_decoder)
|
||||
gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
|
||||
break;
|
||||
case GST_EVENT_FLUSH_STOP:
|
||||
if (inner->d3d11_decoder)
|
||||
gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if (self->decoder)
|
||||
gst_d3d11_decoder_sink_event (self->decoder, event);
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
|
||||
const GstH264SPS * sps, gint max_dpb_size)
|
||||
gst_d3d11_h264_dec_configure (GstDxvaH264Decoder * decoder,
|
||||
GstVideoCodecState * input_state, const GstVideoInfo * info,
|
||||
gint crop_x, gint crop_y, gint coded_width, gint coded_height,
|
||||
gint max_dpb_size)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
|
||||
GstD3D11H264DecInner *inner = self->inner;
|
||||
gint crop_width, crop_height;
|
||||
gboolean interlaced;
|
||||
gboolean modified = FALSE;
|
||||
|
||||
GST_LOG_OBJECT (self, "new sequence");
|
||||
|
||||
if (sps->frame_cropping_flag) {
|
||||
crop_width = sps->crop_rect_width;
|
||||
crop_height = sps->crop_rect_height;
|
||||
} else {
|
||||
crop_width = sps->width;
|
||||
crop_height = sps->height;
|
||||
}
|
||||
|
||||
if (inner->width != crop_width || inner->height != crop_height ||
|
||||
inner->coded_width != sps->width || inner->coded_height != sps->height ||
|
||||
inner->crop_x != sps->crop_rect_x || inner->crop_y != sps->crop_rect_y) {
|
||||
GST_INFO_OBJECT (self, "resolution changed %dx%d (%dx%d)",
|
||||
crop_width, crop_height, sps->width, sps->height);
|
||||
inner->crop_x = sps->crop_rect_x;
|
||||
inner->crop_y = sps->crop_rect_y;
|
||||
inner->width = crop_width;
|
||||
inner->height = crop_height;
|
||||
inner->coded_width = sps->width;
|
||||
inner->coded_height = sps->height;
|
||||
modified = TRUE;
|
||||
}
|
||||
|
||||
if (inner->bitdepth != sps->bit_depth_luma_minus8 + 8) {
|
||||
GST_INFO_OBJECT (self, "bitdepth changed");
|
||||
inner->bitdepth = (guint) sps->bit_depth_luma_minus8 + 8;
|
||||
modified = TRUE;
|
||||
}
|
||||
|
||||
if (inner->chroma_format_idc != sps->chroma_format_idc) {
|
||||
GST_INFO_OBJECT (self, "chroma format changed");
|
||||
inner->chroma_format_idc = sps->chroma_format_idc;
|
||||
modified = TRUE;
|
||||
}
|
||||
|
||||
interlaced = !sps->frame_mbs_only_flag;
|
||||
if (inner->interlaced != interlaced) {
|
||||
GST_INFO_OBJECT (self, "interlaced sequence changed");
|
||||
inner->interlaced = interlaced;
|
||||
modified = TRUE;
|
||||
}
|
||||
|
||||
if (inner->max_dpb_size < max_dpb_size) {
|
||||
GST_INFO_OBJECT (self, "Requires larger DPB size (%d -> %d)",
|
||||
inner->max_dpb_size, max_dpb_size);
|
||||
modified = TRUE;
|
||||
}
|
||||
|
||||
if (modified || !gst_d3d11_decoder_is_configured (inner->d3d11_decoder)) {
|
||||
GstVideoInfo info;
|
||||
|
||||
inner->out_format = GST_VIDEO_FORMAT_UNKNOWN;
|
||||
|
||||
if (inner->bitdepth == 8) {
|
||||
if (inner->chroma_format_idc == 1)
|
||||
inner->out_format = GST_VIDEO_FORMAT_NV12;
|
||||
else {
|
||||
GST_FIXME_OBJECT (self, "Could not support 8bits non-4:2:0 format");
|
||||
}
|
||||
}
|
||||
|
||||
if (inner->out_format == GST_VIDEO_FORMAT_UNKNOWN) {
|
||||
GST_ERROR_OBJECT (self, "Could not support bitdepth/chroma format");
|
||||
return GST_FLOW_NOT_NEGOTIATED;
|
||||
}
|
||||
|
||||
gst_video_info_set_format (&info,
|
||||
inner->out_format, inner->width, inner->height);
|
||||
if (inner->interlaced)
|
||||
GST_VIDEO_INFO_INTERLACE_MODE (&info) = GST_VIDEO_INTERLACE_MODE_MIXED;
|
||||
|
||||
/* Store configured DPB size here. Then, it will be referenced later
|
||||
* to decide whether we need to re-open decoder object or not.
|
||||
* For instance, if every configuration is same apart from DPB size and
|
||||
* new DPB size is decreased, we can reuse existing decoder object.
|
||||
*/
|
||||
inner->max_dpb_size = max_dpb_size;
|
||||
if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
|
||||
decoder->input_state, &info, inner->crop_x, inner->crop_y,
|
||||
inner->coded_width, inner->coded_height, max_dpb_size)) {
|
||||
GST_ERROR_OBJECT (self, "Failed to create decoder");
|
||||
return GST_FLOW_NOT_NEGOTIATED;
|
||||
}
|
||||
|
||||
if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
|
||||
GST_WARNING_OBJECT (self, "Failed to negotiate with downstream");
|
||||
return GST_FLOW_NOT_NEGOTIATED;
|
||||
}
|
||||
}
|
||||
|
||||
return GST_FLOW_OK;
|
||||
return gst_d3d11_decoder_configure (self->decoder, input_state,
|
||||
info, crop_x, crop_y, coded_width, coded_height, max_dpb_size);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
|
||||
GstVideoCodecFrame * frame, GstH264Picture * picture)
|
||||
gst_d3d11_h264_dec_new_picture (GstDxvaH264Decoder * decoder,
|
||||
GstCodecPicture * picture)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
|
||||
GstD3D11H264DecInner *inner = self->inner;
|
||||
|
||||
return gst_d3d11_decoder_new_picture (inner->d3d11_decoder,
|
||||
GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
|
||||
return gst_d3d11_decoder_new_picture (self->decoder,
|
||||
GST_VIDEO_DECODER (decoder), picture);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_h264_dec_new_field_picture (GstH264Decoder * decoder,
|
||||
GstH264Picture * first_field, GstH264Picture * second_field)
|
||||
gst_d3d11_h264_dec_duplicate_picture (GstDxvaH264Decoder * decoder,
|
||||
GstCodecPicture * src, GstCodecPicture * dst)
|
||||
{
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
|
||||
GstBuffer *view_buffer;
|
||||
|
||||
view_buffer = (GstBuffer *) gst_h264_picture_get_user_data (first_field);
|
||||
|
||||
if (!view_buffer) {
|
||||
GST_WARNING_OBJECT (self, "First picture does not have output view buffer");
|
||||
return GST_FLOW_OK;
|
||||
}
|
||||
|
||||
GST_LOG_OBJECT (self, "New field picture with buffer %" GST_PTR_FORMAT,
|
||||
view_buffer);
|
||||
|
||||
gst_h264_picture_set_user_data (second_field,
|
||||
gst_buffer_ref (view_buffer), (GDestroyNotify) gst_buffer_unref);
|
||||
|
||||
return GST_FLOW_OK;
|
||||
return gst_d3d11_decoder_duplicate_picture (self->decoder, src, dst);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h264_dec_picture_params_from_sps (GstD3D11H264Dec * self,
|
||||
const GstH264SPS * sps, gboolean field_pic, DXVA_PicParams_H264 * params)
|
||||
static guint8
|
||||
gst_d3d11_h264_dec_get_picture_id (GstDxvaH264Decoder * decoder,
|
||||
GstCodecPicture * picture)
|
||||
{
|
||||
#define COPY_FIELD(f) \
|
||||
(params)->f = (sps)->f
|
||||
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
|
||||
|
||||
params->wFrameWidthInMbsMinus1 = sps->pic_width_in_mbs_minus1;
|
||||
if (!sps->frame_mbs_only_flag) {
|
||||
params->wFrameHeightInMbsMinus1 =
|
||||
((sps->pic_height_in_map_units_minus1 + 1) << 1) - 1;
|
||||
} else {
|
||||
params->wFrameHeightInMbsMinus1 = sps->pic_height_in_map_units_minus1;
|
||||
}
|
||||
params->residual_colour_transform_flag = sps->separate_colour_plane_flag;
|
||||
params->MbaffFrameFlag = (sps->mb_adaptive_frame_field_flag && !field_pic);
|
||||
params->field_pic_flag = field_pic;
|
||||
params->MinLumaBipredSize8x8Flag = sps->level_idc >= 31;
|
||||
|
||||
COPY_FIELD (num_ref_frames);
|
||||
COPY_FIELD (chroma_format_idc);
|
||||
COPY_FIELD (frame_mbs_only_flag);
|
||||
COPY_FIELD (bit_depth_luma_minus8);
|
||||
COPY_FIELD (bit_depth_chroma_minus8);
|
||||
COPY_FIELD (log2_max_frame_num_minus4);
|
||||
COPY_FIELD (pic_order_cnt_type);
|
||||
COPY_FIELD (log2_max_pic_order_cnt_lsb_minus4);
|
||||
COPY_FIELD (delta_pic_order_always_zero_flag);
|
||||
COPY_FIELD (direct_8x8_inference_flag);
|
||||
|
||||
#undef COPY_FIELD
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h264_dec_picture_params_from_pps (GstD3D11H264Dec * self,
|
||||
const GstH264PPS * pps, DXVA_PicParams_H264 * params)
|
||||
{
|
||||
#define COPY_FIELD(f) \
|
||||
(params)->f = (pps)->f
|
||||
|
||||
COPY_FIELD (constrained_intra_pred_flag);
|
||||
COPY_FIELD (weighted_pred_flag);
|
||||
COPY_FIELD (weighted_bipred_idc);
|
||||
COPY_FIELD (transform_8x8_mode_flag);
|
||||
COPY_FIELD (pic_init_qs_minus26);
|
||||
COPY_FIELD (chroma_qp_index_offset);
|
||||
COPY_FIELD (second_chroma_qp_index_offset);
|
||||
COPY_FIELD (pic_init_qp_minus26);
|
||||
COPY_FIELD (num_ref_idx_l0_active_minus1);
|
||||
COPY_FIELD (num_ref_idx_l1_active_minus1);
|
||||
COPY_FIELD (entropy_coding_mode_flag);
|
||||
COPY_FIELD (pic_order_present_flag);
|
||||
COPY_FIELD (deblocking_filter_control_present_flag);
|
||||
COPY_FIELD (redundant_pic_cnt_present_flag);
|
||||
COPY_FIELD (num_slice_groups_minus1);
|
||||
COPY_FIELD (slice_group_map_type);
|
||||
|
||||
#undef COPY_FIELD
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h264_dec_picture_params_from_slice_header (GstD3D11H264Dec *
|
||||
self, const GstH264SliceHdr * slice_header, DXVA_PicParams_H264 * params)
|
||||
{
|
||||
params->sp_for_switch_flag = slice_header->sp_for_switch_flag;
|
||||
params->field_pic_flag = slice_header->field_pic_flag;
|
||||
params->CurrPic.AssociatedFlag = slice_header->bottom_field_flag;
|
||||
params->IntraPicFlag =
|
||||
GST_H264_IS_I_SLICE (slice_header) || GST_H264_IS_SI_SLICE (slice_header);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_h264_dec_fill_picture_params (GstD3D11H264Dec * self,
|
||||
const GstH264SliceHdr * slice_header, DXVA_PicParams_H264 * params)
|
||||
{
|
||||
const GstH264SPS *sps;
|
||||
const GstH264PPS *pps;
|
||||
|
||||
g_return_val_if_fail (slice_header->pps != NULL, FALSE);
|
||||
g_return_val_if_fail (slice_header->pps->sequence != NULL, FALSE);
|
||||
|
||||
pps = slice_header->pps;
|
||||
sps = pps->sequence;
|
||||
|
||||
params->MbsConsecutiveFlag = 1;
|
||||
params->Reserved16Bits = 3;
|
||||
params->ContinuationFlag = 1;
|
||||
params->Reserved8BitsA = 0;
|
||||
params->Reserved8BitsB = 0;
|
||||
params->StatusReportFeedbackNumber = 1;
|
||||
|
||||
gst_d3d11_h264_dec_picture_params_from_sps (self,
|
||||
sps, slice_header->field_pic_flag, params);
|
||||
gst_d3d11_h264_dec_picture_params_from_pps (self, pps, params);
|
||||
gst_d3d11_h264_dec_picture_params_from_slice_header (self,
|
||||
slice_header, params);
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
static inline void
|
||||
init_pic_params (DXVA_PicParams_H264 * params)
|
||||
{
|
||||
memset (params, 0, sizeof (DXVA_PicParams_H264));
|
||||
for (guint i = 0; i < G_N_ELEMENTS (params->RefFrameList); i++)
|
||||
params->RefFrameList[i].bPicEntry = 0xff;
|
||||
return gst_d3d11_decoder_get_picture_id (self->decoder, picture);
|
||||
}
|
||||
|
||||
static GstFlowReturn
gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
    GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb)
gst_d3d11_h264_dec_start_picture (GstDxvaH264Decoder * decoder,
    GstCodecPicture * picture, guint8 * picture_id)
{
  GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
  GstD3D11H264DecInner *inner = self->inner;
  DXVA_PicParams_H264 *pic_params = &inner->pic_params;
  DXVA_Qmatrix_H264 *iq_matrix = &inner->iq_matrix;
  ID3D11VideoDecoderOutputView *view;
  guint8 view_id = 0xff;
  GArray *dpb_array;
  GstH264PPS *pps;
  guint i, j;

  pps = slice->header.pps;

  view = gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
      GST_CODEC_PICTURE (picture), &view_id);
  if (!view) {
    GST_ERROR_OBJECT (self, "current picture does not have output view handle");
    return GST_FLOW_ERROR;
  }

  init_pic_params (pic_params);
  gst_d3d11_h264_dec_fill_picture_params (self, &slice->header, pic_params);

  pic_params->CurrPic.Index7Bits = view_id;
  pic_params->RefPicFlag = GST_H264_PICTURE_IS_REF (picture);
  pic_params->frame_num = picture->frame_num;

  if (picture->field == GST_H264_PICTURE_FIELD_TOP_FIELD) {
    pic_params->CurrFieldOrderCnt[0] = picture->top_field_order_cnt;
    pic_params->CurrFieldOrderCnt[1] = 0;
  } else if (picture->field == GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
    pic_params->CurrFieldOrderCnt[0] = 0;
    pic_params->CurrFieldOrderCnt[1] = picture->bottom_field_order_cnt;
  } else {
    pic_params->CurrFieldOrderCnt[0] = picture->top_field_order_cnt;
    pic_params->CurrFieldOrderCnt[1] = picture->bottom_field_order_cnt;
  }

  dpb_array = gst_h264_dpb_get_pictures_all (dpb);
  for (i = 0, j = 0; i < dpb_array->len && j < 16; i++) {
    GstH264Picture *other = g_array_index (dpb_array, GstH264Picture *, i);
    guint8 id = 0xff;

    if (!GST_H264_PICTURE_IS_REF (other))
      continue;

    /* The second field picture will be handled differently */
    if (other->second_field)
      continue;

    gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
        GST_CODEC_PICTURE (other), &id);
    pic_params->RefFrameList[j].Index7Bits = id;

    if (GST_H264_PICTURE_IS_LONG_TERM_REF (other)) {
      pic_params->RefFrameList[j].AssociatedFlag = 1;
      pic_params->FrameNumList[j] = other->long_term_frame_idx;
    } else {
      pic_params->RefFrameList[j].AssociatedFlag = 0;
      pic_params->FrameNumList[j] = other->frame_num;
    }

    switch (other->field) {
      case GST_H264_PICTURE_FIELD_TOP_FIELD:
        pic_params->FieldOrderCntList[j][0] = other->top_field_order_cnt;
        pic_params->UsedForReferenceFlags |= 0x1 << (2 * j);
        break;
      case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
        pic_params->FieldOrderCntList[j][1] = other->bottom_field_order_cnt;
        pic_params->UsedForReferenceFlags |= 0x1 << (2 * j + 1);
        break;
      default:
        pic_params->FieldOrderCntList[j][0] = other->top_field_order_cnt;
        pic_params->FieldOrderCntList[j][1] = other->bottom_field_order_cnt;
        pic_params->UsedForReferenceFlags |= 0x3 << (2 * j);
        break;
    }

    if (other->other_field) {
      GstH264Picture *other_field = other->other_field;

      switch (other_field->field) {
        case GST_H264_PICTURE_FIELD_TOP_FIELD:
          pic_params->FieldOrderCntList[j][0] =
              other_field->top_field_order_cnt;
          pic_params->UsedForReferenceFlags |= 0x1 << (2 * j);
          break;
        case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
          pic_params->FieldOrderCntList[j][1] =
              other_field->bottom_field_order_cnt;
          pic_params->UsedForReferenceFlags |= 0x1 << (2 * j + 1);
          break;
        default:
          break;
      }
    }

    pic_params->NonExistingFrameFlags |= (other->nonexisting) << j;
    j++;
  }
  g_array_unref (dpb_array);

  G_STATIC_ASSERT (sizeof (iq_matrix->bScalingLists4x4) ==
      sizeof (pps->scaling_lists_4x4));
  memcpy (iq_matrix->bScalingLists4x4, pps->scaling_lists_4x4,
      sizeof (pps->scaling_lists_4x4));

  G_STATIC_ASSERT (sizeof (iq_matrix->bScalingLists8x8[0]) ==
      sizeof (pps->scaling_lists_8x8[0]));
  memcpy (iq_matrix->bScalingLists8x8[0], pps->scaling_lists_8x8[0],
      sizeof (pps->scaling_lists_8x8[0]));
  memcpy (iq_matrix->bScalingLists8x8[1], pps->scaling_lists_8x8[1],
      sizeof (pps->scaling_lists_8x8[1]));

  inner->slice_list.resize (0);
  inner->bitstream_buffer.resize (0);

  return GST_FLOW_OK;
  return gst_d3d11_decoder_start_picture (self->decoder, picture, picture_id);
}

static GstFlowReturn
gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
    GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0,
    GArray * ref_pic_list1)
gst_d3d11_h264_dec_end_picture (GstDxvaH264Decoder * decoder,
    GstCodecPicture * picture, GPtrArray * ref_pics,
    const GstDxvaDecodingArgs * args)
{
  GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
  GstD3D11H264DecInner *inner = self->inner;
  DXVA_Slice_H264_Short dxva_slice;
  static const guint8 start_code[] = { 0, 0, 1 };
  const size_t start_code_size = sizeof (start_code);

  dxva_slice.BSNALunitDataLocation = inner->bitstream_buffer.size ();
  /* Includes 3 bytes start code prefix */
  dxva_slice.SliceBytesInBuffer = slice->nalu.size + start_code_size;
  dxva_slice.wBadSliceChopping = 0;

  inner->slice_list.push_back (dxva_slice);

  size_t pos = inner->bitstream_buffer.size ();
  inner->bitstream_buffer.resize (pos + start_code_size + slice->nalu.size);

  /* Fill start code prefix */
  memcpy (&inner->bitstream_buffer[0] + pos, start_code, start_code_size);

  /* Copy bitstream */
  memcpy (&inner->bitstream_buffer[0] + pos + start_code_size,
      slice->nalu.data + slice->nalu.offset, slice->nalu.size);

  return GST_FLOW_OK;
  return gst_d3d11_decoder_end_picture (self->decoder, picture, args);
}

static GstFlowReturn
gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
    GstH264Picture * picture)
gst_d3d11_h264_dec_output_picture (GstDxvaH264Decoder * decoder,
    GstVideoCodecFrame * frame, GstCodecPicture * picture,
    GstVideoBufferFlags buffer_flags, gint display_width, gint display_height)
{
  GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
  GstD3D11H264DecInner *inner = self->inner;
  size_t bitstream_buffer_size;
  size_t bitstream_pos;
  GstD3D11DecodeInputStreamArgs input_args;

  GST_LOG_OBJECT (self, "end picture %p, (poc %d)",
      picture, picture->pic_order_cnt);

  if (inner->bitstream_buffer.empty () || inner->slice_list.empty ()) {
    GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
    return GST_FLOW_ERROR;
  }

  memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));

  bitstream_pos = inner->bitstream_buffer.size ();
  bitstream_buffer_size = GST_ROUND_UP_128 (bitstream_pos);

  if (bitstream_buffer_size > bitstream_pos) {
    size_t padding = bitstream_buffer_size - bitstream_pos;

    /* As per DXVA spec, total amount of bitstream buffer size should be
     * 128 bytes aligned. If actual data is not multiple of 128 bytes,
     * the last slice data needs to be zero-padded */
    inner->bitstream_buffer.resize (bitstream_buffer_size, 0);

    DXVA_Slice_H264_Short & slice = inner->slice_list.back ();
    slice.SliceBytesInBuffer += padding;
  }

  input_args.picture_params = &inner->pic_params;
  input_args.picture_params_size = sizeof (DXVA_PicParams_H264);
  input_args.slice_control = &inner->slice_list[0];
  input_args.slice_control_size =
      sizeof (DXVA_Slice_H264_Short) * inner->slice_list.size ();
  input_args.bitstream = &inner->bitstream_buffer[0];
  input_args.bitstream_size = inner->bitstream_buffer.size ();
  input_args.inverse_quantization_matrix = &inner->iq_matrix;
  input_args.inverse_quantization_matrix_size = sizeof (DXVA_Qmatrix_H264);

  return gst_d3d11_decoder_decode_picture (inner->d3d11_decoder,
      GST_CODEC_PICTURE (picture), &input_args);
}

static GstFlowReturn
gst_d3d11_h264_dec_output_picture (GstH264Decoder * decoder,
    GstVideoCodecFrame * frame, GstH264Picture * picture)
{
  GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
  GstD3D11H264DecInner *inner = self->inner;

  return gst_d3d11_decoder_output_picture (inner->d3d11_decoder,
      GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
      picture->buffer_flags, inner->width, inner->height);
  return gst_d3d11_decoder_output_picture (self->decoder,
      GST_VIDEO_DECODER (decoder), frame, picture,
      buffer_flags, display_width, display_height);
}

void
@@ -843,18 +300,18 @@ gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
  gboolean ret;
  GTypeInfo type_info = {
    sizeof (GstD3D11H264DecClass),
    NULL,
    NULL,
    nullptr,
    nullptr,
    (GClassInitFunc) gst_d3d11_h264_dec_class_init,
    NULL,
    NULL,
    nullptr,
    nullptr,
    sizeof (GstD3D11H264Dec),
    0,
    (GInstanceInitFunc) gst_d3d11_h264_dec_init,
  };
  const GUID *supported_profile = NULL;
  GstCaps *sink_caps = NULL;
  GstCaps *src_caps = NULL;
  const GUID *supported_profile = nullptr;
  GstCaps *sink_caps = nullptr;
  GstCaps *src_caps = nullptr;
  guint max_width = 0;
  guint max_height = 0;
  guint resolution;
@@ -867,8 +324,7 @@ gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
    return;
  }

  ret =
      gst_d3d11_decoder_supports_format (device, supported_profile,
  ret = gst_d3d11_decoder_supports_format (device, supported_profile,
      DXGI_FORMAT_NV12);
  if (!ret) {
    GST_FIXME_OBJECT (device, "device does not support NV12 format");
@@ -927,7 +383,7 @@ gst_d3d11_h264_dec_register (GstPlugin * plugin, GstD3D11Device * device,
    feature_name = g_strdup_printf ("d3d11h264device%ddec", index);
  }

  type = g_type_register_static (GST_TYPE_H264_DECODER,
  type = g_type_register_static (GST_TYPE_DXVA_H264_DECODER,
      type_name, &type_info, (GTypeFlags) 0);

  /* make lower rank than default device */

@@ -17,8 +17,7 @@
 * Boston, MA 02110-1301, USA.
 */

#ifndef __GST_D3D11_H264_DEC_H__
#define __GST_D3D11_H264_DEC_H__
#pragma once

#include "gstd3d11decoder.h"

@@ -30,5 +29,3 @@ void gst_d3d11_h264_dec_register (GstPlugin * plugin,
    gboolean legacy);

G_END_DECLS

#endif /* __GST_D3D11_H264_DEC_H__ */

@@ -37,98 +37,13 @@
#endif

#include "gstd3d11h265dec.h"

#include <gst/codecs/gsth265decoder.h>
#include <string.h>
#include <vector>

/* HACK: to expose dxva data structure on UWP */
#ifdef WINAPI_PARTITION_DESKTOP
#undef WINAPI_PARTITION_DESKTOP
#endif
#define WINAPI_PARTITION_DESKTOP 1
#include <d3d9.h>
#include <dxva.h>
#include <gst/dxva/gstdxvah265decoder.h>

GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_h265_dec_debug);
#define GST_CAT_DEFAULT gst_d3d11_h265_dec_debug

/* *INDENT-OFF* */
typedef struct _GstD3D11H265DecInner
{
  GstD3D11Device *device = nullptr;
  GstD3D11Decoder *d3d11_decoder = nullptr;

  DXVA_PicParams_HEVC pic_params;
  DXVA_Qmatrix_HEVC iq_matrix;

  std::vector<DXVA_Slice_HEVC_Short> slice_list;
  std::vector<guint8> bitstream_buffer;

  gboolean submit_iq_data;

  gint crop_x = 0;
  gint crop_y = 0;
  gint width = 0;
  gint height = 0;
  gint coded_width = 0;
  gint coded_height = 0;
  guint bitdepth = 0;
  guint8 chroma_format_idc = 0;
  GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
  GstVideoInterlaceMode interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
  gint max_dpb_size = 0;
} GstD3D11H265DecInner;
/* *INDENT-ON* */

typedef struct _GstD3D11H265Dec
{
  GstH265Decoder parent;
  GstD3D11H265DecInner *inner;
} GstD3D11H265Dec;

typedef struct _GstD3D11H265DecClass
{
  GstH265DecoderClass parent_class;
  GstD3D11DecoderSubClassData class_data;
} GstD3D11H265DecClass;

static GstElementClass *parent_class = NULL;

#define GST_D3D11_H265_DEC(object) ((GstD3D11H265Dec *) (object))
#define GST_D3D11_H265_DEC_GET_CLASS(object) \
    (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstD3D11H265DecClass))

static void gst_d3d11_h265_dec_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);
static void gst_d3d11_h265_dec_finalize (GObject * object);
static void gst_d3d11_h265_dec_set_context (GstElement * element,
    GstContext * context);

static gboolean gst_d3d11_h265_dec_open (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h265_dec_close (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h265_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h265_dec_decide_allocation (GstVideoDecoder *
    decoder, GstQuery * query);
static gboolean gst_d3d11_h265_dec_src_query (GstVideoDecoder * decoder,
    GstQuery * query);
static gboolean gst_d3d11_h265_dec_sink_event (GstVideoDecoder * decoder,
    GstEvent * event);

/* GstH265Decoder */
static GstFlowReturn gst_d3d11_h265_dec_new_sequence (GstH265Decoder * decoder,
    const GstH265SPS * sps, gint max_dpb_size);
static GstFlowReturn gst_d3d11_h265_dec_new_picture (GstH265Decoder * decoder,
    GstVideoCodecFrame * cframe, GstH265Picture * picture);
static GstFlowReturn gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
    GstH265Picture * picture, GstH265Slice * slice, GstH265Dpb * dpb);
static GstFlowReturn gst_d3d11_h265_dec_decode_slice (GstH265Decoder * decoder,
    GstH265Picture * picture, GstH265Slice * slice,
    GArray * ref_pic_list0, GArray * ref_pic_list1);
static GstFlowReturn gst_d3d11_h265_dec_end_picture (GstH265Decoder * decoder,
    GstH265Picture * picture);
static GstFlowReturn gst_d3d11_h265_dec_output_picture (GstH265Decoder *
    decoder, GstVideoCodecFrame * frame, GstH265Picture * picture);
GST_D3D11_DECODER_DEFINE_TYPE (GstD3D11H265Dec, gst_d3d11_h265_dec,
    GST, D3D11_H265_DEC, GstDxvaH265Decoder);

static void
gst_d3d11_h265_dec_class_init (GstD3D11H265DecClass * klass, gpointer data)
@@ -136,11 +51,10 @@ gst_d3d11_h265_dec_class_init (GstD3D11H265DecClass * klass, gpointer data)
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
  GstH265DecoderClass *h265decoder_class = GST_H265_DECODER_CLASS (klass);
  GstDxvaH265DecoderClass *dxva_class = GST_DXVA_H265_DECODER_CLASS (klass);
  GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;

  gobject_class->get_property = gst_d3d11_h265_dec_get_property;
  gobject_class->finalize = gst_d3d11_h265_dec_finalize;

  element_class->set_context =
      GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_set_context);
@@ -163,27 +77,24 @@ gst_d3d11_h265_dec_class_init (GstD3D11H265DecClass * klass, gpointer data)
  decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_negotiate);
  decoder_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_decide_allocation);
  decoder_class->sink_query = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_sink_query);
  decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_src_query);
  decoder_class->sink_event = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_sink_event);

  h265decoder_class->new_sequence =
      GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_new_sequence);
  h265decoder_class->new_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_new_picture);
  h265decoder_class->start_picture =
  dxva_class->configure = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_configure);
  dxva_class->new_picture = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_new_picture);
  dxva_class->get_picture_id =
      GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_get_picture_id);
  dxva_class->start_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_start_picture);
  h265decoder_class->decode_slice =
      GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_decode_slice);
  h265decoder_class->end_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_end_picture);
  h265decoder_class->output_picture =
  dxva_class->end_picture = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_end_picture);
  dxva_class->output_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_output_picture);
}

static void
gst_d3d11_h265_dec_init (GstD3D11H265Dec * self)
{
  self->inner = new GstD3D11H265DecInner ();
}

static void
@ -196,73 +107,37 @@ gst_d3d11_h265_dec_get_property (GObject * object, guint prop_id,
|
|||
gst_d3d11_decoder_proxy_get_property (object, prop_id, value, pspec, cdata);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h265_dec_finalize (GObject * object)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (object);
|
||||
|
||||
delete self->inner;
|
||||
|
||||
G_OBJECT_CLASS (parent_class)->finalize (object);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h265_dec_set_context (GstElement * element, GstContext * context)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (element);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
GstD3D11H265DecClass *klass = GST_D3D11_H265_DEC_GET_CLASS (self);
|
||||
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
|
||||
|
||||
gst_d3d11_handle_set_context_for_adapter_luid (element,
|
||||
context, cdata->adapter_luid, &inner->device);
|
||||
context, cdata->adapter_luid, &self->device);
|
||||
|
||||
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h265_dec_reset (GstD3D11H265Dec * self)
|
||||
{
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
|
||||
inner->width = 0;
|
||||
inner->height = 0;
|
||||
inner->coded_width = 0;
|
||||
inner->coded_height = 0;
|
||||
inner->bitdepth = 0;
|
||||
inner->chroma_format_idc = 0;
|
||||
inner->out_format = GST_VIDEO_FORMAT_UNKNOWN;
|
||||
inner->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
|
||||
inner->max_dpb_size = 0;
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_h265_dec_open (GstVideoDecoder * decoder)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
GstD3D11H265DecClass *klass = GST_D3D11_H265_DEC_GET_CLASS (self);
|
||||
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
|
||||
|
||||
if (!gst_d3d11_decoder_proxy_open (decoder,
|
||||
cdata, &inner->device, &inner->d3d11_decoder)) {
|
||||
GST_ERROR_OBJECT (self, "Failed to open decoder");
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
gst_d3d11_h265_dec_reset (self);
|
||||
|
||||
return TRUE;
|
||||
return gst_d3d11_decoder_proxy_open (decoder,
|
||||
cdata, &self->device, &self->decoder);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_h265_dec_close (GstVideoDecoder * decoder)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
|
||||
gst_clear_object (&inner->d3d11_decoder);
|
||||
gst_clear_object (&inner->device);
|
||||
gst_clear_object (&self->decoder);
|
||||
gst_clear_object (&self->device);
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
@ -271,9 +146,8 @@ static gboolean
|
|||
gst_d3d11_h265_dec_negotiate (GstVideoDecoder * decoder)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
|
||||
if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
|
||||
if (!gst_d3d11_decoder_negotiate (self->decoder, decoder))
|
||||
return FALSE;
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
|
||||
|
@ -284,27 +158,42 @@ gst_d3d11_h265_dec_decide_allocation (GstVideoDecoder * decoder,
|
|||
GstQuery * query)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
|
||||
if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder,
|
||||
decoder, query)) {
|
||||
if (!gst_d3d11_decoder_decide_allocation (self->decoder, decoder, query))
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation
|
||||
(decoder, query);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_h265_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
|
||||
gst_d3d11_h265_dec_sink_query (GstVideoDecoder * decoder, GstQuery * query)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
|
||||
switch (GST_QUERY_TYPE (query)) {
|
||||
case GST_QUERY_CONTEXT:
|
||||
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
|
||||
query, inner->device)) {
|
||||
query, self->device)) {
|
||||
return TRUE;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_h265_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
|
||||
switch (GST_QUERY_TYPE (query)) {
|
||||
case GST_QUERY_CONTEXT:
|
||||
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
|
||||
query, self->device)) {
|
||||
return TRUE;
|
||||
}
|
||||
break;
|
||||
|
@ -319,593 +208,73 @@ static gboolean
|
|||
gst_d3d11_h265_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
|
||||
switch (GST_EVENT_TYPE (event)) {
|
||||
case GST_EVENT_FLUSH_START:
|
||||
if (inner->d3d11_decoder)
|
||||
gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
|
||||
break;
|
||||
case GST_EVENT_FLUSH_STOP:
|
||||
if (inner->d3d11_decoder)
|
||||
gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if (self->decoder)
|
||||
gst_d3d11_decoder_sink_event (self->decoder, event);
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_h265_dec_new_sequence (GstH265Decoder * decoder,
|
||||
const GstH265SPS * sps, gint max_dpb_size)
|
||||
gst_d3d11_h265_dec_configure (GstDxvaH265Decoder * decoder,
|
||||
GstVideoCodecState * input_state, const GstVideoInfo * info,
|
||||
gint crop_x, gint crop_y, gint coded_width, gint coded_height,
|
||||
gint max_dpb_size)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
gint crop_width, crop_height;
|
||||
gboolean modified = FALSE;
|
||||
GstVideoInterlaceMode interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
|
||||
|
||||
GST_LOG_OBJECT (self, "new sequence");
|
||||
|
||||
if (sps->conformance_window_flag) {
|
||||
crop_width = sps->crop_rect_width;
|
||||
crop_height = sps->crop_rect_height;
|
||||
} else {
|
||||
crop_width = sps->width;
|
||||
crop_height = sps->height;
|
||||
}
|
||||
|
||||
if (inner->width != crop_width || inner->height != crop_height ||
|
||||
inner->coded_width != sps->width || inner->coded_height != sps->height ||
|
||||
inner->crop_x != sps->crop_rect_x || inner->crop_y != sps->crop_rect_y) {
|
||||
GST_INFO_OBJECT (self, "resolution changed %dx%d -> %dx%d",
|
||||
crop_width, crop_height, sps->width, sps->height);
|
||||
inner->crop_x = sps->crop_rect_x;
|
||||
inner->crop_y = sps->crop_rect_y;
|
||||
inner->width = crop_width;
|
||||
inner->height = crop_height;
|
||||
inner->coded_width = sps->width;
|
||||
inner->coded_height = sps->height;
|
||||
modified = TRUE;
|
||||
}
|
||||
|
||||
if (inner->bitdepth != (guint) sps->bit_depth_luma_minus8 + 8) {
|
||||
GST_INFO_OBJECT (self, "bitdepth changed");
|
||||
inner->bitdepth = sps->bit_depth_luma_minus8 + 8;
|
||||
modified = TRUE;
|
||||
}
|
||||
|
||||
if (sps->vui_parameters_present_flag && sps->vui_params.field_seq_flag) {
|
||||
interlace_mode = GST_VIDEO_INTERLACE_MODE_ALTERNATE;
|
||||
} else {
|
||||
/* 7.4.4 Profile, tier and level sementics */
|
||||
if (sps->profile_tier_level.progressive_source_flag &&
|
||||
!sps->profile_tier_level.interlaced_source_flag) {
|
||||
interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
|
||||
} else {
|
||||
interlace_mode = GST_VIDEO_INTERLACE_MODE_MIXED;
|
||||
}
|
||||
}
|
||||
|
||||
if (inner->interlace_mode != interlace_mode) {
|
||||
GST_INFO_OBJECT (self, "Interlace mode change %d -> %d",
|
||||
inner->interlace_mode, interlace_mode);
|
||||
inner->interlace_mode = interlace_mode;
|
||||
modified = TRUE;
|
||||
}
|
||||
|
||||
if (inner->chroma_format_idc != sps->chroma_format_idc) {
|
||||
GST_INFO_OBJECT (self, "chroma format changed");
|
||||
inner->chroma_format_idc = sps->chroma_format_idc;
|
||||
modified = TRUE;
|
||||
}
|
||||
|
||||
if (inner->max_dpb_size < max_dpb_size) {
|
||||
GST_INFO_OBJECT (self, "Requires larger DPB size (%d -> %d)",
|
||||
inner->max_dpb_size, max_dpb_size);
|
||||
modified = TRUE;
|
||||
}
|
||||
|
||||
if (modified || !gst_d3d11_decoder_is_configured (inner->d3d11_decoder)) {
|
||||
GstVideoInfo info;
|
||||
|
||||
inner->out_format = GST_VIDEO_FORMAT_UNKNOWN;
|
||||
|
||||
if (inner->bitdepth == 8) {
|
||||
if (inner->chroma_format_idc == 1) {
|
||||
inner->out_format = GST_VIDEO_FORMAT_NV12;
|
||||
} else {
|
||||
GST_FIXME_OBJECT (self, "Could not support 8bits non-4:2:0 format");
|
||||
}
|
||||
} else if (inner->bitdepth == 10) {
|
||||
if (inner->chroma_format_idc == 1) {
|
||||
inner->out_format = GST_VIDEO_FORMAT_P010_10LE;
|
||||
} else {
|
||||
GST_FIXME_OBJECT (self, "Could not support 10bits non-4:2:0 format");
|
||||
}
|
||||
}
|
||||
|
||||
if (inner->out_format == GST_VIDEO_FORMAT_UNKNOWN) {
|
||||
GST_ERROR_OBJECT (self, "Could not support bitdepth/chroma format");
|
||||
return GST_FLOW_NOT_NEGOTIATED;
|
||||
}
|
||||
|
||||
gst_video_info_set_format (&info,
|
||||
inner->out_format, inner->width, inner->height);
|
||||
GST_VIDEO_INFO_INTERLACE_MODE (&info) = inner->interlace_mode;
|
||||
|
||||
inner->max_dpb_size = max_dpb_size;
|
||||
if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
|
||||
decoder->input_state, &info, inner->crop_x, inner->crop_y,
|
||||
inner->coded_width, inner->coded_height, max_dpb_size)) {
|
||||
GST_ERROR_OBJECT (self, "Failed to create decoder");
|
||||
return GST_FLOW_NOT_NEGOTIATED;
|
||||
}
|
||||
|
||||
if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
|
||||
GST_WARNING_OBJECT (self, "Failed to negotiate with downstream");
|
||||
return GST_FLOW_NOT_NEGOTIATED;
|
||||
}
|
||||
}
|
||||
|
||||
return GST_FLOW_OK;
|
||||
return gst_d3d11_decoder_configure (self->decoder, input_state,
|
||||
info, crop_x, crop_y, coded_width, coded_height, max_dpb_size);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_h265_dec_new_picture (GstH265Decoder * decoder,
|
||||
GstVideoCodecFrame * cframe, GstH265Picture * picture)
|
||||
gst_d3d11_h265_dec_new_picture (GstDxvaH265Decoder * decoder,
|
||||
GstCodecPicture * picture)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
|
||||
return gst_d3d11_decoder_new_picture (inner->d3d11_decoder,
|
||||
GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
|
||||
return gst_d3d11_decoder_new_picture (self->decoder,
|
||||
GST_VIDEO_DECODER (decoder), picture);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h265_dec_picture_params_from_sps (GstD3D11H265Dec * self,
|
||||
const GstH265SPS * sps, DXVA_PicParams_HEVC * params)
|
||||
static guint8
|
||||
gst_d3d11_h265_dec_get_picture_id (GstDxvaH265Decoder * decoder,
|
||||
GstCodecPicture * picture)
|
||||
{
|
||||
#define COPY_FIELD(f) \
|
||||
(params)->f = (sps)->f
|
||||
#define COPY_FIELD_WITH_PREFIX(f) \
|
||||
(params)->G_PASTE(sps_,f) = (sps)->f
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
|
||||
params->PicWidthInMinCbsY =
|
||||
sps->width >> (sps->log2_min_luma_coding_block_size_minus3 + 3);
|
||||
params->PicHeightInMinCbsY =
|
||||
sps->height >> (sps->log2_min_luma_coding_block_size_minus3 + 3);
|
||||
params->sps_max_dec_pic_buffering_minus1 =
|
||||
sps->max_dec_pic_buffering_minus1[sps->max_sub_layers_minus1];
|
||||
|
||||
COPY_FIELD (chroma_format_idc);
|
||||
COPY_FIELD (separate_colour_plane_flag);
|
||||
COPY_FIELD (bit_depth_luma_minus8);
|
||||
COPY_FIELD (bit_depth_chroma_minus8);
|
||||
COPY_FIELD (log2_max_pic_order_cnt_lsb_minus4);
|
||||
COPY_FIELD (log2_min_luma_coding_block_size_minus3);
|
||||
COPY_FIELD (log2_diff_max_min_luma_coding_block_size);
|
||||
COPY_FIELD (log2_min_transform_block_size_minus2);
|
||||
COPY_FIELD (log2_diff_max_min_transform_block_size);
|
||||
COPY_FIELD (max_transform_hierarchy_depth_inter);
|
||||
COPY_FIELD (max_transform_hierarchy_depth_intra);
|
||||
COPY_FIELD (num_short_term_ref_pic_sets);
|
||||
COPY_FIELD (num_long_term_ref_pics_sps);
|
||||
COPY_FIELD (scaling_list_enabled_flag);
|
||||
COPY_FIELD (amp_enabled_flag);
|
||||
COPY_FIELD (sample_adaptive_offset_enabled_flag);
|
||||
COPY_FIELD (pcm_enabled_flag);
|
||||
|
||||
if (sps->pcm_enabled_flag) {
|
||||
COPY_FIELD (pcm_sample_bit_depth_luma_minus1);
|
||||
COPY_FIELD (pcm_sample_bit_depth_chroma_minus1);
|
||||
COPY_FIELD (log2_min_pcm_luma_coding_block_size_minus3);
|
||||
COPY_FIELD (log2_diff_max_min_pcm_luma_coding_block_size);
|
||||
}
|
||||
|
||||
COPY_FIELD (pcm_loop_filter_disabled_flag);
|
||||
COPY_FIELD (long_term_ref_pics_present_flag);
|
||||
COPY_FIELD_WITH_PREFIX (temporal_mvp_enabled_flag);
|
||||
COPY_FIELD (strong_intra_smoothing_enabled_flag);
|
||||
|
||||
#undef COPY_FIELD
|
||||
#undef COPY_FIELD_WITH_PREFIX
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h265_dec_picture_params_from_pps (GstD3D11H265Dec * self,
|
||||
const GstH265PPS * pps, DXVA_PicParams_HEVC * params)
|
||||
{
|
||||
guint i;
|
||||
|
||||
#define COPY_FIELD(f) \
|
||||
(params)->f = (pps)->f
|
||||
#define COPY_FIELD_WITH_PREFIX(f) \
|
||||
(params)->G_PASTE(pps_,f) = (pps)->f
|
||||
|
||||
COPY_FIELD (num_ref_idx_l0_default_active_minus1);
|
||||
COPY_FIELD (num_ref_idx_l1_default_active_minus1);
|
||||
COPY_FIELD (init_qp_minus26);
|
||||
COPY_FIELD (dependent_slice_segments_enabled_flag);
|
||||
COPY_FIELD (output_flag_present_flag);
|
||||
COPY_FIELD (num_extra_slice_header_bits);
|
||||
COPY_FIELD (sign_data_hiding_enabled_flag);
|
||||
COPY_FIELD (cabac_init_present_flag);
|
||||
COPY_FIELD (constrained_intra_pred_flag);
|
||||
COPY_FIELD (transform_skip_enabled_flag);
|
||||
COPY_FIELD (cu_qp_delta_enabled_flag);
|
||||
COPY_FIELD_WITH_PREFIX (slice_chroma_qp_offsets_present_flag);
|
||||
COPY_FIELD (weighted_pred_flag);
|
||||
COPY_FIELD (weighted_bipred_flag);
|
||||
COPY_FIELD (transquant_bypass_enabled_flag);
|
||||
COPY_FIELD (tiles_enabled_flag);
|
||||
COPY_FIELD (entropy_coding_sync_enabled_flag);
|
||||
COPY_FIELD (uniform_spacing_flag);
|
||||
|
||||
if (pps->tiles_enabled_flag)
|
||||
COPY_FIELD (loop_filter_across_tiles_enabled_flag);
|
||||
|
||||
COPY_FIELD_WITH_PREFIX (loop_filter_across_slices_enabled_flag);
|
||||
COPY_FIELD (deblocking_filter_override_enabled_flag);
|
||||
COPY_FIELD_WITH_PREFIX (deblocking_filter_disabled_flag);
|
||||
COPY_FIELD (lists_modification_present_flag);
|
||||
COPY_FIELD (slice_segment_header_extension_present_flag);
|
||||
COPY_FIELD_WITH_PREFIX (cb_qp_offset);
|
||||
COPY_FIELD_WITH_PREFIX (cr_qp_offset);
|
||||
|
||||
if (pps->tiles_enabled_flag) {
|
||||
COPY_FIELD (num_tile_columns_minus1);
|
||||
COPY_FIELD (num_tile_rows_minus1);
|
||||
if (!pps->uniform_spacing_flag) {
|
||||
for (i = 0; i < pps->num_tile_columns_minus1 &&
|
||||
i < G_N_ELEMENTS (params->column_width_minus1); i++)
|
||||
COPY_FIELD (column_width_minus1[i]);
|
||||
|
||||
for (i = 0; i < pps->num_tile_rows_minus1 &&
|
||||
i < G_N_ELEMENTS (params->row_height_minus1); i++)
|
||||
COPY_FIELD (row_height_minus1[i]);
|
||||
}
|
||||
}
|
||||
|
||||
COPY_FIELD (diff_cu_qp_delta_depth);
|
||||
COPY_FIELD_WITH_PREFIX (beta_offset_div2);
|
||||
COPY_FIELD_WITH_PREFIX (tc_offset_div2);
|
||||
COPY_FIELD (log2_parallel_merge_level_minus2);
|
||||
|
||||
#undef COPY_FIELD
|
||||
#undef COPY_FIELD_WITH_PREFIX
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_h265_dec_picture_params_from_slice_header (GstD3D11H265Dec *
|
||||
self, const GstH265SliceHdr * slice_header, DXVA_PicParams_HEVC * params)
|
||||
{
|
||||
if (slice_header->short_term_ref_pic_set_sps_flag == 0) {
|
||||
params->ucNumDeltaPocsOfRefRpsIdx =
|
||||
slice_header->short_term_ref_pic_sets.NumDeltaPocsOfRefRpsIdx;
|
||||
params->wNumBitsForShortTermRPSInSlice =
|
||||
slice_header->short_term_ref_pic_set_size;
|
||||
}
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_h265_dec_fill_picture_params (GstD3D11H265Dec * self,
|
||||
const GstH265SliceHdr * slice_header, DXVA_PicParams_HEVC * params)
|
||||
{
|
||||
const GstH265SPS *sps;
|
||||
const GstH265PPS *pps;
|
||||
|
||||
g_return_val_if_fail (slice_header->pps != NULL, FALSE);
|
||||
g_return_val_if_fail (slice_header->pps->sps != NULL, FALSE);
|
||||
|
||||
pps = slice_header->pps;
|
||||
sps = pps->sps;
|
||||
|
||||
/* not related to hevc syntax */
|
||||
params->NoPicReorderingFlag = 0;
|
||||
params->NoBiPredFlag = 0;
|
||||
params->ReservedBits1 = 0;
|
||||
params->StatusReportFeedbackNumber = 1;
|
||||
|
||||
gst_d3d11_h265_dec_picture_params_from_sps (self, sps, params);
|
||||
gst_d3d11_h265_dec_picture_params_from_pps (self, pps, params);
|
||||
gst_d3d11_h265_dec_picture_params_from_slice_header (self,
|
||||
slice_header, params);
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
static UCHAR
|
||||
gst_d3d11_h265_dec_get_ref_index (const DXVA_PicParams_HEVC * pic_params,
|
||||
guint8 view_id)
|
||||
{
|
||||
for (UCHAR i = 0; i < G_N_ELEMENTS (pic_params->RefPicList); i++) {
|
||||
if (pic_params->RefPicList[i].Index7Bits == view_id)
|
||||
return i;
|
||||
}
|
||||
|
||||
return 0xff;
|
||||
}
|
||||
|
||||
static inline void
|
||||
init_pic_params (DXVA_PicParams_HEVC * params)
|
||||
{
|
||||
memset (params, 0, sizeof (DXVA_PicParams_HEVC));
|
||||
for (guint i = 0; i < G_N_ELEMENTS (params->RefPicList); i++)
|
||||
params->RefPicList[i].bPicEntry = 0xff;
|
||||
|
||||
for (guint i = 0; i < G_N_ELEMENTS (params->RefPicSetStCurrBefore); i++) {
|
||||
params->RefPicSetStCurrBefore[i] = 0xff;
|
||||
params->RefPicSetStCurrAfter[i] = 0xff;
|
||||
params->RefPicSetLtCurr[i] = 0xff;
|
||||
}
|
||||
return gst_d3d11_decoder_get_picture_id (self->decoder, picture);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder,
|
||||
GstH265Picture * picture, GstH265Slice * slice, GstH265Dpb * dpb)
|
||||
gst_d3d11_h265_dec_start_picture (GstDxvaH265Decoder * decoder,
|
||||
GstCodecPicture * picture, guint8 * picture_id)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
DXVA_PicParams_HEVC *pic_params = &inner->pic_params;
|
||||
DXVA_Qmatrix_HEVC *iq_matrix = &inner->iq_matrix;
|
||||
ID3D11VideoDecoderOutputView *view;
|
||||
guint8 view_id = 0xff;
|
||||
guint i, j;
|
||||
GArray *dpb_array;
|
||||
GstH265SPS *sps;
|
||||
GstH265PPS *pps;
|
||||
GstH265ScalingList *scaling_list = nullptr;
|
||||
|
||||
pps = slice->header.pps;
|
||||
sps = pps->sps;
|
||||
|
||||
view = gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
|
||||
GST_CODEC_PICTURE (picture), &view_id);
|
||||
if (!view) {
|
||||
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
|
||||
return GST_FLOW_ERROR;
|
||||
}
|
||||
|
||||
init_pic_params (pic_params);
|
||||
gst_d3d11_h265_dec_fill_picture_params (self, &slice->header, pic_params);
|
||||
|
||||
pic_params->CurrPic.Index7Bits = view_id;
|
||||
pic_params->IrapPicFlag = GST_H265_IS_NAL_TYPE_IRAP (slice->nalu.type);
|
||||
pic_params->IdrPicFlag = GST_H265_IS_NAL_TYPE_IDR (slice->nalu.type);
|
||||
pic_params->IntraPicFlag = GST_H265_IS_NAL_TYPE_IRAP (slice->nalu.type);
|
||||
pic_params->CurrPicOrderCntVal = picture->pic_order_cnt;
|
||||
|
||||
dpb_array = gst_h265_dpb_get_pictures_all (dpb);
|
||||
for (i = 0, j = 0;
|
||||
i < dpb_array->len && j < G_N_ELEMENTS (pic_params->RefPicList); i++) {
|
||||
GstH265Picture *other = g_array_index (dpb_array, GstH265Picture *, i);
|
||||
guint8 id = 0xff;
|
||||
|
||||
if (!other->ref) {
|
||||
GST_LOG_OBJECT (self, "%dth picture in dpb is not reference, skip", i);
|
||||
continue;
|
||||
}
|
||||
|
||||
gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
|
||||
GST_CODEC_PICTURE (other), &id);
|
||||
pic_params->RefPicList[j].Index7Bits = id;
|
||||
pic_params->RefPicList[j].AssociatedFlag = other->long_term;
|
||||
pic_params->PicOrderCntValList[j] = other->pic_order_cnt;
|
||||
j++;
|
||||
}
|
||||
g_array_unref (dpb_array);
|
||||
|
||||
for (i = 0, j = 0; i < G_N_ELEMENTS (pic_params->RefPicSetStCurrBefore); i++) {
|
||||
GstH265Picture *other = nullptr;
|
||||
guint8 other_view_id = 0xff;
|
||||
guint8 id = 0xff;
|
||||
|
||||
while (!other && j < decoder->NumPocStCurrBefore)
|
||||
other = decoder->RefPicSetStCurrBefore[j++];
|
||||
|
||||
if (other) {
|
||||
ID3D11VideoDecoderOutputView *other_view;
|
||||
|
||||
other_view =
|
||||
gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
|
||||
GST_CODEC_PICTURE (other), &other_view_id);
|
||||
|
||||
if (other_view)
|
||||
id = gst_d3d11_h265_dec_get_ref_index (pic_params, other_view_id);
|
||||
}
|
||||
|
||||
pic_params->RefPicSetStCurrBefore[i] = id;
|
||||
}
|
||||
|
||||
for (i = 0, j = 0; i < G_N_ELEMENTS (pic_params->RefPicSetStCurrAfter); i++) {
|
||||
GstH265Picture *other = nullptr;
|
||||
guint8 other_view_id = 0xff;
|
||||
guint8 id = 0xff;
|
||||
|
||||
while (!other && j < decoder->NumPocStCurrAfter)
|
||||
other = decoder->RefPicSetStCurrAfter[j++];
|
||||
|
||||
if (other) {
|
||||
ID3D11VideoDecoderOutputView *other_view;
|
||||
|
||||
other_view =
|
||||
gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
|
||||
GST_CODEC_PICTURE (other), &other_view_id);
|
||||
|
||||
if (other_view)
|
||||
id = gst_d3d11_h265_dec_get_ref_index (pic_params, other_view_id);
|
||||
}
|
||||
|
||||
pic_params->RefPicSetStCurrAfter[i] = id;
|
||||
}
|
||||
|
||||
for (i = 0, j = 0; i < G_N_ELEMENTS (pic_params->RefPicSetLtCurr); i++) {
|
||||
GstH265Picture *other = nullptr;
|
||||
guint8 other_view_id = 0xff;
|
||||
guint8 id = 0xff;
|
||||
|
||||
while (!other && j < decoder->NumPocLtCurr)
|
||||
other = decoder->RefPicSetLtCurr[j++];
|
||||
|
||||
if (other) {
|
||||
ID3D11VideoDecoderOutputView *other_view;
|
||||
|
||||
other_view =
|
||||
gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
|
||||
GST_CODEC_PICTURE (other), &other_view_id);
|
||||
|
||||
|
||||
if (other_view)
|
||||
id = gst_d3d11_h265_dec_get_ref_index (pic_params, other_view_id);
|
||||
}
|
||||
|
||||
pic_params->RefPicSetLtCurr[i] = id;
|
||||
}
|
||||
|
||||
if (pps->scaling_list_data_present_flag ||
|
||||
(sps->scaling_list_enabled_flag
|
||||
&& !sps->scaling_list_data_present_flag)) {
|
||||
scaling_list = &pps->scaling_list;
|
||||
} else if (sps->scaling_list_enabled_flag &&
|
||||
sps->scaling_list_data_present_flag) {
|
||||
scaling_list = &sps->scaling_list;
|
||||
}
|
||||
|
||||
if (scaling_list) {
|
||||
G_STATIC_ASSERT (sizeof (iq_matrix->ucScalingLists0) ==
|
||||
sizeof (scaling_list->scaling_lists_4x4));
|
||||
G_STATIC_ASSERT (sizeof (iq_matrix->ucScalingLists1) ==
|
||||
sizeof (scaling_list->scaling_lists_8x8));
|
||||
G_STATIC_ASSERT (sizeof (iq_matrix->ucScalingLists2) ==
|
||||
sizeof (scaling_list->scaling_lists_16x16));
|
||||
G_STATIC_ASSERT (sizeof (iq_matrix->ucScalingLists3) ==
|
||||
sizeof (scaling_list->scaling_lists_32x32));
|
||||
|
||||
memcpy (iq_matrix->ucScalingLists0, scaling_list->scaling_lists_4x4,
|
||||
sizeof (iq_matrix->ucScalingLists0));
|
||||
memcpy (iq_matrix->ucScalingLists1, scaling_list->scaling_lists_8x8,
|
||||
sizeof (iq_matrix->ucScalingLists1));
|
||||
memcpy (iq_matrix->ucScalingLists2, scaling_list->scaling_lists_16x16,
|
||||
sizeof (iq_matrix->ucScalingLists2));
|
||||
memcpy (iq_matrix->ucScalingLists3, scaling_list->scaling_lists_32x32,
|
||||
sizeof (iq_matrix->ucScalingLists3));
|
||||
|
||||
for (i = 0; i < G_N_ELEMENTS (iq_matrix->ucScalingListDCCoefSizeID2); i++) {
|
||||
iq_matrix->ucScalingListDCCoefSizeID2[i] =
|
||||
scaling_list->scaling_list_dc_coef_minus8_16x16[i] + 8;
|
||||
}
|
||||
|
||||
for (i = 0; i < G_N_ELEMENTS (iq_matrix->ucScalingListDCCoefSizeID3); i++) {
|
||||
iq_matrix->ucScalingListDCCoefSizeID3[i] =
|
||||
scaling_list->scaling_list_dc_coef_minus8_32x32[i] + 8;
|
||||
}
|
||||
|
||||
inner->submit_iq_data = TRUE;
|
||||
} else {
|
||||
inner->submit_iq_data = FALSE;
|
||||
}
|
||||
|
||||
inner->slice_list.resize (0);
|
||||
inner->bitstream_buffer.resize (0);
|
||||
|
||||
return GST_FLOW_OK;
|
||||
return gst_d3d11_decoder_start_picture (self->decoder, picture, picture_id);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_h265_dec_decode_slice (GstH265Decoder * decoder,
|
||||
GstH265Picture * picture, GstH265Slice * slice,
|
||||
GArray * ref_pic_list0, GArray * ref_pic_list1)
|
||||
gst_d3d11_h265_dec_end_picture (GstDxvaH265Decoder * decoder,
|
||||
GstCodecPicture * picture, GPtrArray * ref_pics,
|
||||
const GstDxvaDecodingArgs * args)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
DXVA_Slice_HEVC_Short dxva_slice;
|
||||
static const guint8 start_code[] = { 0, 0, 1 };
|
||||
const size_t start_code_size = sizeof (start_code);
|
||||
|
||||
dxva_slice.BSNALunitDataLocation = inner->bitstream_buffer.size ();
|
||||
/* Includes 3 bytes start code prefix */
|
||||
dxva_slice.SliceBytesInBuffer = slice->nalu.size + start_code_size;
|
||||
dxva_slice.wBadSliceChopping = 0;
|
||||
|
||||
inner->slice_list.push_back (dxva_slice);
|
||||
|
||||
size_t pos = inner->bitstream_buffer.size ();
|
||||
inner->bitstream_buffer.resize (pos + start_code_size + slice->nalu.size);
|
||||
|
||||
/* Fill start code prefix */
|
||||
memcpy (&inner->bitstream_buffer[0] + pos, start_code, start_code_size);
|
||||
|
||||
/* Copy bitstream */
|
||||
memcpy (&inner->bitstream_buffer[0] + pos + start_code_size,
|
||||
slice->nalu.data + slice->nalu.offset, slice->nalu.size);
|
||||
|
||||
return GST_FLOW_OK;
|
||||
return gst_d3d11_decoder_end_picture (self->decoder, picture, args);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_h265_dec_end_picture (GstH265Decoder * decoder,
|
||||
GstH265Picture * picture)
|
||||
gst_d3d11_h265_dec_output_picture (GstDxvaH265Decoder * decoder,
|
||||
GstVideoCodecFrame * frame, GstCodecPicture * picture,
|
||||
GstVideoBufferFlags buffer_flags, gint display_width, gint display_height)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
size_t bitstream_buffer_size;
|
||||
size_t bitstream_pos;
|
||||
GstD3D11DecodeInputStreamArgs input_args;
|
||||
|
||||
GST_LOG_OBJECT (self, "end picture %p, (poc %d)",
|
||||
picture, picture->pic_order_cnt);
|
||||
|
||||
if (inner->bitstream_buffer.empty () || inner->slice_list.empty ()) {
|
||||
GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
|
||||
return GST_FLOW_ERROR;
|
||||
}
|
||||
|
||||
memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
|
||||
|
||||
bitstream_pos = inner->bitstream_buffer.size ();
|
||||
bitstream_buffer_size = GST_ROUND_UP_128 (bitstream_pos);
|
||||
|
||||
if (bitstream_buffer_size > bitstream_pos) {
|
||||
size_t padding = bitstream_buffer_size - bitstream_pos;
|
||||
|
||||
/* As per DXVA spec, total amount of bitstream buffer size should be
|
||||
* 128 bytes aligned. If actual data is not multiple of 128 bytes,
|
||||
* the last slice data needs to be zero-padded */
|
||||
inner->bitstream_buffer.resize (bitstream_buffer_size, 0);
|
||||
|
||||
DXVA_Slice_HEVC_Short & slice = inner->slice_list.back ();
|
||||
slice.SliceBytesInBuffer += padding;
|
||||
}
|
||||
|
||||
input_args.picture_params = &inner->pic_params;
|
||||
input_args.picture_params_size = sizeof (DXVA_PicParams_HEVC);
|
||||
input_args.slice_control = &inner->slice_list[0];
|
||||
input_args.slice_control_size =
|
||||
sizeof (DXVA_Slice_HEVC_Short) * inner->slice_list.size ();
|
||||
input_args.bitstream = &inner->bitstream_buffer[0];
|
||||
input_args.bitstream_size = inner->bitstream_buffer.size ();
|
||||
|
||||
if (inner->submit_iq_data) {
|
||||
input_args.inverse_quantization_matrix = &inner->iq_matrix;
|
||||
input_args.inverse_quantization_matrix_size = sizeof (DXVA_Qmatrix_HEVC);
|
||||
}
|
||||
|
||||
return gst_d3d11_decoder_decode_picture (inner->d3d11_decoder,
|
||||
GST_CODEC_PICTURE (picture), &input_args);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_h265_dec_output_picture (GstH265Decoder * decoder,
|
||||
GstVideoCodecFrame * frame, GstH265Picture * picture)
|
||||
{
|
||||
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
|
||||
GstD3D11H265DecInner *inner = self->inner;
|
||||
|
||||
return gst_d3d11_decoder_output_picture (inner->d3d11_decoder,
|
||||
GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
|
||||
picture->buffer_flags, inner->width, inner->height);
|
||||
return gst_d3d11_decoder_output_picture (self->decoder,
|
||||
GST_VIDEO_DECODER (decoder), frame, picture,
|
||||
buffer_flags, display_width, display_height);
|
||||
}
|
||||
|
||||
void
@@ -917,22 +286,22 @@ gst_d3d11_h265_dec_register (GstPlugin * plugin, GstD3D11Device * device,
  gchar *feature_name;
  guint index = 0;
  guint i;
  const GUID *profile = NULL;
  const GUID *profile = nullptr;
  GTypeInfo type_info = {
    sizeof (GstD3D11H265DecClass),
    NULL,
    NULL,
    nullptr,
    nullptr,
    (GClassInitFunc) gst_d3d11_h265_dec_class_init,
    NULL,
    NULL,
    nullptr,
    nullptr,
    sizeof (GstD3D11H265Dec),
    0,
    (GInstanceInitFunc) gst_d3d11_h265_dec_init,
  };
  const GUID *main_10_guid = NULL;
  const GUID *main_guid = NULL;
  GstCaps *sink_caps = NULL;
  GstCaps *src_caps = NULL;
  const GUID *main_10_guid = nullptr;
  const GUID *main_guid = nullptr;
  GstCaps *sink_caps = nullptr;
  GstCaps *src_caps = nullptr;
  GstCaps *src_caps_copy;
  GstCaps *tmp;
  GstCapsFeatures *caps_features;
@@ -1038,8 +407,8 @@ gst_d3d11_h265_dec_register (GstPlugin * plugin, GstD3D11Device * device,
    g_value_unset (&profile_list);
    g_value_unset (&format_list);
  } else {
    gst_caps_set_simple (sink_caps, "profile", G_TYPE_STRING, "main", NULL);
    gst_caps_set_simple (src_caps, "format", G_TYPE_STRING, "NV12", NULL);
    gst_caps_set_simple (sink_caps, "profile", G_TYPE_STRING, "main", nullptr);
    gst_caps_set_simple (src_caps, "format", G_TYPE_STRING, "NV12", nullptr);
  }

  /* To cover both landscape and portrait, select max value */
@@ -1051,15 +420,16 @@ gst_d3d11_h265_dec_register (GstPlugin * plugin, GstD3D11Device * device,
  /* System memory with alternate interlace-mode */
  tmp = gst_caps_copy (src_caps_copy);
  caps_features = gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED,
      NULL);
      nullptr);
  gst_caps_set_features_simple (tmp, caps_features);
  gst_caps_set_simple (tmp, "interlace-mode", G_TYPE_STRING, "alternate", NULL);
  gst_caps_set_simple (tmp, "interlace-mode", G_TYPE_STRING, "alternate",
      nullptr);
  gst_caps_append (src_caps, tmp);

  /* D3D11 memory feature */
  tmp = gst_caps_copy (src_caps_copy);
  caps_features = gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY,
      NULL);
      nullptr);
  gst_caps_set_features_simple (tmp, caps_features);
  gst_caps_append (src_caps, tmp);

@@ -1069,9 +439,10 @@ gst_d3d11_h265_dec_register (GstPlugin * plugin, GstD3D11Device * device,
  /* D3D11 memory with alternate interlace-mode */
  tmp = gst_caps_copy (src_caps_copy);
  caps_features = gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY,
      GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL);
      GST_CAPS_FEATURE_FORMAT_INTERLACED, nullptr);
  gst_caps_set_features_simple (tmp, caps_features);
  gst_caps_set_simple (tmp, "interlace-mode", G_TYPE_STRING, "alternate", NULL);
  gst_caps_set_simple (tmp, "interlace-mode", G_TYPE_STRING, "alternate",
      nullptr);
  gst_caps_append (src_caps, tmp);
#endif

@@ -1092,7 +463,7 @@ gst_d3d11_h265_dec_register (GstPlugin * plugin, GstD3D11Device * device,
    feature_name = g_strdup_printf ("d3d11h265device%ddec", index);
  }

  type = g_type_register_static (GST_TYPE_H265_DECODER,
  type = g_type_register_static (GST_TYPE_DXVA_H265_DECODER,
      type_name, &type_info, (GTypeFlags) 0);

  /* make lower rank than default device */

@@ -17,8 +17,7 @@
 * Boston, MA 02110-1301, USA.
 */

#ifndef __GST_D3D11_H265_DEC_H__
#define __GST_D3D11_H265_DEC_H__
#pragma once

#include "gstd3d11decoder.h"

@@ -29,5 +28,3 @@ void gst_d3d11_h265_dec_register (GstPlugin * plugin,
    guint rank);

G_END_DECLS

#endif /* __GST_D3D11_H265_DEC_H__ */

@@ -37,102 +37,13 @@
#endif

#include "gstd3d11mpeg2dec.h"

#include <gst/codecs/gstmpeg2decoder.h>
#include <string.h>
#include <vector>

/* HACK: to expose dxva data structure on UWP */
#ifdef WINAPI_PARTITION_DESKTOP
#undef WINAPI_PARTITION_DESKTOP
#endif
#define WINAPI_PARTITION_DESKTOP 1
#include <d3d9.h>
#include <dxva.h>
#include <gst/dxva/gstdxvampeg2decoder.h>

GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_mpeg2_dec_debug);
#define GST_CAT_DEFAULT gst_d3d11_mpeg2_dec_debug

/* *INDENT-OFF* */
typedef struct _GstD3D11Mpeg2DecInner
{
  GstD3D11Device *device = nullptr;
  GstD3D11Decoder *d3d11_decoder = nullptr;

  DXVA_PictureParameters pic_params;
  DXVA_QmatrixData iq_matrix;

  std::vector<DXVA_SliceInfo> slice_list;
  std::vector<guint8> bitstream_buffer;

  gboolean submit_iq_data;

  gint width = 0;
  gint height = 0;
  guint width_in_mb = 0;
  guint height_in_mb = 0;
  GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
  GstMpegVideoSequenceHdr seq;
  GstMpegVideoProfile profile = GST_MPEG_VIDEO_PROFILE_MAIN;
  gboolean interlaced = FALSE;
} GstD3D11Mpeg2DecInner;
/* *INDENT-ON* */

typedef struct _GstD3D11Mpeg2Dec
{
  GstMpeg2Decoder parent;
  GstD3D11Mpeg2DecInner *inner;
} GstD3D11Mpeg2Dec;

typedef struct _GstD3D11Mpeg2DecClass
{
  GstMpeg2DecoderClass parent_class;
  GstD3D11DecoderSubClassData class_data;
} GstD3D11Mpeg2DecClass;

static GstElementClass *parent_class = NULL;

#define GST_D3D11_MPEG2_DEC(object) ((GstD3D11Mpeg2Dec *) (object))
#define GST_D3D11_MPEG2_DEC_GET_CLASS(object) \
    (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstD3D11Mpeg2DecClass))

static void gst_d3d11_mpeg2_dec_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);
static void gst_d3d11_mpeg2_dec_finalize (GObject * object);
static void gst_d3d11_mpeg2_dec_set_context (GstElement * element,
    GstContext * context);

static gboolean gst_d3d11_mpeg2_dec_open (GstVideoDecoder * decoder);
static gboolean gst_d3d11_mpeg2_dec_close (GstVideoDecoder * decoder);
static gboolean gst_d3d11_mpeg2_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_d3d11_mpeg2_dec_decide_allocation (GstVideoDecoder *
    decoder, GstQuery * query);
static gboolean gst_d3d11_mpeg2_dec_src_query (GstVideoDecoder * decoder,
    GstQuery * query);
static gboolean gst_d3d11_mpeg2_dec_sink_event (GstVideoDecoder * decoder,
    GstEvent * event);

/* GstMpeg2Decoder */
static GstFlowReturn gst_d3d11_mpeg2_dec_new_sequence (GstMpeg2Decoder *
    decoder, const GstMpegVideoSequenceHdr * seq,
    const GstMpegVideoSequenceExt * seq_ext,
    const GstMpegVideoSequenceDisplayExt * seq_display_ext,
    const GstMpegVideoSequenceScalableExt * seq_scalable_ext,
    gint max_dpb_size);
static GstFlowReturn gst_d3d11_mpeg2_dec_new_picture (GstMpeg2Decoder * decoder,
    GstVideoCodecFrame * frame, GstMpeg2Picture * picture);
static GstFlowReturn
gst_d3d11_mpeg2_dec_new_field_picture (GstMpeg2Decoder * decoder,
    GstMpeg2Picture * first_field, GstMpeg2Picture * second_field);
static GstFlowReturn gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder *
    decoder, GstMpeg2Picture * picture, GstMpeg2Slice * slice,
    GstMpeg2Picture * prev_picture, GstMpeg2Picture * next_picture);
static GstFlowReturn gst_d3d11_mpeg2_dec_decode_slice (GstMpeg2Decoder *
    decoder, GstMpeg2Picture * picture, GstMpeg2Slice * slice);
static GstFlowReturn gst_d3d11_mpeg2_dec_end_picture (GstMpeg2Decoder * decoder,
    GstMpeg2Picture * picture);
static GstFlowReturn gst_d3d11_mpeg2_dec_output_picture (GstMpeg2Decoder *
    decoder, GstVideoCodecFrame * frame, GstMpeg2Picture * picture);
GST_D3D11_DECODER_DEFINE_TYPE_FULL (GstD3D11Mpeg2Dec, gst_d3d11_mpeg2_dec,
    GST, D3D11_MPEG2_DEC, GstDxvaMpeg2Decoder);

static void
gst_d3d11_mpeg2_dec_class_init (GstD3D11Mpeg2DecClass * klass, gpointer data)
@@ -140,11 +51,10 @@ gst_d3d11_mpeg2_dec_class_init (GstD3D11Mpeg2DecClass * klass, gpointer data)
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
  GstMpeg2DecoderClass *mpeg2decoder_class = GST_MPEG2_DECODER_CLASS (klass);
  GstDxvaMpeg2DecoderClass *dxva_class = GST_DXVA_MPEG2_DECODER_CLASS (klass);
  GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;

  gobject_class->get_property = gst_d3d11_mpeg2_dec_get_property;
  gobject_class->finalize = gst_d3d11_mpeg2_dec_finalize;

  element_class->set_context =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_set_context);

@@ -167,30 +77,28 @@ gst_d3d11_mpeg2_dec_class_init (GstD3D11Mpeg2DecClass * klass, gpointer data)
  decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_negotiate);
  decoder_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_decide_allocation);
  decoder_class->sink_query =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_sink_query);
  decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_src_query);
  decoder_class->sink_event =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_sink_event);

  mpeg2decoder_class->new_sequence =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_new_sequence);
  mpeg2decoder_class->new_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_new_picture);
  mpeg2decoder_class->new_field_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_new_field_picture);
  mpeg2decoder_class->start_picture =
  dxva_class->configure = GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_configure);
  dxva_class->new_picture = GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_new_picture);
  dxva_class->duplicate_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_duplicate_picture);
  dxva_class->get_picture_id =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_get_picture_id);
  dxva_class->start_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_start_picture);
  mpeg2decoder_class->decode_slice =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_decode_slice);
  mpeg2decoder_class->end_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_end_picture);
  mpeg2decoder_class->output_picture =
  dxva_class->end_picture = GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_end_picture);
  dxva_class->output_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_output_picture);
}

static void
gst_d3d11_mpeg2_dec_init (GstD3D11Mpeg2Dec * self)
{
  self->inner = new GstD3D11Mpeg2DecInner ();
}

static void

@@ -203,26 +111,15 @@ gst_d3d11_mpeg2_dec_get_property (GObject * object, guint prop_id,
  gst_d3d11_decoder_proxy_get_property (object, prop_id, value, pspec, cdata);
}

static void
gst_d3d11_mpeg2_dec_finalize (GObject * object)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (object);

  delete self->inner;

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_d3d11_mpeg2_dec_set_context (GstElement * element, GstContext * context)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (element);
  GstD3D11Mpeg2DecInner *inner = self->inner;
  GstD3D11Mpeg2DecClass *klass = GST_D3D11_MPEG2_DEC_GET_CLASS (self);
  GstD3D11DecoderSubClassData *cdata = &klass->class_data;

  gst_d3d11_handle_set_context_for_adapter_luid (element,
      context, cdata->adapter_luid, &inner->device);
      context, cdata->adapter_luid, &self->device);

  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}

@@ -231,27 +128,20 @@ static gboolean
gst_d3d11_mpeg2_dec_open (GstVideoDecoder * decoder)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;
  GstD3D11Mpeg2DecClass *klass = GST_D3D11_MPEG2_DEC_GET_CLASS (self);
  GstD3D11DecoderSubClassData *cdata = &klass->class_data;

  if (!gst_d3d11_decoder_proxy_open (decoder,
          cdata, &inner->device, &inner->d3d11_decoder)) {
    GST_ERROR_OBJECT (self, "Failed to open decoder");
    return FALSE;
  }

  return TRUE;
  return gst_d3d11_decoder_proxy_open (decoder,
      cdata, &self->device, &self->decoder);
}

static gboolean
gst_d3d11_mpeg2_dec_close (GstVideoDecoder * decoder)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;

  gst_clear_object (&inner->d3d11_decoder);
  gst_clear_object (&inner->device);
  gst_clear_object (&self->decoder);
  gst_clear_object (&self->device);

  return TRUE;
}

@@ -260,9 +150,8 @@ static gboolean
gst_d3d11_mpeg2_dec_negotiate (GstVideoDecoder * decoder)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;

  if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
  if (!gst_d3d11_decoder_negotiate (self->decoder, decoder))
    return FALSE;

  return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);

@@ -273,27 +162,42 @@ gst_d3d11_mpeg2_dec_decide_allocation (GstVideoDecoder * decoder,
    GstQuery * query)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;

  if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder,
          decoder, query)) {
  if (!gst_d3d11_decoder_decide_allocation (self->decoder, decoder, query))
    return FALSE;
  }

  return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation
      (decoder, query);
}

static gboolean
gst_d3d11_mpeg2_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
gst_d3d11_mpeg2_dec_sink_query (GstVideoDecoder * decoder, GstQuery * query)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONTEXT:
      if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
              query, inner->device)) {
              query, self->device)) {
        return TRUE;
      }
      break;
    default:
      break;
  }

  return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
}

static gboolean
gst_d3d11_mpeg2_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONTEXT:
      if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
              query, self->device)) {
        return TRUE;
      }
      break;

@@ -308,386 +212,82 @@ static gboolean
gst_d3d11_mpeg2_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      if (inner->d3d11_decoder)
        gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
      break;
    case GST_EVENT_FLUSH_STOP:
      if (inner->d3d11_decoder)
        gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
    default:
      break;
  }
  if (self->decoder)
    gst_d3d11_decoder_sink_event (self->decoder, event);

  return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
}

static GstFlowReturn
gst_d3d11_mpeg2_dec_new_sequence (GstMpeg2Decoder * decoder,
    const GstMpegVideoSequenceHdr * seq,
    const GstMpegVideoSequenceExt * seq_ext,
    const GstMpegVideoSequenceDisplayExt * seq_display_ext,
    const GstMpegVideoSequenceScalableExt * seq_scalable_ext, gint max_dpb_size)
gst_d3d11_mpeg2_dec_configure (GstDxvaMpeg2Decoder * decoder,
    GstVideoCodecState * input_state, const GstVideoInfo * info,
    gint crop_x, gint crop_y, gint coded_width, gint coded_height,
    gint max_dpb_size)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;
  gboolean interlaced;
  gboolean modified = FALSE;
  gint width, height;
  GstMpegVideoProfile mpeg_profile;

  GST_LOG_OBJECT (self, "new sequence");

  interlaced = seq_ext ? !seq_ext->progressive : FALSE;
  if (inner->interlaced != interlaced) {
    GST_INFO_OBJECT (self, "interlaced sequence change");
    inner->interlaced = interlaced;
    modified = TRUE;
  }

  width = seq->width;
  height = seq->height;
  if (seq_ext) {
    width = (width & 0x0fff) | ((guint32) seq_ext->horiz_size_ext << 12);
    height = (height & 0x0fff) | ((guint32) seq_ext->vert_size_ext << 12);
  }

  if (inner->width != width || inner->height != height) {
    GST_INFO_OBJECT (self, "resolution change %dx%d -> %dx%d",
        inner->width, inner->height, width, height);
    inner->width = width;
    inner->height = height;
    inner->width_in_mb = GST_ROUND_UP_16 (width) >> 4;
    inner->height_in_mb = GST_ROUND_UP_16 (height) >> 4;
    modified = TRUE;
  }

  mpeg_profile = GST_MPEG_VIDEO_PROFILE_MAIN;
  if (seq_ext)
    mpeg_profile = (GstMpegVideoProfile) seq_ext->profile;

  if (mpeg_profile != GST_MPEG_VIDEO_PROFILE_MAIN &&
      mpeg_profile != GST_MPEG_VIDEO_PROFILE_SIMPLE) {
    GST_ERROR_OBJECT (self, "Cannot support profile %d", mpeg_profile);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (inner->profile != mpeg_profile) {
    GST_INFO_OBJECT (self, "Profile change %d -> %d",
        inner->profile, mpeg_profile);
    inner->profile = mpeg_profile;
    modified = TRUE;
  }

  if (modified || !gst_d3d11_decoder_is_configured (inner->d3d11_decoder)) {
    GstVideoInfo info;

    /* FIXME: support I420 */
    inner->out_format = GST_VIDEO_FORMAT_NV12;

    gst_video_info_set_format (&info,
        inner->out_format, inner->width, inner->height);
    if (inner->interlaced)
      GST_VIDEO_INFO_INTERLACE_MODE (&info) = GST_VIDEO_INTERLACE_MODE_MIXED;

    if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
            decoder->input_state, &info, 0, 0,
            inner->width, inner->height, max_dpb_size)) {
      GST_ERROR_OBJECT (self, "Failed to create decoder");
      return GST_FLOW_NOT_NEGOTIATED;
    }

    if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
      GST_WARNING_OBJECT (self, "Failed to negotiate with downstream");
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  return GST_FLOW_OK;
  return gst_d3d11_decoder_configure (self->decoder, input_state,
      info, crop_x, crop_y, coded_width, coded_height, max_dpb_size);
}
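
An aside on the width/height computation in the removed new_sequence() above: MPEG-2 carries a 12-bit horizontal_size_value in the sequence header and a 2-bit horizontal_size_extension in the sequence extension, and the decoder has to recombine them into the full 14-bit dimension, which is what the (width & 0x0fff) | (horiz_size_ext << 12) expression does. A minimal standalone sketch of that arithmetic, with made-up sample values (not part of the commit):

#include <cassert>
#include <cstdint>

/* Combine the 12-bit size value from the sequence header with the 2-bit
 * size extension from the sequence extension, the same arithmetic the old
 * new_sequence() used */
static uint32_t
mpeg2_full_size (uint32_t size_value, uint32_t size_ext)
{
  return (size_value & 0x0fff) | (size_ext << 12);
}

int
main (void)
{
  /* 1920 fits in 12 bits, so the extension is 0 */
  assert (mpeg2_full_size (1920, 0) == 1920);
  /* a hypothetical 4096-wide stream needs the extension bits:
   * 4096 = 0 + (1 << 12) */
  assert (mpeg2_full_size (4096 & 0x0fff, 4096 >> 12) == 4096);
  return 0;
}

With the port, that sequence parsing presumably moves into the GstDxvaMpeg2Decoder base class, and the element only forwards the resulting crop and coded dimensions to gst_d3d11_decoder_configure().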

static GstFlowReturn
gst_d3d11_mpeg2_dec_new_picture (GstMpeg2Decoder * decoder,
    GstVideoCodecFrame * frame, GstMpeg2Picture * picture)
gst_d3d11_mpeg2_dec_new_picture (GstDxvaMpeg2Decoder * decoder,
    GstCodecPicture * picture)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;

  return gst_d3d11_decoder_new_picture (inner->d3d11_decoder,
      GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
  return gst_d3d11_decoder_new_picture (self->decoder,
      GST_VIDEO_DECODER (decoder), picture);
}

static GstFlowReturn
gst_d3d11_mpeg2_dec_new_field_picture (GstMpeg2Decoder * decoder,
    GstMpeg2Picture * first_field, GstMpeg2Picture * second_field)
gst_d3d11_mpeg2_dec_duplicate_picture (GstDxvaMpeg2Decoder * decoder,
    GstCodecPicture * src, GstCodecPicture * dst)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstBuffer *view_buffer;

  view_buffer = (GstBuffer *)
      gst_mpeg2_picture_get_user_data (first_field);

  if (!view_buffer) {
    GST_WARNING_OBJECT (self, "First picture does not have output view buffer");
    return GST_FLOW_OK;
  }

  GST_LOG_OBJECT (self, "New field picture with buffer %" GST_PTR_FORMAT,
      view_buffer);

  gst_mpeg2_picture_set_user_data (second_field,
      gst_buffer_ref (view_buffer), (GDestroyNotify) gst_buffer_unref);

  return GST_FLOW_OK;
  return gst_d3d11_decoder_duplicate_picture (self->decoder, src, dst);
}

static inline WORD
_pack_f_codes (guint8 f_code[2][2])
static guint8
gst_d3d11_mpeg2_dec_get_picture_id (GstDxvaMpeg2Decoder * decoder,
    GstCodecPicture * picture)
{
  return (((WORD) f_code[0][0] << 12)
      | ((WORD) f_code[0][1] << 8)
      | ((WORD) f_code[1][0] << 4)
      | (f_code[1][1]));
}
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);

static inline WORD
_pack_pce_elements (GstMpeg2Slice * slice)
{
  return (((WORD) slice->pic_ext->intra_dc_precision << 14)
      | ((WORD) slice->pic_ext->picture_structure << 12)
      | ((WORD) slice->pic_ext->top_field_first << 11)
      | ((WORD) slice->pic_ext->frame_pred_frame_dct << 10)
      | ((WORD) slice->pic_ext->concealment_motion_vectors << 9)
      | ((WORD) slice->pic_ext->q_scale_type << 8)
      | ((WORD) slice->pic_ext->intra_vlc_format << 7)
      | ((WORD) slice->pic_ext->alternate_scan << 6)
      | ((WORD) slice->pic_ext->repeat_first_field << 5)
      | ((WORD) slice->pic_ext->chroma_420_type << 4)
      | ((WORD) slice->pic_ext->progressive_frame << 3));
  return gst_d3d11_decoder_get_picture_id (self->decoder, picture);
}
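
For reference, the removed _pack_f_codes() above packs the four MPEG-2 f_code values into the nibbles of the 16-bit DXVA wBitstreamFcodes field: forward-horizontal in the top nibble, then forward-vertical, backward-horizontal and backward-vertical. A quick worked example with illustrative values (not part of the commit):

#include <cassert>
#include <cstdint>

/* Same packing order as the removed _pack_f_codes() */
static uint16_t
pack_f_codes (const uint8_t f_code[2][2])
{
  return (uint16_t) ((f_code[0][0] << 12) | (f_code[0][1] << 8) |
      (f_code[1][0] << 4) | f_code[1][1]);
}

int
main (void)
{
  const uint8_t f_code[2][2] = { {3, 2}, {4, 2} };
  /* nibbles 0x3 0x2 0x4 0x2 -> 0x3242 */
  assert (pack_f_codes (f_code) == 0x3242);
  return 0;
}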
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
|
||||
GstMpeg2Picture * picture, GstMpeg2Slice * slice,
|
||||
GstMpeg2Picture * prev_picture, GstMpeg2Picture * next_picture)
|
||||
gst_d3d11_mpeg2_dec_start_picture (GstDxvaMpeg2Decoder * decoder,
|
||||
GstCodecPicture * picture, guint8 * picture_id)
|
||||
{
|
||||
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
|
||||
GstD3D11Mpeg2DecInner *inner = self->inner;
|
||||
GstD3D11Decoder *d3d11_decoder = inner->d3d11_decoder;
|
||||
DXVA_PictureParameters *pic_params = &inner->pic_params;
|
||||
DXVA_QmatrixData *iq_matrix = &inner->iq_matrix;
|
||||
ID3D11VideoDecoderOutputView *view;
|
||||
ID3D11VideoDecoderOutputView *other_view;
|
||||
guint8 view_id = 0xff;
|
||||
guint8 other_view_id = 0xff;
|
||||
gboolean is_field =
|
||||
picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
|
||||
|
||||
view = gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
|
||||
GST_CODEC_PICTURE (picture), &view_id);
|
||||
if (!view) {
|
||||
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
|
||||
return GST_FLOW_ERROR;
|
||||
}
|
||||
|
||||
memset (pic_params, 0, sizeof (DXVA_PictureParameters));
|
||||
memset (iq_matrix, 0, sizeof (DXVA_QmatrixData));
|
||||
|
||||
/* Fill DXVA_PictureParameters */
|
||||
pic_params->wDecodedPictureIndex = view_id;
|
||||
pic_params->wForwardRefPictureIndex = 0xffff;
|
||||
pic_params->wBackwardRefPictureIndex = 0xffff;
|
||||
|
||||
switch (picture->type) {
|
||||
case GST_MPEG_VIDEO_PICTURE_TYPE_B:{
|
||||
if (next_picture) {
|
||||
other_view =
|
||||
gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
|
||||
GST_CODEC_PICTURE (next_picture), &other_view_id);
|
||||
if (other_view)
|
||||
pic_params->wBackwardRefPictureIndex = other_view_id;
|
||||
}
|
||||
}
|
||||
/* fall-through */
|
||||
case GST_MPEG_VIDEO_PICTURE_TYPE_P:{
|
||||
if (prev_picture) {
|
||||
other_view =
|
||||
gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
|
||||
GST_CODEC_PICTURE (prev_picture), &other_view_id);
|
||||
if (other_view)
|
||||
pic_params->wForwardRefPictureIndex = other_view_id;
|
||||
}
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
pic_params->wPicWidthInMBminus1 = inner->width_in_mb - 1;
|
||||
pic_params->wPicHeightInMBminus1 = (inner->height_in_mb >> is_field) - 1;
|
||||
pic_params->bMacroblockWidthMinus1 = 15;
|
||||
pic_params->bMacroblockHeightMinus1 = 15;
|
||||
pic_params->bBlockWidthMinus1 = 7;
|
||||
pic_params->bBlockHeightMinus1 = 7;
|
||||
pic_params->bBPPminus1 = 7;
|
||||
pic_params->bPicStructure = (BYTE) picture->structure;
|
||||
if (picture->first_field && is_field) {
|
||||
pic_params->bSecondField = TRUE;
|
||||
}
|
||||
pic_params->bPicIntra = picture->type == GST_MPEG_VIDEO_PICTURE_TYPE_I;
|
||||
pic_params->bPicBackwardPrediction =
|
||||
picture->type == GST_MPEG_VIDEO_PICTURE_TYPE_B;
|
||||
/* FIXME: 1 -> 4:2:0, 2 -> 4:2:2, 3 -> 4:4:4 */
|
||||
pic_params->bChromaFormat = 1;
|
||||
pic_params->bPicScanFixed = 1;
|
||||
pic_params->bPicScanMethod = slice->pic_ext->alternate_scan;
|
||||
pic_params->wBitstreamFcodes = _pack_f_codes (slice->pic_ext->f_code);
|
||||
pic_params->wBitstreamPCEelements = _pack_pce_elements (slice);
|
||||
|
||||
/* Fill DXVA_QmatrixData */
|
||||
if (slice->quant_matrix &&
|
||||
/* The value in bNewQmatrix[0] and bNewQmatrix[1] must not both be zero.
|
||||
* https://docs.microsoft.com/en-us/windows-hardware/drivers/ddi/dxva/ns-dxva-_dxva_qmatrixdata
|
||||
*/
|
||||
(slice->quant_matrix->load_intra_quantiser_matrix ||
|
||||
slice->quant_matrix->load_non_intra_quantiser_matrix)) {
|
||||
GstMpegVideoQuantMatrixExt *quant_matrix = slice->quant_matrix;
|
||||
|
||||
if (quant_matrix->load_intra_quantiser_matrix) {
|
||||
iq_matrix->bNewQmatrix[0] = 1;
|
||||
for (guint i = 0; i < 64; i++) {
|
||||
iq_matrix->Qmatrix[0][i] = quant_matrix->intra_quantiser_matrix[i];
|
||||
}
|
||||
}
|
||||
|
||||
if (quant_matrix->load_non_intra_quantiser_matrix) {
|
||||
iq_matrix->bNewQmatrix[1] = 1;
|
||||
for (guint i = 0; i < 64; i++) {
|
||||
iq_matrix->Qmatrix[1][i] = quant_matrix->non_intra_quantiser_matrix[i];
|
||||
}
|
||||
}
|
||||
|
||||
if (quant_matrix->load_chroma_intra_quantiser_matrix) {
|
||||
iq_matrix->bNewQmatrix[2] = 1;
|
||||
for (guint i = 0; i < 64; i++) {
|
||||
iq_matrix->Qmatrix[2][i] =
|
||||
quant_matrix->chroma_intra_quantiser_matrix[i];
|
||||
}
|
||||
}
|
||||
|
||||
if (quant_matrix->load_chroma_non_intra_quantiser_matrix) {
|
||||
iq_matrix->bNewQmatrix[3] = 1;
|
||||
for (guint i = 0; i < 64; i++) {
|
||||
iq_matrix->Qmatrix[3][i] =
|
||||
quant_matrix->chroma_non_intra_quantiser_matrix[i];
|
||||
}
|
||||
}
|
||||
|
||||
inner->submit_iq_data = TRUE;
|
||||
} else {
|
||||
inner->submit_iq_data = FALSE;
|
||||
}
|
||||
|
||||
inner->slice_list.resize (0);
|
||||
inner->bitstream_buffer.resize (0);
|
||||
|
||||
return GST_FLOW_OK;
|
||||
return gst_d3d11_decoder_start_picture (self->decoder, picture, picture_id);
|
||||
}
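
The quant-matrix handling in the removed start_picture() above follows the documented DXVA constraint quoted in its comment: bNewQmatrix[0] and bNewQmatrix[1] must not both be zero, so the DXVA_QmatrixData block is only submitted when at least one of the luma matrices was actually loaded, and each of the four 64-entry rows is only valid when its flag is set. A small sketch of that rule using a stand-in type, not the real DXVA struct (not part of the commit):

#include <algorithm>
#include <cstdint>

/* Stand-in for DXVA_QmatrixData: rows 0-3 hold the intra, non-intra,
 * chroma-intra and chroma-non-intra matrices */
struct QmatrixSketch
{
  uint8_t bNewQmatrix[4] = { 0, 0, 0, 0 };
  uint8_t Qmatrix[4][64] = { };

  void load_row (int row, const uint8_t (&matrix)[64])
  {
    bNewQmatrix[row] = 1;
    std::copy (matrix, matrix + 64, Qmatrix[row]);
  }

  bool worth_submitting () const
  {
    /* bNewQmatrix[0] and bNewQmatrix[1] must not both be zero */
    return bNewQmatrix[0] || bNewQmatrix[1];
  }
};

int
main (void)
{
  QmatrixSketch q;
  const uint8_t intra[64] = { };
  q.load_row (0, intra);
  return q.worth_submitting () ? 0 : 1;
}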
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_mpeg2_dec_decode_slice (GstMpeg2Decoder * decoder,
|
||||
GstMpeg2Picture * picture, GstMpeg2Slice * slice)
|
||||
gst_d3d11_mpeg2_dec_end_picture (GstDxvaMpeg2Decoder * decoder,
|
||||
GstCodecPicture * picture, GPtrArray * ref_pics,
|
||||
const GstDxvaDecodingArgs * args)
|
||||
{
|
||||
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
|
||||
GstD3D11Mpeg2DecInner *inner = self->inner;
|
||||
GstMpegVideoSliceHdr *header = &slice->header;
|
||||
GstMpegVideoPacket *packet = &slice->packet;
|
||||
DXVA_SliceInfo slice_info = { 0, };
|
||||
|
||||
g_assert (packet->offset >= 4);
|
||||
|
||||
slice_info.wHorizontalPosition = header->mb_column;
|
||||
slice_info.wVerticalPosition = header->mb_row;
|
||||
/* including start code 4 bytes */
|
||||
slice_info.dwSliceBitsInBuffer = 8 * (packet->size + 4);
|
||||
slice_info.dwSliceDataLocation = inner->bitstream_buffer.size ();
|
||||
/* XXX: We don't have information about the number of MBs in this slice.
|
||||
* Just store offset here, and actual number will be calculated later */
|
||||
slice_info.wNumberMBsInSlice =
|
||||
(header->mb_row * inner->width_in_mb) + header->mb_column;
|
||||
slice_info.wQuantizerScaleCode = header->quantiser_scale_code;
|
||||
slice_info.wMBbitOffset = header->header_size + 32;
|
||||
|
||||
inner->slice_list.push_back (slice_info);
|
||||
|
||||
size_t pos = inner->bitstream_buffer.size ();
|
||||
inner->bitstream_buffer.resize (pos + packet->size + 4);
|
||||
memcpy (&inner->bitstream_buffer[0] + pos, packet->data + packet->offset - 4,
|
||||
packet->size + 4);
|
||||
|
||||
return GST_FLOW_OK;
|
||||
return gst_d3d11_decoder_end_picture (self->decoder, picture, args);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_mpeg2_dec_end_picture (GstMpeg2Decoder * decoder,
|
||||
GstMpeg2Picture * picture)
|
||||
gst_d3d11_mpeg2_dec_output_picture (GstDxvaMpeg2Decoder * decoder,
|
||||
GstVideoCodecFrame * frame, GstCodecPicture * picture,
|
||||
GstVideoBufferFlags buffer_flags, gint display_width, gint display_height)
|
||||
{
|
||||
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
|
||||
GstD3D11Mpeg2DecInner *inner = self->inner;
|
||||
GstD3D11DecodeInputStreamArgs input_args;
|
||||
gboolean is_field =
|
||||
picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
|
||||
guint mb_count = inner->width_in_mb * (inner->height_in_mb >> is_field);
|
||||
|
||||
if (inner->bitstream_buffer.empty ()) {
|
||||
GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
|
||||
return GST_FLOW_ERROR;
|
||||
}
|
||||
|
||||
memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
|
||||
|
||||
DXVA_SliceInfo *first = &inner->slice_list[0];
|
||||
for (size_t i = 0; i < inner->slice_list.size (); i++) {
|
||||
DXVA_SliceInfo *slice = first + i;
|
||||
|
||||
/* Update the number of MBs per slice */
|
||||
if (i == inner->slice_list.size () - 1) {
|
||||
slice->wNumberMBsInSlice = mb_count - slice->wNumberMBsInSlice;
|
||||
} else {
|
||||
DXVA_SliceInfo *next = first + i + 1;
|
||||
slice->wNumberMBsInSlice =
|
||||
next->wNumberMBsInSlice - slice->wNumberMBsInSlice;
|
||||
}
|
||||
}
|
||||
|
||||
input_args.picture_params = &inner->pic_params;
|
||||
input_args.picture_params_size = sizeof (DXVA_PictureParameters);
|
||||
input_args.slice_control = &inner->slice_list[0];
|
||||
input_args.slice_control_size =
|
||||
sizeof (DXVA_SliceInfo) * inner->slice_list.size ();
|
||||
input_args.bitstream = &inner->bitstream_buffer[0];
|
||||
input_args.bitstream_size = inner->bitstream_buffer.size ();
|
||||
if (inner->submit_iq_data) {
|
||||
input_args.inverse_quantization_matrix = &inner->iq_matrix;
|
||||
input_args.inverse_quantization_matrix_size = sizeof (DXVA_QmatrixData);
|
||||
}
|
||||
|
||||
return gst_d3d11_decoder_decode_picture (inner->d3d11_decoder,
|
||||
GST_CODEC_PICTURE (picture), &input_args);
|
||||
}
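
The wNumberMBsInSlice fix-up in the removed end_picture() above deserves a short illustration: decode_slice() only knows each slice's starting macroblock (mb_row * width_in_mb + mb_column), so the start offset is stored first and converted into a count once all slices are known, with the last slice taking whatever remains of the picture. A standalone sketch of that conversion (not part of the commit):

#include <cassert>
#include <vector>

/* Convert per-slice starting MB offsets into per-slice MB counts, the same
 * way the removed end_picture() rewrote DXVA_SliceInfo::wNumberMBsInSlice */
static std::vector<unsigned>
mb_counts_from_starts (const std::vector<unsigned> & starts, unsigned total_mbs)
{
  std::vector<unsigned> counts (starts.size ());
  for (size_t i = 0; i < starts.size (); i++) {
    if (i + 1 < starts.size ())
      counts[i] = starts[i + 1] - starts[i];
    else
      counts[i] = total_mbs - starts[i];
  }
  return counts;
}

int
main (void)
{
  /* e.g. a 45x36 MB picture (720x576) with one slice per MB row has starts
   * 0, 45, 90, ... and every count comes out as 45 */
  std::vector<unsigned> starts;
  for (unsigned row = 0; row < 36; row++)
    starts.push_back (row * 45);
  std::vector<unsigned> counts = mb_counts_from_starts (starts, 45 * 36);
  for (unsigned c : counts)
    assert (c == 45);
  return 0;
}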
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_mpeg2_dec_output_picture (GstMpeg2Decoder * decoder,
|
||||
GstVideoCodecFrame * frame, GstMpeg2Picture * picture)
|
||||
{
|
||||
GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
|
||||
GstD3D11Mpeg2DecInner *inner = self->inner;
|
||||
|
||||
return gst_d3d11_decoder_output_picture (inner->d3d11_decoder,
|
||||
GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
|
||||
picture->buffer_flags, inner->width, inner->height);
|
||||
return gst_d3d11_decoder_output_picture (self->decoder,
|
||||
GST_VIDEO_DECODER (decoder), frame, picture,
|
||||
buffer_flags, display_width, display_height);
|
||||
}
|
||||
|
||||
void
|
||||

@@ -700,18 +300,18 @@ gst_d3d11_mpeg2_dec_register (GstPlugin * plugin, GstD3D11Device * device,
  guint index = 0;
  GTypeInfo type_info = {
    sizeof (GstD3D11Mpeg2DecClass),
    NULL,
    NULL,
    nullptr,
    nullptr,
    (GClassInitFunc) gst_d3d11_mpeg2_dec_class_init,
    NULL,
    NULL,
    nullptr,
    nullptr,
    sizeof (GstD3D11Mpeg2Dec),
    0,
    (GInstanceInitFunc) gst_d3d11_mpeg2_dec_init,
  };
  const GUID *supported_profile = NULL;
  GstCaps *sink_caps = NULL;
  GstCaps *src_caps = NULL;
  const GUID *supported_profile = nullptr;
  GstCaps *sink_caps = nullptr;
  GstCaps *src_caps = nullptr;

  if (!gst_d3d11_decoder_get_supported_decoder_profile (device,
          GST_DXVA_CODEC_MPEG2, GST_VIDEO_FORMAT_NV12, &supported_profile)) {

@@ -726,7 +326,7 @@ gst_d3d11_mpeg2_dec_register (GstPlugin * plugin, GstD3D11Device * device,
      GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY "); video/x-raw");

  /* NOTE: We are supporting only 4:2:0, main or simple profiles */
  gst_caps_set_simple (src_caps, "format", G_TYPE_STRING, "NV12", NULL);
  gst_caps_set_simple (src_caps, "format", G_TYPE_STRING, "NV12", nullptr);

  type_info.class_data =
      gst_d3d11_decoder_class_data_new (device, GST_DXVA_CODEC_MPEG2,

@@ -743,7 +343,7 @@ gst_d3d11_mpeg2_dec_register (GstPlugin * plugin, GstD3D11Device * device,
    feature_name = g_strdup_printf ("d3d11mpeg2device%ddec", index);
  }

  type = g_type_register_static (GST_TYPE_MPEG2_DECODER,
  type = g_type_register_static (GST_TYPE_DXVA_MPEG2_DECODER,
      type_name, &type_info, (GTypeFlags) 0);

  /* make lower rank than default device */

@@ -17,8 +17,7 @@
 * Boston, MA 02110-1301, USA.
 */

#ifndef __GST_D3D11_MPEG2_DEC_H__
#define __GST_D3D11_MPEG2_DEC_H__
#pragma once

#include "gstd3d11decoder.h"

@@ -29,5 +28,3 @@ void gst_d3d11_mpeg2_dec_register (GstPlugin * plugin,
    guint rank);

G_END_DECLS

#endif /* __GST_D3D11_MPEG2_DEC_H__ */

@ -37,88 +37,13 @@
|
|||
#endif
|
||||
|
||||
#include "gstd3d11vp8dec.h"
|
||||
|
||||
#include <gst/codecs/gstvp8decoder.h>
|
||||
#include <string.h>
|
||||
#include <vector>
|
||||
|
||||
/* HACK: to expose dxva data structure on UWP */
|
||||
#ifdef WINAPI_PARTITION_DESKTOP
|
||||
#undef WINAPI_PARTITION_DESKTOP
|
||||
#endif
|
||||
#define WINAPI_PARTITION_DESKTOP 1
|
||||
#include <d3d9.h>
|
||||
#include <dxva.h>
|
||||
#include <gst/dxva/gstdxvavp8decoder.h>
|
||||
|
||||
GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_vp8_dec_debug);
|
||||
#define GST_CAT_DEFAULT gst_d3d11_vp8_dec_debug
|
||||
|
||||
/* *INDENT-OFF* */
|
||||
typedef struct _GstD3D11Vp8DecInner
|
||||
{
|
||||
GstD3D11Device *device = nullptr;
|
||||
GstD3D11Decoder *d3d11_decoder = nullptr;
|
||||
|
||||
DXVA_PicParams_VP8 pic_params;
|
||||
DXVA_Slice_VPx_Short slice;
|
||||
|
||||
/* In case of VP8, there's only one slice per picture so we don't
|
||||
* need this bitstream buffer, but this will be used for 128 bytes alignment */
|
||||
std::vector<guint8> bitstream_buffer;
|
||||
|
||||
guint width = 0;
|
||||
guint height = 0;
|
||||
GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
|
||||
} GstD3D11Vp8DecInner;
|
||||
/* *INDENT-ON* */
|
||||
|
||||
typedef struct _GstD3D11Vp8Dec
|
||||
{
|
||||
GstVp8Decoder parent;
|
||||
GstD3D11Vp8DecInner *inner;
|
||||
} GstD3D11Vp8Dec;
|
||||
|
||||
typedef struct _GstD3D11Vp8DecClass
|
||||
{
|
||||
GstVp8DecoderClass parent_class;
|
||||
GstD3D11DecoderSubClassData class_data;
|
||||
} GstD3D11Vp8DecClass;
|
||||
|
||||
static GstElementClass *parent_class = NULL;
|
||||
|
||||
#define GST_D3D11_VP8_DEC(object) ((GstD3D11Vp8Dec *) (object))
|
||||
#define GST_D3D11_VP8_DEC_GET_CLASS(object) \
|
||||
(G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstD3D11Vp8DecClass))
|
||||
|
||||
static void gst_d3d11_vp8_dec_get_property (GObject * object,
|
||||
guint prop_id, GValue * value, GParamSpec * pspec);
|
||||
static void gst_d3d11_vp8_dec_finalize (GObject * object);
|
||||
static void gst_d3d11_vp8_dec_set_context (GstElement * element,
|
||||
GstContext * context);
|
||||
|
||||
static gboolean gst_d3d11_vp8_dec_open (GstVideoDecoder * decoder);
|
||||
static gboolean gst_d3d11_vp8_dec_close (GstVideoDecoder * decoder);
|
||||
static gboolean gst_d3d11_vp8_dec_negotiate (GstVideoDecoder * decoder);
|
||||
static gboolean gst_d3d11_vp8_dec_decide_allocation (GstVideoDecoder *
|
||||
decoder, GstQuery * query);
|
||||
static gboolean gst_d3d11_vp8_dec_src_query (GstVideoDecoder * decoder,
|
||||
GstQuery * query);
|
||||
static gboolean gst_d3d11_vp8_sink_event (GstVideoDecoder * decoder,
|
||||
GstEvent * event);
|
||||
|
||||
/* GstVp8Decoder */
|
||||
static GstFlowReturn gst_d3d11_vp8_dec_new_sequence (GstVp8Decoder * decoder,
|
||||
const GstVp8FrameHdr * frame_hdr, gint max_dpb_size);
|
||||
static GstFlowReturn gst_d3d11_vp8_dec_new_picture (GstVp8Decoder * decoder,
|
||||
GstVideoCodecFrame * frame, GstVp8Picture * picture);
|
||||
static GstFlowReturn gst_d3d11_vp8_dec_start_picture (GstVp8Decoder * decoder,
|
||||
GstVp8Picture * picture);
|
||||
static GstFlowReturn gst_d3d11_vp8_dec_decode_picture (GstVp8Decoder * decoder,
|
||||
GstVp8Picture * picture, GstVp8Parser * parser);
|
||||
static GstFlowReturn gst_d3d11_vp8_dec_end_picture (GstVp8Decoder * decoder,
|
||||
GstVp8Picture * picture);
|
||||
static GstFlowReturn gst_d3d11_vp8_dec_output_picture (GstVp8Decoder *
|
||||
decoder, GstVideoCodecFrame * frame, GstVp8Picture * picture);
|
||||
GST_D3D11_DECODER_DEFINE_TYPE (GstD3D11Vp8Dec, gst_d3d11_vp8_dec,
|
||||
GST, D3D11_VP8_DEC, GstDxvaVp8Decoder);
|
||||
|
||||
static void
|
||||
gst_d3d11_vp8_dec_class_init (GstD3D11Vp8DecClass * klass, gpointer data)
|
||||
|
@ -126,11 +51,10 @@ gst_d3d11_vp8_dec_class_init (GstD3D11Vp8DecClass * klass, gpointer data)
|
|||
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
|
||||
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
|
||||
GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
|
||||
GstVp8DecoderClass *vp8decoder_class = GST_VP8_DECODER_CLASS (klass);
|
||||
GstDxvaVp8DecoderClass *dxva_class = GST_DXVA_VP8_DECODER_CLASS (klass);
|
||||
GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;
|
||||
|
||||
gobject_class->get_property = gst_d3d11_vp8_dec_get_property;
|
||||
gobject_class->finalize = gst_d3d11_vp8_dec_finalize;
|
||||
|
||||
element_class->set_context =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_set_context);
|
||||
|
@ -153,27 +77,24 @@ gst_d3d11_vp8_dec_class_init (GstD3D11Vp8DecClass * klass, gpointer data)
|
|||
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_negotiate);
|
||||
decoder_class->decide_allocation =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_decide_allocation);
|
||||
decoder_class->sink_query = GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_sink_query);
|
||||
decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_src_query);
|
||||
decoder_class->sink_event = GST_DEBUG_FUNCPTR (gst_d3d11_vp8_sink_event);
|
||||
decoder_class->sink_event = GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_sink_event);
|
||||
|
||||
vp8decoder_class->new_sequence =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_new_sequence);
|
||||
vp8decoder_class->new_picture =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_new_picture);
|
||||
vp8decoder_class->start_picture =
|
||||
dxva_class->configure = GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_configure);
|
||||
dxva_class->new_picture = GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_new_picture);
|
||||
dxva_class->get_picture_id =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_get_picture_id);
|
||||
dxva_class->start_picture =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_start_picture);
|
||||
vp8decoder_class->decode_picture =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_decode_picture);
|
||||
vp8decoder_class->end_picture =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_end_picture);
|
||||
vp8decoder_class->output_picture =
|
||||
dxva_class->end_picture = GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_end_picture);
|
||||
dxva_class->output_picture =
|
||||
GST_DEBUG_FUNCPTR (gst_d3d11_vp8_dec_output_picture);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_vp8_dec_init (GstD3D11Vp8Dec * self)
|
||||
{
|
||||
self->inner = new GstD3D11Vp8DecInner ();
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -186,26 +107,15 @@ gst_d3d11_vp8_dec_get_property (GObject * object, guint prop_id,
|
|||
gst_d3d11_decoder_proxy_get_property (object, prop_id, value, pspec, cdata);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_vp8_dec_finalize (GObject * object)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (object);
|
||||
|
||||
delete self->inner;
|
||||
|
||||
G_OBJECT_CLASS (parent_class)->finalize (object);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_vp8_dec_set_context (GstElement * element, GstContext * context)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (element);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
GstD3D11Vp8DecClass *klass = GST_D3D11_VP8_DEC_GET_CLASS (self);
|
||||
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
|
||||
|
||||
gst_d3d11_handle_set_context_for_adapter_luid (element,
|
||||
context, cdata->adapter_luid, &inner->device);
|
||||
context, cdata->adapter_luid, &self->device);
|
||||
|
||||
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
|
||||
}
|
||||
|
@ -214,27 +124,20 @@ static gboolean
|
|||
gst_d3d11_vp8_dec_open (GstVideoDecoder * decoder)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
GstD3D11Vp8DecClass *klass = GST_D3D11_VP8_DEC_GET_CLASS (self);
|
||||
GstD3D11DecoderSubClassData *cdata = &klass->class_data;
|
||||
|
||||
if (!gst_d3d11_decoder_proxy_open (decoder,
|
||||
cdata, &inner->device, &inner->d3d11_decoder)) {
|
||||
GST_ERROR_OBJECT (self, "Failed to open decoder");
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
return TRUE;
|
||||
return gst_d3d11_decoder_proxy_open (decoder,
|
||||
cdata, &self->device, &self->decoder);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_vp8_dec_close (GstVideoDecoder * decoder)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
|
||||
gst_clear_object (&inner->d3d11_decoder);
|
||||
gst_clear_object (&inner->device);
|
||||
gst_clear_object (&self->decoder);
|
||||
gst_clear_object (&self->device);
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
@ -243,9 +146,8 @@ static gboolean
|
|||
gst_d3d11_vp8_dec_negotiate (GstVideoDecoder * decoder)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
|
||||
if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
|
||||
if (!gst_d3d11_decoder_negotiate (self->decoder, decoder))
|
||||
return FALSE;
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
|
||||
|
@ -256,27 +158,42 @@ gst_d3d11_vp8_dec_decide_allocation (GstVideoDecoder * decoder,
|
|||
GstQuery * query)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
|
||||
if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder, decoder,
|
||||
query)) {
|
||||
if (!gst_d3d11_decoder_decide_allocation (self->decoder, decoder, query))
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation
|
||||
(decoder, query);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_vp8_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
|
||||
gst_d3d11_vp8_dec_sink_query (GstVideoDecoder * decoder, GstQuery * query)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
|
||||
switch (GST_QUERY_TYPE (query)) {
|
||||
case GST_QUERY_CONTEXT:
|
||||
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
|
||||
query, inner->device)) {
|
||||
query, self->device)) {
|
||||
return TRUE;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_vp8_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
|
||||
switch (GST_QUERY_TYPE (query)) {
|
||||
case GST_QUERY_CONTEXT:
|
||||
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
|
||||
query, self->device)) {
|
||||
return TRUE;
|
||||
}
|
||||
break;
|
||||
|
@ -288,306 +205,76 @@ gst_d3d11_vp8_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_d3d11_vp8_sink_event (GstVideoDecoder * decoder, GstEvent * event)
|
||||
gst_d3d11_vp8_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
|
||||
switch (GST_EVENT_TYPE (event)) {
|
||||
case GST_EVENT_FLUSH_START:
|
||||
if (inner->d3d11_decoder)
|
||||
gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
|
||||
break;
|
||||
case GST_EVENT_FLUSH_STOP:
|
||||
if (inner->d3d11_decoder)
|
||||
gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if (self->decoder)
|
||||
gst_d3d11_decoder_sink_event (self->decoder, event);
|
||||
|
||||
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_vp8_dec_new_sequence (GstVp8Decoder * decoder,
|
||||
const GstVp8FrameHdr * frame_hdr, gint max_dpb_size)
|
||||
gst_d3d11_vp8_dec_configure (GstDxvaVp8Decoder * decoder,
|
||||
GstVideoCodecState * input_state, const GstVideoInfo * info,
|
||||
gint crop_x, gint crop_y, gint coded_width, gint coded_height,
|
||||
gint max_dpb_size)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
GstVideoInfo info;
|
||||
|
||||
GST_LOG_OBJECT (self, "new sequence");
|
||||
|
||||
/* FIXME: support I420 */
|
||||
inner->out_format = GST_VIDEO_FORMAT_NV12;
|
||||
inner->width = frame_hdr->width;
|
||||
inner->height = frame_hdr->height;
|
||||
|
||||
gst_video_info_set_format (&info,
|
||||
inner->out_format, inner->width, inner->height);
|
||||
|
||||
if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
|
||||
decoder->input_state, &info, 0, 0, inner->width, inner->height,
|
||||
max_dpb_size)) {
|
||||
GST_ERROR_OBJECT (self, "Failed to create decoder");
|
||||
return GST_FLOW_NOT_NEGOTIATED;
|
||||
}
|
||||
|
||||
if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
|
||||
GST_WARNING_OBJECT (self, "Failed to negotiate with downstream");
|
||||
return GST_FLOW_NOT_NEGOTIATED;
|
||||
}
|
||||
|
||||
return GST_FLOW_OK;
|
||||
return gst_d3d11_decoder_configure (self->decoder, input_state,
|
||||
info, crop_x, crop_y, coded_width, coded_height, max_dpb_size);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_vp8_dec_new_picture (GstVp8Decoder * decoder,
|
||||
GstVideoCodecFrame * frame, GstVp8Picture * picture)
|
||||
gst_d3d11_vp8_dec_new_picture (GstDxvaVp8Decoder * decoder,
|
||||
GstCodecPicture * picture)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
|
||||
return gst_d3d11_decoder_new_picture (inner->d3d11_decoder,
|
||||
GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
|
||||
return gst_d3d11_decoder_new_picture (self->decoder,
|
||||
GST_VIDEO_DECODER (decoder), picture);
|
||||
}
|
||||
|
||||
static guint8
|
||||
gst_d3d11_vp8_dec_get_picture_id (GstDxvaVp8Decoder * decoder,
|
||||
GstCodecPicture * picture)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
|
||||
return gst_d3d11_decoder_get_picture_id (self->decoder, picture);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_vp8_dec_start_picture (GstVp8Decoder * decoder,
|
||||
GstVp8Picture * picture)
|
||||
gst_d3d11_vp8_dec_start_picture (GstDxvaVp8Decoder * decoder,
|
||||
GstCodecPicture * picture, guint8 * picture_id)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
|
||||
inner->bitstream_buffer.resize (0);
|
||||
|
||||
return GST_FLOW_OK;
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_vp8_dec_copy_frame_params (GstD3D11Vp8Dec * self,
|
||||
GstVp8Picture * picture, GstVp8Parser * parser, DXVA_PicParams_VP8 * params)
|
||||
{
|
||||
const GstVp8FrameHdr *frame_hdr = &picture->frame_hdr;
|
||||
gint i;
|
||||
|
||||
/* 0: keyframe, 1: inter */
|
||||
params->frame_type = !frame_hdr->key_frame;
|
||||
params->version = frame_hdr->version;
|
||||
params->show_frame = frame_hdr->show_frame;
|
||||
params->clamp_type = frame_hdr->clamping_type;
|
||||
|
||||
params->filter_type = frame_hdr->filter_type;
|
||||
params->filter_level = frame_hdr->loop_filter_level;
|
||||
params->sharpness_level = frame_hdr->sharpness_level;
|
||||
params->mode_ref_lf_delta_enabled =
|
||||
parser->mb_lf_adjust.loop_filter_adj_enable;
|
||||
params->mode_ref_lf_delta_update =
|
||||
parser->mb_lf_adjust.mode_ref_lf_delta_update;
|
||||
for (i = 0; i < 4; i++) {
|
||||
params->ref_lf_deltas[i] = parser->mb_lf_adjust.ref_frame_delta[i];
|
||||
params->mode_lf_deltas[i] = parser->mb_lf_adjust.mb_mode_delta[i];
|
||||
}
|
||||
params->log2_nbr_of_dct_partitions = frame_hdr->log2_nbr_of_dct_partitions;
|
||||
params->base_qindex = frame_hdr->quant_indices.y_ac_qi;
|
||||
params->y1dc_delta_q = frame_hdr->quant_indices.y_dc_delta;
|
||||
params->y2dc_delta_q = frame_hdr->quant_indices.y2_dc_delta;
|
||||
params->y2ac_delta_q = frame_hdr->quant_indices.y2_ac_delta;
|
||||
params->uvdc_delta_q = frame_hdr->quant_indices.uv_dc_delta;
|
||||
params->uvac_delta_q = frame_hdr->quant_indices.uv_ac_delta;
|
||||
|
||||
params->ref_frame_sign_bias_golden = frame_hdr->sign_bias_golden;
|
||||
params->ref_frame_sign_bias_altref = frame_hdr->sign_bias_alternate;
|
||||
|
||||
params->refresh_entropy_probs = frame_hdr->refresh_entropy_probs;
|
||||
|
||||
memcpy (params->vp8_coef_update_probs, frame_hdr->token_probs.prob,
|
||||
sizeof (frame_hdr->token_probs.prob));
|
||||
|
||||
params->mb_no_coeff_skip = frame_hdr->mb_no_skip_coeff;
|
||||
params->prob_skip_false = frame_hdr->prob_skip_false;
|
||||
params->prob_intra = frame_hdr->prob_intra;
|
||||
params->prob_last = frame_hdr->prob_last;
|
||||
params->prob_golden = frame_hdr->prob_gf;
|
||||
|
||||
memcpy (params->intra_16x16_prob, frame_hdr->mode_probs.y_prob,
|
||||
sizeof (frame_hdr->mode_probs.y_prob));
|
||||
memcpy (params->intra_chroma_prob, frame_hdr->mode_probs.uv_prob,
|
||||
sizeof (frame_hdr->mode_probs.uv_prob));
|
||||
memcpy (params->vp8_mv_update_probs, frame_hdr->mv_probs.prob,
|
||||
sizeof (frame_hdr->mv_probs.prob));
|
||||
}
|
||||
|
||||
static void
|
||||
gst_d3d11_vp8_dec_copy_reference_frames (GstD3D11Vp8Dec * self,
|
||||
DXVA_PicParams_VP8 * params)
|
||||
{
|
||||
GstVp8Decoder *decoder = GST_VP8_DECODER (self);
|
||||
GstD3D11Decoder *d3d11_decoder = self->inner->d3d11_decoder;
|
||||
ID3D11VideoDecoderOutputView *view;
|
||||
guint8 view_id = 0xff;
|
||||
|
||||
if (decoder->alt_ref_picture) {
|
||||
view = gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
|
||||
GST_CODEC_PICTURE (decoder->alt_ref_picture), &view_id);
|
||||
if (!view) {
|
||||
GST_ERROR_OBJECT (self, "picture does not have output view handle");
|
||||
return;
|
||||
}
|
||||
|
||||
params->alt_fb_idx.Index7Bits = view_id;
|
||||
} else {
|
||||
params->alt_fb_idx.bPicEntry = 0xff;
|
||||
}
|
||||
|
||||
if (decoder->golden_ref_picture) {
|
||||
view = gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
|
||||
GST_CODEC_PICTURE (decoder->golden_ref_picture), &view_id);
|
||||
if (!view) {
|
||||
GST_ERROR_OBJECT (self, "picture does not have output view handle");
|
||||
return;
|
||||
}
|
||||
|
||||
params->gld_fb_idx.Index7Bits = view_id;
|
||||
} else {
|
||||
params->gld_fb_idx.bPicEntry = 0xff;
|
||||
}
|
||||
|
||||
if (decoder->last_picture) {
|
||||
view = gst_d3d11_decoder_get_output_view_from_picture (d3d11_decoder,
|
||||
GST_CODEC_PICTURE (decoder->last_picture), &view_id);
|
||||
if (!view) {
|
||||
GST_ERROR_OBJECT (self, "picture does not have output view handle");
|
||||
return;
|
||||
}
|
||||
|
||||
params->lst_fb_idx.Index7Bits = view_id;
|
||||
} else {
|
||||
params->lst_fb_idx.bPicEntry = 0xff;
|
||||
}
|
||||
}
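
As a side note on the removed copy_reference_frames() above: each of lst_fb_idx, gld_fb_idx and alt_fb_idx is a DXVA picture-entry byte where the low seven bits (Index7Bits) carry the decoder output view id when the reference exists, and the whole byte set to 0xff (bPicEntry) marks the slot as unused. A small hedged sketch of that convention using a stand-in type rather than the real dxva.h union (not part of the commit):

#include <cstdint>

/* Simplified stand-in for the DXVA picture-entry byte used by the VP8
 * reference-frame fields above */
struct PicEntrySketch
{
  uint8_t bPicEntry;

  void set_index (uint8_t view_id) { bPicEntry = view_id & 0x7f; }
  void set_unused () { bPicEntry = 0xff; }
  bool is_unused () const { return bPicEntry == 0xff; }
};

/* view_id is the ID3D11VideoDecoderOutputView index of the reference picture
 * (last/golden/altref), or a negative value when that reference is absent */
static void
fill_ref_entry (PicEntrySketch * entry, int view_id)
{
  if (view_id >= 0)
    entry->set_index ((uint8_t) view_id);
  else
    entry->set_unused ();
}

int
main (void)
{
  PicEntrySketch last, golden;
  fill_ref_entry (&last, 2);    /* last frame lives in view 2 */
  fill_ref_entry (&golden, -1); /* no golden frame yet */
  return (last.bPicEntry == 2 && golden.is_unused ()) ? 0 : 1;
}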
|
||||
|
||||
static void
|
||||
gst_d3d11_vp8_dec_copy_segmentation_params (GstD3D11Vp8Dec * self,
|
||||
GstVp8Parser * parser, DXVA_PicParams_VP8 * params)
|
||||
{
|
||||
const GstVp8Segmentation *seg = &parser->segmentation;
|
||||
gint i;
|
||||
|
||||
params->stVP8Segments.segmentation_enabled = seg->segmentation_enabled;
|
||||
params->stVP8Segments.update_mb_segmentation_map =
|
||||
seg->update_mb_segmentation_map;
|
||||
params->stVP8Segments.update_mb_segmentation_data =
|
||||
seg->update_segment_feature_data;
|
||||
params->stVP8Segments.mb_segement_abs_delta = seg->segment_feature_mode;
|
||||
|
||||
for (i = 0; i < 4; i++) {
|
||||
params->stVP8Segments.segment_feature_data[0][i] =
|
||||
seg->quantizer_update_value[i];
|
||||
}
|
||||
|
||||
for (i = 0; i < 4; i++) {
|
||||
params->stVP8Segments.segment_feature_data[1][i] = seg->lf_update_value[i];
|
||||
}
|
||||
|
||||
for (i = 0; i < 3; i++) {
|
||||
params->stVP8Segments.mb_segment_tree_probs[i] = seg->segment_prob[i];
|
||||
}
|
||||
return gst_d3d11_decoder_start_picture (self->decoder, picture, picture_id);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_vp8_dec_decode_picture (GstVp8Decoder * decoder,
|
||||
GstVp8Picture * picture, GstVp8Parser * parser)
|
||||
gst_d3d11_vp8_dec_end_picture (GstDxvaVp8Decoder * decoder,
|
||||
GstCodecPicture * picture, GPtrArray * ref_pics,
|
||||
const GstDxvaDecodingArgs * args)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
DXVA_PicParams_VP8 *pic_params = &inner->pic_params;
|
||||
DXVA_Slice_VPx_Short *slice = &inner->slice;
|
||||
ID3D11VideoDecoderOutputView *view;
|
||||
guint8 view_id = 0xff;
|
||||
const GstVp8FrameHdr *frame_hdr = &picture->frame_hdr;
|
||||
|
||||
|
||||
view = gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
|
||||
GST_CODEC_PICTURE (picture), &view_id);
|
||||
if (!view) {
|
||||
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
|
||||
return GST_FLOW_ERROR;
|
||||
}
|
||||
|
||||
memset (pic_params, 0, sizeof (DXVA_PicParams_VP8));
|
||||
|
||||
pic_params->first_part_size = frame_hdr->first_part_size;
|
||||
pic_params->width = inner->width;
|
||||
pic_params->height = inner->height;
|
||||
pic_params->CurrPic.Index7Bits = view_id;
|
||||
pic_params->StatusReportFeedbackNumber = 1;
|
||||
|
||||
gst_d3d11_vp8_dec_copy_frame_params (self, picture, parser, pic_params);
|
||||
gst_d3d11_vp8_dec_copy_reference_frames (self, pic_params);
|
||||
gst_d3d11_vp8_dec_copy_segmentation_params (self, parser, pic_params);
|
||||
|
||||
inner->bitstream_buffer.resize (picture->size);
|
||||
memcpy (&inner->bitstream_buffer[0], picture->data, picture->size);
|
||||
|
||||
slice->BSNALunitDataLocation = 0;
|
||||
slice->SliceBytesInBuffer = inner->bitstream_buffer.size ();
|
||||
slice->wBadSliceChopping = 0;
|
||||
|
||||
return GST_FLOW_OK;
|
||||
return gst_d3d11_decoder_end_picture (self->decoder, picture, args);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_vp8_dec_end_picture (GstVp8Decoder * decoder, GstVp8Picture * picture)
|
||||
gst_d3d11_vp8_dec_output_picture (GstDxvaVp8Decoder * decoder,
|
||||
GstVideoCodecFrame * frame, GstCodecPicture * picture,
|
||||
GstVideoBufferFlags buffer_flags, gint display_width, gint display_height)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
size_t bitstream_buffer_size;
|
||||
size_t bitstream_pos;
|
||||
GstD3D11DecodeInputStreamArgs input_args;
|
||||
|
||||
if (inner->bitstream_buffer.empty ()) {
|
||||
GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
|
||||
return GST_FLOW_ERROR;
|
||||
}
|
||||
|
||||
memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
|
||||
|
||||
bitstream_pos = inner->bitstream_buffer.size ();
|
||||
bitstream_buffer_size = GST_ROUND_UP_128 (bitstream_pos);
|
||||
|
||||
if (bitstream_buffer_size > bitstream_pos) {
|
||||
size_t padding = bitstream_buffer_size - bitstream_pos;
|
||||
|
||||
/* As per DXVA spec, total amount of bitstream buffer size should be
|
||||
* 128 bytes aligned. If actual data is not multiple of 128 bytes,
|
||||
* the last slice data needs to be zero-padded */
|
||||
inner->bitstream_buffer.resize (bitstream_buffer_size, 0);
|
||||
|
||||
inner->slice.SliceBytesInBuffer += padding;
|
||||
}
|
||||
|
||||
input_args.picture_params = &inner->pic_params;
|
||||
input_args.picture_params_size = sizeof (DXVA_PicParams_VP8);
|
||||
input_args.slice_control = &inner->slice;
|
||||
input_args.slice_control_size = sizeof (DXVA_Slice_VPx_Short);
|
||||
input_args.bitstream = &inner->bitstream_buffer[0];
|
||||
input_args.bitstream_size = inner->bitstream_buffer.size ();
|
||||
|
||||
return gst_d3d11_decoder_decode_picture (inner->d3d11_decoder,
|
||||
GST_CODEC_PICTURE (picture), &input_args);
|
||||
}
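
One more aside on the removed VP8 submission path above: DXVA expects the submitted bitstream buffer size to be a multiple of 128 bytes, so when the frame data is not already aligned the buffer is zero-padded and the padding is added to SliceBytesInBuffer. A minimal sketch of that rounding, equivalent to the GST_ROUND_UP_128 call in the code above (not part of the commit):

#include <cassert>
#include <cstddef>
#include <vector>

/* Round up to the next multiple of 128, per the DXVA bitstream buffer
 * alignment rule */
static size_t
round_up_128 (size_t size)
{
  return (size + 127) & ~(size_t) 127;
}

int
main (void)
{
  std::vector<unsigned char> bitstream (1000, 0xab);
  size_t padded = round_up_128 (bitstream.size ());
  assert (padded == 1024);

  /* zero-pad the tail; the padding bytes also get counted in the last
   * slice's SliceBytesInBuffer */
  size_t padding = padded - bitstream.size ();
  bitstream.resize (padded, 0);
  assert (padding == 24 && bitstream.size () == 1024);
  return 0;
}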
|
||||
|
||||
static GstFlowReturn
|
||||
gst_d3d11_vp8_dec_output_picture (GstVp8Decoder * decoder,
|
||||
GstVideoCodecFrame * frame, GstVp8Picture * picture)
|
||||
{
|
||||
GstD3D11Vp8Dec *self = GST_D3D11_VP8_DEC (decoder);
|
||||
GstD3D11Vp8DecInner *inner = self->inner;
|
||||
|
||||
return gst_d3d11_decoder_output_picture (inner->d3d11_decoder,
|
||||
GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
|
||||
0, inner->width, inner->height);
|
||||
return gst_d3d11_decoder_output_picture (self->decoder,
|
||||
GST_VIDEO_DECODER (decoder), frame, picture,
|
||||
buffer_flags, display_width, display_height);
|
||||
}
|
||||
|
||||
void
|
||||
|
@ -601,18 +288,18 @@ gst_d3d11_vp8_dec_register (GstPlugin * plugin, GstD3D11Device * device,
|
|||
guint i;
|
||||
GTypeInfo type_info = {
|
||||
sizeof (GstD3D11Vp8DecClass),
|
||||
NULL,
|
||||
NULL,
|
||||
nullptr,
|
||||
nullptr,
|
||||
(GClassInitFunc) gst_d3d11_vp8_dec_class_init,
|
||||
NULL,
|
||||
NULL,
|
||||
nullptr,
|
||||
nullptr,
|
||||
sizeof (GstD3D11Vp8Dec),
|
||||
0,
|
||||
(GInstanceInitFunc) gst_d3d11_vp8_dec_init,
|
||||
};
|
||||
const GUID *profile_guid = NULL;
|
||||
GstCaps *sink_caps = NULL;
|
||||
GstCaps *src_caps = NULL;
|
||||
const GUID *profile_guid = nullptr;
|
||||
GstCaps *sink_caps = nullptr;
|
||||
GstCaps *src_caps = nullptr;
|
||||
guint max_width = 0;
|
||||
guint max_height = 0;
|
||||
guint resolution;
|
||||
|
@ -647,7 +334,7 @@ gst_d3d11_vp8_dec_register (GstPlugin * plugin, GstD3D11Device * device,
|
|||
src_caps = gst_caps_from_string ("video/x-raw("
|
||||
GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY "); video/x-raw");
|
||||
|
||||
gst_caps_set_simple (src_caps, "format", G_TYPE_STRING, "NV12", NULL);
|
||||
gst_caps_set_simple (src_caps, "format", G_TYPE_STRING, "NV12", nullptr);
|
||||
|
||||
/* To cover both landscape and portrait, select max value */
|
||||
resolution = MAX (max_width, max_height);
|
||||
|
@ -667,7 +354,7 @@ gst_d3d11_vp8_dec_register (GstPlugin * plugin, GstD3D11Device * device,
|
|||
feature_name = g_strdup_printf ("d3d11vp8device%ddec", index);
|
||||
}
|
||||
|
||||
type = g_type_register_static (GST_TYPE_VP8_DECODER,
|
||||
type = g_type_register_static (GST_TYPE_DXVA_VP8_DECODER,
|
||||
type_name, &type_info, (GTypeFlags) 0);
|
||||
|
||||
/* make lower rank than default device */

@@ -17,8 +17,7 @@
 * Boston, MA 02110-1301, USA.
 */

#ifndef __GST_D3D11_VP8_DEC_H__
#define __GST_D3D11_VP8_DEC_H__
#pragma once

#include "gstd3d11decoder.h"

@@ -29,5 +28,3 @@ void gst_d3d11_vp8_dec_register (GstPlugin * plugin,
    guint rank);

G_END_DECLS

#endif /* __GST_D3D11_VP8_DEC_H__ */

@ -15,36 +15,6 @@
|
|||
* License along with this library; if not, write to the
|
||||
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
|
||||
* Boston, MA 02110-1301, USA.
|
||||
*
|
||||
* NOTE: some of implementations are copied/modified from Chromium code
|
||||
*
|
||||
* Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

/**

@ -68,91 +38,13 @@

#include "gstd3d11vp9dec.h"
#include "gstd3d11pluginutils.h"

#include <gst/codecs/gstvp9decoder.h>
#include <string.h>
#include <vector>

/* HACK: to expose dxva data structure on UWP */
#ifdef WINAPI_PARTITION_DESKTOP
#undef WINAPI_PARTITION_DESKTOP
#endif
#define WINAPI_PARTITION_DESKTOP 1
#include <d3d9.h>
#include <dxva.h>
#include <gst/dxva/gstdxvavp9decoder.h>

GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_vp9_dec_debug);
#define GST_CAT_DEFAULT gst_d3d11_vp9_dec_debug

/* *INDENT-OFF* */
typedef struct _GstD3D11Vp9DecInner
{
GstD3D11Device *device = nullptr;
GstD3D11Decoder *d3d11_decoder = nullptr;

DXVA_PicParams_VP9 pic_params;
DXVA_Slice_VPx_Short slice;

/* In case of VP9, there's only one slice per picture so we don't
* need this bitstream buffer, but this will be used for 128 bytes alignment */
std::vector<guint8> bitstream_buffer;

/* To calculate use_prev_in_find_mv_refs */
guint last_frame_width = 0;
guint last_frame_height = 0;
gboolean last_show_frame = FALSE;
} GstD3D11Vp9DecInner;
/* *INDENT-ON* */

typedef struct _GstD3D11Vp9Dec
{
GstVp9Decoder parent;
GstD3D11Vp9DecInner *inner;
} GstD3D11Vp9Dec;

typedef struct _GstD3D11Vp9DecClass
{
GstVp9DecoderClass parent_class;
GstD3D11DecoderSubClassData class_data;
} GstD3D11Vp9DecClass;

static GstElementClass *parent_class = NULL;

#define GST_D3D11_VP9_DEC(object) ((GstD3D11Vp9Dec *) (object))
#define GST_D3D11_VP9_DEC_GET_CLASS(object) \
(G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstD3D11Vp9DecClass))

static void gst_d3d11_vp9_dec_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static void gst_d3d11_vp9_dec_finalize (GObject * object);
static void gst_d3d11_vp9_dec_set_context (GstElement * element,
GstContext * context);

static gboolean gst_d3d11_vp9_dec_open (GstVideoDecoder * decoder);
static gboolean gst_d3d11_vp9_dec_close (GstVideoDecoder * decoder);
static gboolean gst_d3d11_vp9_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_d3d11_vp9_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
static gboolean gst_d3d11_vp9_dec_src_query (GstVideoDecoder * decoder,
GstQuery * query);
static gboolean gst_d3d11_vp9_dec_sink_event (GstVideoDecoder * decoder,
GstEvent * event);

/* GstVp9Decoder */
static GstFlowReturn gst_d3d11_vp9_dec_new_sequence (GstVp9Decoder * decoder,
const GstVp9FrameHeader * frame_hdr, gint max_dpb_size);
static GstFlowReturn gst_d3d11_vp9_dec_new_picture (GstVp9Decoder * decoder,
GstVideoCodecFrame * frame, GstVp9Picture * picture);
static GstVp9Picture *gst_d3d11_vp9_dec_duplicate_picture (GstVp9Decoder *
decoder, GstVideoCodecFrame * frame, GstVp9Picture * picture);
static GstFlowReturn gst_d3d11_vp9_dec_start_picture (GstVp9Decoder * decoder,
GstVp9Picture * picture);
static GstFlowReturn gst_d3d11_vp9_dec_decode_picture (GstVp9Decoder * decoder,
GstVp9Picture * picture, GstVp9Dpb * dpb);
static GstFlowReturn gst_d3d11_vp9_dec_end_picture (GstVp9Decoder * decoder,
GstVp9Picture * picture);
static GstFlowReturn gst_d3d11_vp9_dec_output_picture (GstVp9Decoder *
decoder, GstVideoCodecFrame * frame, GstVp9Picture * picture);
GST_D3D11_DECODER_DEFINE_TYPE_FULL (GstD3D11Vp9Dec, gst_d3d11_vp9_dec,
GST, D3D11_VP9_DEC, GstDxvaVp9Decoder);

static void
gst_d3d11_vp9_dec_class_init (GstD3D11Vp9DecClass * klass, gpointer data)

@ -160,11 +52,10 @@ gst_d3d11_vp9_dec_class_init (GstD3D11Vp9DecClass * klass, gpointer data)
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
GstVp9DecoderClass *vp9decoder_class = GST_VP9_DECODER_CLASS (klass);
GstDxvaVp9DecoderClass *dxva_class = GST_DXVA_VP9_DECODER_CLASS (klass);
GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;

gobject_class->get_property = gst_d3d11_vp9_dec_get_property;
gobject_class->finalize = gst_d3d11_vp9_dec_finalize;

element_class->set_context =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_set_context);

@ -188,29 +79,26 @@ gst_d3d11_vp9_dec_class_init (GstD3D11Vp9DecClass * klass, gpointer data)
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_decide_allocation);
decoder_class->sink_query = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_sink_query);
decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_src_query);
decoder_class->sink_event = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_sink_event);

vp9decoder_class->new_sequence =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_new_sequence);
vp9decoder_class->new_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_new_picture);
vp9decoder_class->duplicate_picture =
dxva_class->configure = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_configure);
dxva_class->new_picture = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_new_picture);
dxva_class->duplicate_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_duplicate_picture);
vp9decoder_class->start_picture =
dxva_class->get_picture_id =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_get_picture_id);
dxva_class->start_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_start_picture);
vp9decoder_class->decode_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_decode_picture);
vp9decoder_class->end_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_end_picture);
vp9decoder_class->output_picture =
dxva_class->end_picture = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_end_picture);
dxva_class->output_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_output_picture);
}

static void
gst_d3d11_vp9_dec_init (GstD3D11Vp9Dec * self)
{
self->inner = new GstD3D11Vp9DecInner ();
}

static void

@ -223,26 +111,15 @@ gst_d3d11_vp9_dec_get_property (GObject * object, guint prop_id,
gst_d3d11_decoder_proxy_get_property (object, prop_id, value, pspec, cdata);
}

static void
gst_d3d11_vp9_dec_finalize (GObject * object)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (object);

delete self->inner;

G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_d3d11_vp9_dec_set_context (GstElement * element, GstContext * context)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (element);
GstD3D11Vp9DecInner *inner = self->inner;
GstD3D11Vp9DecClass *klass = GST_D3D11_VP9_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;

gst_d3d11_handle_set_context_for_adapter_luid (element,
context, cdata->adapter_luid, &inner->device);
context, cdata->adapter_luid, &self->device);

GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}

@ -252,37 +129,30 @@ gst_d3d11_vp9_dec_open (GstVideoDecoder * decoder)
{
GstVp9Decoder *vp9dec = GST_VP9_DECODER (decoder);
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;
GstD3D11Vp9DecClass *klass = GST_D3D11_VP9_DEC_GET_CLASS (self);
GstD3D11DecoderSubClassData *cdata = &klass->class_data;

if (!gst_d3d11_decoder_proxy_open (decoder,
cdata, &inner->device, &inner->d3d11_decoder)) {
GST_ERROR_OBJECT (self, "Failed to open decoder");
return FALSE;
}

/* XXX: ConfigDecoderSpecific bit 12 indicates whether accelerator can
* support non-keyframe format change or not, but it doesn't seem to be
* reliable, since 1b means that it's supported and 0b indicates it may not be
* supported. Because some GPUs can support it even if the bit 12 is not
* set, do filtering by vendor for now (AMD and Intel looks fine) */
if (gst_d3d11_get_device_vendor (inner->device) ==
if (gst_d3d11_get_device_vendor (self->device) ==
GST_D3D11_DEVICE_VENDOR_NVIDIA) {
gst_vp9_decoder_set_non_keyframe_format_change_support (vp9dec, FALSE);
}

return TRUE;
return gst_d3d11_decoder_proxy_open (decoder,
cdata, &self->device, &self->decoder);
}
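
For orientation, the vendor filtering described in the XXX comment above boils down to the check below (an illustrative sketch only, using the helpers that appear in this diff; the wrapper function name is hypothetical):

static void
gst_d3d11_vp9_dec_apply_vendor_quirks (GstVp9Decoder * vp9dec,
    GstD3D11Device * device)
{
  /* ConfigDecoderSpecific bit 12 is not a reliable indicator, so key off
   * the GPU vendor instead: NVIDIA is known to mishandle non-keyframe
   * format changes */
  if (gst_d3d11_get_device_vendor (device) == GST_D3D11_DEVICE_VENDOR_NVIDIA)
    gst_vp9_decoder_set_non_keyframe_format_change_support (vp9dec, FALSE);
}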

static gboolean
gst_d3d11_vp9_dec_close (GstVideoDecoder * decoder)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;

gst_clear_object (&inner->d3d11_decoder);
gst_clear_object (&inner->device);
gst_clear_object (&self->decoder);
gst_clear_object (&self->device);

return TRUE;
}

@ -291,9 +161,8 @@ static gboolean
gst_d3d11_vp9_dec_negotiate (GstVideoDecoder * decoder)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;

if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
if (!gst_d3d11_decoder_negotiate (self->decoder, decoder))
return FALSE;

return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);

@ -304,27 +173,42 @@ gst_d3d11_vp9_dec_decide_allocation (GstVideoDecoder * decoder,
GstQuery * query)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;

if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder,
decoder, query)) {
if (!gst_d3d11_decoder_decide_allocation (self->decoder, decoder, query))
return FALSE;
}

return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation
(decoder, query);
}

static gboolean
gst_d3d11_vp9_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
gst_d3d11_vp9_dec_sink_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;

switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
query, inner->device)) {
query, self->device)) {
return TRUE;
}
break;
default:
break;
}

return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
}

static gboolean
gst_d3d11_vp9_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);

switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
query, self->device)) {
return TRUE;
}
break;

@ -339,392 +223,82 @@ static gboolean
gst_d3d11_vp9_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;

switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
if (inner->d3d11_decoder)
gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
break;
case GST_EVENT_FLUSH_STOP:
if (inner->d3d11_decoder)
gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
default:
break;
}
if (self->decoder)
gst_d3d11_decoder_sink_event (self->decoder, event);

return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
}

static GstFlowReturn
gst_d3d11_vp9_dec_new_sequence (GstVp9Decoder * decoder,
const GstVp9FrameHeader * frame_hdr, gint max_dpb_size)
gst_d3d11_vp9_dec_configure (GstDxvaVp9Decoder * decoder,
GstVideoCodecState * input_state, const GstVideoInfo * info,
gint crop_x, gint crop_y, gint coded_width, gint coded_height,
gint max_dpb_size)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;
GstVideoInfo info;
GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;

GST_LOG_OBJECT (self, "new sequence");

if (frame_hdr->profile == GST_VP9_PROFILE_0)
out_format = GST_VIDEO_FORMAT_NV12;
else if (frame_hdr->profile == GST_VP9_PROFILE_2)
out_format = GST_VIDEO_FORMAT_P010_10LE;

if (out_format == GST_VIDEO_FORMAT_UNKNOWN) {
GST_ERROR_OBJECT (self, "Could not support profile %d", frame_hdr->profile);
return GST_FLOW_NOT_NEGOTIATED;
}

gst_video_info_set_format (&info,
out_format, frame_hdr->width, frame_hdr->height);

if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
decoder->input_state, &info, 0, 0, (gint) frame_hdr->width,
(gint) frame_hdr->height, max_dpb_size)) {
GST_ERROR_OBJECT (self, "Failed to create decoder");
return GST_FLOW_NOT_NEGOTIATED;
}

if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
GST_WARNING_OBJECT (self, "Failed to negotiate with downstream");
return GST_FLOW_NOT_NEGOTIATED;
}

/* Will be updated per decode_picture */
inner->last_frame_width = inner->last_frame_height = 0;
inner->last_show_frame = FALSE;

return GST_FLOW_OK;
return gst_d3d11_decoder_configure (self->decoder, input_state,
info, crop_x, crop_y, coded_width, coded_height, max_dpb_size);
}

static GstFlowReturn
gst_d3d11_vp9_dec_new_picture (GstVp9Decoder * decoder,
GstVideoCodecFrame * frame, GstVp9Picture * picture)
gst_d3d11_vp9_dec_new_picture (GstDxvaVp9Decoder * decoder,
GstCodecPicture * picture)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;

return gst_d3d11_decoder_new_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), GST_CODEC_PICTURE (picture));
}

static GstVp9Picture *
gst_d3d11_vp9_dec_duplicate_picture (GstVp9Decoder * decoder,
GstVideoCodecFrame * frame, GstVp9Picture * picture)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstBuffer *view_buffer;
GstVp9Picture *new_picture;

view_buffer = (GstBuffer *) gst_vp9_picture_get_user_data (picture);

if (!view_buffer) {
GST_ERROR_OBJECT (self, "Parent picture does not have output view buffer");
return NULL;
}

new_picture = gst_vp9_picture_new ();
new_picture->frame_hdr = picture->frame_hdr;

GST_LOG_OBJECT (self, "Duplicate output with buffer %" GST_PTR_FORMAT,
view_buffer);

gst_vp9_picture_set_user_data (new_picture,
gst_buffer_ref (view_buffer), (GDestroyNotify) gst_buffer_unref);

return new_picture;
return gst_d3d11_decoder_new_picture (self->decoder,
GST_VIDEO_DECODER (decoder), picture);
}

static GstFlowReturn
gst_d3d11_vp9_dec_start_picture (GstVp9Decoder * decoder,
GstVp9Picture * picture)
gst_d3d11_vp9_dec_duplicate_picture (GstDxvaVp9Decoder * decoder,
GstCodecPicture * src, GstCodecPicture * dst)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;

inner->bitstream_buffer.resize (0);

return GST_FLOW_OK;
return gst_d3d11_decoder_duplicate_picture (self->decoder, src, dst);
}

static void
gst_d3d11_vp9_dec_copy_frame_params (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, DXVA_PicParams_VP9 * params)
static guint8
gst_d3d11_vp9_dec_get_picture_id (GstDxvaVp9Decoder * decoder,
GstCodecPicture * picture)
{
const GstVp9FrameHeader *frame_hdr = &picture->frame_hdr;
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);

params->profile = frame_hdr->profile;
params->frame_type = frame_hdr->frame_type;
params->show_frame = frame_hdr->show_frame;
params->error_resilient_mode = frame_hdr->error_resilient_mode;
params->subsampling_x = frame_hdr->subsampling_x;
params->subsampling_y = frame_hdr->subsampling_y;
params->refresh_frame_context = frame_hdr->refresh_frame_context;
params->frame_parallel_decoding_mode =
frame_hdr->frame_parallel_decoding_mode;
params->intra_only = frame_hdr->intra_only;
params->frame_context_idx = frame_hdr->frame_context_idx;
params->reset_frame_context = frame_hdr->reset_frame_context;
if (frame_hdr->frame_type == GST_VP9_KEY_FRAME)
params->allow_high_precision_mv = 0;
else
params->allow_high_precision_mv = frame_hdr->allow_high_precision_mv;

params->width = frame_hdr->width;
params->height = frame_hdr->height;
params->BitDepthMinus8Luma = frame_hdr->bit_depth - 8;
params->BitDepthMinus8Chroma = frame_hdr->bit_depth - 8;

params->interp_filter = frame_hdr->interpolation_filter;
params->log2_tile_cols = frame_hdr->tile_cols_log2;
params->log2_tile_rows = frame_hdr->tile_rows_log2;
}

static void
gst_d3d11_vp9_dec_copy_reference_frames (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, GstVp9Dpb * dpb, DXVA_PicParams_VP9 * params)
{
GstD3D11Decoder *decoder = self->inner->d3d11_decoder;
gint i;

for (i = 0; i < GST_VP9_REF_FRAMES; i++) {
if (dpb->pic_list[i]) {
GstVp9Picture *other_pic = dpb->pic_list[i];
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;

view = gst_d3d11_decoder_get_output_view_from_picture (decoder,
GST_CODEC_PICTURE (other_pic), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "picture does not have output view handle");
return;
}

params->ref_frame_map[i].Index7Bits = view_id;
params->ref_frame_coded_width[i] = picture->frame_hdr.width;
params->ref_frame_coded_height[i] = picture->frame_hdr.height;
} else {
params->ref_frame_map[i].bPicEntry = 0xff;
params->ref_frame_coded_width[i] = 0;
params->ref_frame_coded_height[i] = 0;
}
}
}

static void
gst_d3d11_vp9_dec_copy_frame_refs (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, DXVA_PicParams_VP9 * params)
{
const GstVp9FrameHeader *frame_hdr = &picture->frame_hdr;
gint i;

for (i = 0; i < GST_VP9_REFS_PER_FRAME; i++) {
params->frame_refs[i] = params->ref_frame_map[frame_hdr->ref_frame_idx[i]];
}

G_STATIC_ASSERT (G_N_ELEMENTS (params->ref_frame_sign_bias) ==
G_N_ELEMENTS (frame_hdr->ref_frame_sign_bias));
G_STATIC_ASSERT (sizeof (params->ref_frame_sign_bias) ==
sizeof (frame_hdr->ref_frame_sign_bias));
memcpy (params->ref_frame_sign_bias,
frame_hdr->ref_frame_sign_bias, sizeof (frame_hdr->ref_frame_sign_bias));
}

static void
gst_d3d11_vp9_dec_copy_loop_filter_params (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, DXVA_PicParams_VP9 * params)
{
GstD3D11Vp9DecInner *inner = self->inner;
const GstVp9FrameHeader *frame_hdr = &picture->frame_hdr;
const GstVp9LoopFilterParams *lfp = &frame_hdr->loop_filter_params;

params->filter_level = lfp->loop_filter_level;
params->sharpness_level = lfp->loop_filter_sharpness;
params->mode_ref_delta_enabled = lfp->loop_filter_delta_enabled;
params->mode_ref_delta_update = lfp->loop_filter_delta_update;
params->use_prev_in_find_mv_refs =
inner->last_show_frame && !frame_hdr->error_resilient_mode;

if (frame_hdr->frame_type != GST_VP9_KEY_FRAME && !frame_hdr->intra_only) {
params->use_prev_in_find_mv_refs &=
(frame_hdr->width == inner->last_frame_width &&
frame_hdr->height == inner->last_frame_height);
}

G_STATIC_ASSERT (G_N_ELEMENTS (params->ref_deltas) ==
G_N_ELEMENTS (lfp->loop_filter_ref_deltas));
G_STATIC_ASSERT (sizeof (params->ref_deltas) ==
sizeof (lfp->loop_filter_ref_deltas));
memcpy (params->ref_deltas, lfp->loop_filter_ref_deltas,
sizeof (lfp->loop_filter_ref_deltas));

G_STATIC_ASSERT (G_N_ELEMENTS (params->mode_deltas) ==
G_N_ELEMENTS (lfp->loop_filter_mode_deltas));
G_STATIC_ASSERT (sizeof (params->mode_deltas) ==
sizeof (lfp->loop_filter_mode_deltas));
memcpy (params->mode_deltas, lfp->loop_filter_mode_deltas,
sizeof (lfp->loop_filter_mode_deltas));
}

static void
gst_d3d11_vp9_dec_copy_quant_params (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, DXVA_PicParams_VP9 * params)
{
const GstVp9FrameHeader *frame_hdr = &picture->frame_hdr;
const GstVp9QuantizationParams *qp = &frame_hdr->quantization_params;

params->base_qindex = qp->base_q_idx;
params->y_dc_delta_q = qp->delta_q_y_dc;
params->uv_dc_delta_q = qp->delta_q_uv_dc;
params->uv_ac_delta_q = qp->delta_q_uv_ac;
}

static void
gst_d3d11_vp9_dec_copy_segmentation_params (GstD3D11Vp9Dec * self,
GstVp9Picture * picture, DXVA_PicParams_VP9 * params)
{
const GstVp9FrameHeader *frame_hdr = &picture->frame_hdr;
const GstVp9SegmentationParams *sp = &frame_hdr->segmentation_params;
gint i, j;

params->stVP9Segments.enabled = sp->segmentation_enabled;
params->stVP9Segments.update_map = sp->segmentation_update_map;
params->stVP9Segments.temporal_update = sp->segmentation_temporal_update;
params->stVP9Segments.abs_delta = sp->segmentation_abs_or_delta_update;

G_STATIC_ASSERT (G_N_ELEMENTS (params->stVP9Segments.tree_probs) ==
G_N_ELEMENTS (sp->segmentation_tree_probs));
G_STATIC_ASSERT (sizeof (params->stVP9Segments.tree_probs) ==
sizeof (sp->segmentation_tree_probs));
memcpy (params->stVP9Segments.tree_probs, sp->segmentation_tree_probs,
sizeof (sp->segmentation_tree_probs));

G_STATIC_ASSERT (G_N_ELEMENTS (params->stVP9Segments.pred_probs) ==
G_N_ELEMENTS (sp->segmentation_pred_prob));
G_STATIC_ASSERT (sizeof (params->stVP9Segments.pred_probs) ==
sizeof (sp->segmentation_pred_prob));

if (sp->segmentation_temporal_update) {
memcpy (params->stVP9Segments.pred_probs, sp->segmentation_pred_prob,
sizeof (params->stVP9Segments.pred_probs));
} else {
memset (params->stVP9Segments.pred_probs, 255,
sizeof (params->stVP9Segments.pred_probs));
}

for (i = 0; i < GST_VP9_MAX_SEGMENTS; i++) {
params->stVP9Segments.feature_mask[i] =
(sp->feature_enabled[i][GST_VP9_SEG_LVL_ALT_Q] << 0) |
(sp->feature_enabled[i][GST_VP9_SEG_LVL_ALT_L] << 1) |
(sp->feature_enabled[i][GST_VP9_SEG_LVL_REF_FRAME] << 2) |
(sp->feature_enabled[i][GST_VP9_SEG_SEG_LVL_SKIP] << 3);

for (j = 0; j < 3; j++)
params->stVP9Segments.feature_data[i][j] = sp->feature_data[i][j];
params->stVP9Segments.feature_data[i][3] = 0;
}
return gst_d3d11_decoder_get_picture_id (self->decoder, picture);
}

static GstFlowReturn
gst_d3d11_vp9_dec_decode_picture (GstVp9Decoder * decoder,
GstVp9Picture * picture, GstVp9Dpb * dpb)
gst_d3d11_vp9_dec_start_picture (GstDxvaVp9Decoder * decoder,
GstCodecPicture * picture, guint8 * picture_id)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;
DXVA_PicParams_VP9 *pic_params = &inner->pic_params;
DXVA_Slice_VPx_Short *slice = &inner->slice;
ID3D11VideoDecoderOutputView *view;
guint8 view_id = 0xff;

view = gst_d3d11_decoder_get_output_view_from_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return GST_FLOW_ERROR;
}

memset (pic_params, 0, sizeof (DXVA_PicParams_VP9));

pic_params->CurrPic.Index7Bits = view_id;
pic_params->uncompressed_header_size_byte_aligned =
picture->frame_hdr.frame_header_length_in_bytes;
pic_params->first_partition_size = picture->frame_hdr.header_size_in_bytes;
pic_params->StatusReportFeedbackNumber = 1;

gst_d3d11_vp9_dec_copy_frame_params (self, picture, pic_params);
gst_d3d11_vp9_dec_copy_reference_frames (self, picture, dpb, pic_params);
gst_d3d11_vp9_dec_copy_frame_refs (self, picture, pic_params);
gst_d3d11_vp9_dec_copy_loop_filter_params (self, picture, pic_params);
gst_d3d11_vp9_dec_copy_quant_params (self, picture, pic_params);
gst_d3d11_vp9_dec_copy_segmentation_params (self, picture, pic_params);

inner->bitstream_buffer.resize (picture->size);
memcpy (&inner->bitstream_buffer[0], picture->data, picture->size);

slice->BSNALunitDataLocation = 0;
slice->SliceBytesInBuffer = inner->bitstream_buffer.size ();
slice->wBadSliceChopping = 0;

inner->last_frame_width = pic_params->width;
inner->last_frame_height = pic_params->height;
inner->last_show_frame = pic_params->show_frame;

return GST_FLOW_OK;
return gst_d3d11_decoder_start_picture (self->decoder, picture, picture_id);
}

static GstFlowReturn
gst_d3d11_vp9_dec_end_picture (GstVp9Decoder * decoder, GstVp9Picture * picture)
gst_d3d11_vp9_dec_end_picture (GstDxvaVp9Decoder * decoder,
GstCodecPicture * picture, GPtrArray * ref_pics,
const GstDxvaDecodingArgs * args)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;
size_t bitstream_buffer_size;
size_t bitstream_pos;
GstD3D11DecodeInputStreamArgs input_args;

if (inner->bitstream_buffer.empty ()) {
GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
return GST_FLOW_ERROR;
}

memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));

bitstream_pos = inner->bitstream_buffer.size ();
bitstream_buffer_size = GST_ROUND_UP_128 (bitstream_pos);

if (bitstream_buffer_size > bitstream_pos) {
size_t padding = bitstream_buffer_size - bitstream_pos;

/* As per DXVA spec, total amount of bitstream buffer size should be
* 128 bytes aligned. If actual data is not multiple of 128 bytes,
* the last slice data needs to be zero-padded */
inner->bitstream_buffer.resize (bitstream_buffer_size, 0);

inner->slice.SliceBytesInBuffer += padding;
}

input_args.picture_params = &inner->pic_params;
input_args.picture_params_size = sizeof (DXVA_PicParams_VP9);
input_args.slice_control = &inner->slice;
input_args.slice_control_size = sizeof (DXVA_Slice_VPx_Short);
input_args.bitstream = &inner->bitstream_buffer[0];
input_args.bitstream_size = inner->bitstream_buffer.size ();

return gst_d3d11_decoder_decode_picture (inner->d3d11_decoder,
GST_CODEC_PICTURE (picture), &input_args);
return gst_d3d11_decoder_end_picture (self->decoder, picture, args);
}
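
The 128-byte rule described in the comment above can be read in isolation as the following helper (a sketch, not part of this commit, assuming the same std::vector<guint8> bitstream buffer and DXVA_Slice_VPx_Short slice used in this file):

static void
pad_bitstream_to_dxva_alignment (std::vector<guint8> & bitstream,
    DXVA_Slice_VPx_Short * slice)
{
  size_t pos = bitstream.size ();
  size_t aligned = GST_ROUND_UP_128 (pos);

  if (aligned > pos) {
    /* zero-pad the tail of the last slice so that the submitted bitstream
     * buffer size is a multiple of 128 bytes, as DXVA requires */
    bitstream.resize (aligned, 0);
    slice->SliceBytesInBuffer += (guint) (aligned - pos);
  }
}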

static GstFlowReturn
gst_d3d11_vp9_dec_output_picture (GstVp9Decoder * decoder,
GstVideoCodecFrame * frame, GstVp9Picture * picture)
gst_d3d11_vp9_dec_output_picture (GstDxvaVp9Decoder * decoder,
GstVideoCodecFrame * frame, GstCodecPicture * picture,
GstVideoBufferFlags buffer_flags, gint display_width, gint display_height)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstD3D11Vp9DecInner *inner = self->inner;

return gst_d3d11_decoder_output_picture (inner->d3d11_decoder,
GST_VIDEO_DECODER (decoder), frame, GST_CODEC_PICTURE (picture),
0, picture->frame_hdr.width, picture->frame_hdr.height);
return gst_d3d11_decoder_output_picture (self->decoder,
GST_VIDEO_DECODER (decoder), frame, picture,
buffer_flags, display_width, display_height);
}

void

@ -739,20 +313,20 @@ gst_d3d11_vp9_dec_register (GstPlugin * plugin, GstD3D11Device * device,
const GUID *profile;
GTypeInfo type_info = {
sizeof (GstD3D11Vp9DecClass),
NULL,
NULL,
nullptr,
nullptr,
(GClassInitFunc) gst_d3d11_vp9_dec_class_init,
NULL,
NULL,
nullptr,
nullptr,
sizeof (GstD3D11Vp9Dec),
0,
(GInstanceInitFunc) gst_d3d11_vp9_dec_init,
};
const GUID *profile2_guid = NULL;
const GUID *profile0_guid = NULL;
GstCaps *sink_caps = NULL;
GstCaps *src_caps = NULL;
GstCaps *d3d11_caps = NULL;
const GUID *profile2_guid = nullptr;
const GUID *profile0_guid = nullptr;
GstCaps *sink_caps = nullptr;
GstCaps *src_caps = nullptr;
GstCaps *d3d11_caps = nullptr;
guint max_width = 0;
guint max_height = 0;
guint resolution;

@ -843,7 +417,7 @@ gst_d3d11_vp9_dec_register (GstPlugin * plugin, GstD3D11Device * device,

d3d11_caps = gst_caps_copy (src_caps);
gst_caps_set_features_simple (d3d11_caps,
gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, NULL));
gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, nullptr));
src_caps = gst_caps_merge (d3d11_caps, src_caps);

/* To cover both landscape and portrait, select max value */

@ -864,7 +438,7 @@ gst_d3d11_vp9_dec_register (GstPlugin * plugin, GstD3D11Device * device,
feature_name = g_strdup_printf ("d3d11vp9device%ddec", index);
}

type = g_type_register_static (GST_TYPE_VP9_DECODER,
type = g_type_register_static (GST_TYPE_DXVA_VP9_DECODER,
type_name, &type_info, (GTypeFlags) 0);

/* make lower rank than default device */
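
Stripped of the caps and profile probing, the per-device registration this hunk feeds into follows the usual dynamic-GType pattern (a sketch only; the type name, element name and cdata plumbing shown here are illustrative rather than the exact code of this commit):

static void
register_d3d11_vp9_subtype (GstPlugin * plugin,
    GstD3D11DecoderClassData * cdata, guint rank)
{
  GTypeInfo type_info = {
    sizeof (GstD3D11Vp9DecClass),
    nullptr,
    nullptr,
    (GClassInitFunc) gst_d3d11_vp9_dec_class_init,
    nullptr,
    cdata,                      /* delivered to class_init as its "data" arg */
    sizeof (GstD3D11Vp9Dec),
    0,
    (GInstanceInitFunc) gst_d3d11_vp9_dec_init,
  };
  GType type;

  /* register against the new GstDxva baseclass and expose the element */
  type = g_type_register_static (GST_TYPE_DXVA_VP9_DECODER,
      "GstD3D11Vp9Dec", &type_info, (GTypeFlags) 0);
  gst_element_register (plugin, "d3d11vp9dec", rank, type);
}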


@ -17,8 +17,7 @@
* Boston, MA 02110-1301, USA.
*/

#ifndef __GST_D3D11_VP9_DEC_H__
#define __GST_D3D11_VP9_DEC_H__
#pragma once

#include "gstd3d11decoder.h"

@ -30,4 +29,3 @@ void gst_d3d11_vp9_dec_register (GstPlugin * plugin,

G_END_DECLS

#endif /* __GST_D3D11_VP9_DEC_H__ */


@ -37,7 +37,7 @@ if host_system != 'windows' or d3d11_option.disabled()
subdir_done()
endif

if not gstd3d11_dep.found() or not cc.has_header('dxva.h') or not cc.has_header('d3d9.h')
if not gstd3d11_dep.found() or not gstdxva_dep.found()
if d3d11_option.enabled()
error('The d3d11 was enabled explicitly, but required dependencies were not found.')
endif

@ -129,7 +129,7 @@ gstd3d11 = library('gstd3d11',
cpp_args: gst_plugins_bad_args + extra_args,
include_directories : [configinc],
dependencies : [gstbase_dep, gstvideo_dep, gmodule_dep, gstcontroller_dep,
gstd3d11_dep, gstcodecs_dep, d2d_dep] + extra_dep,
gstd3d11_dep, gstdxva_dep, d2d_dep] + extra_dep,
install : true,
install_dir : plugins_install_dir,
)