/* GStreamer
 * Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstqsvdecoder.h"
#include <mfxvideo++.h>
#include <string.h>

#ifdef G_OS_WIN32
#include "gstqsvallocator_d3d11.h"

#include <wrl.h>

/* *INDENT-OFF* */
using namespace Microsoft::WRL;
/* *INDENT-ON* */
#else
#include "gstqsvallocator_va.h"
#endif /* G_OS_WIN32 */

GST_DEBUG_CATEGORY_STATIC (gst_qsv_decoder_debug);
#define GST_CAT_DEFAULT gst_qsv_decoder_debug

#define GST_QSV_DECODER_FLOW_NEW_SEQUENCE GST_FLOW_CUSTOM_SUCCESS_1

enum
{
  PROP_0,
  PROP_ADAPTER_LUID,
  PROP_DEVICE_PATH,
};

typedef struct _GstQsvDecoderSurface
{
  mfxFrameSurface1 surface;

  /* mfxFrameSurface1:Data:MemId */
  GstQsvFrame *frame;
  gboolean need_output;
} GstQsvDecoderSurface;

typedef struct _GstQsvDecoderTask
{
  mfxSyncPoint sync_point;

  /* without ownership */
  GstQsvDecoderSurface *surface;
} GstQsvDecoderTask;

struct _GstQsvDecoderPrivate
{
  GstObject *device;

  GstVideoCodecState *input_state;
  GstVideoCodecState *output_state;
  GstQsvAllocator *allocator;

  GstBufferPool *internal_pool;

  GstVideoInfo info;
  GstVideoInfo aligned_info;

  mfxSession session;
  mfxVideoParam video_param;

  /* holding allocated GstQsvFrame, should be cleared via
   * mfxFrameAllocator::Free() */
  mfxFrameAllocResponse response;

  MFXVideoDECODE *decoder;
  GstQsvMemoryType mem_type;
  gboolean use_video_memory;
  gboolean have_video_meta;

  gboolean is_live;

  /* Array of GstQsvDecoderSurface */
  GArray *surface_pool;

  /* Array of GstQsvDecoderTask */
  GArray *task_pool;
  guint next_task_index;
};

/**
 * GstQsvDecoder:
 *
 * Base class for Intel Quick Sync video decoders
 *
 * Since: 1.22
 */
#define gst_qsv_decoder_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstQsvDecoder, gst_qsv_decoder,
    GST_TYPE_VIDEO_DECODER, G_ADD_PRIVATE (GstQsvDecoder);
    GST_DEBUG_CATEGORY_INIT (gst_qsv_decoder_debug,
        "qsvdecoder", 0, "qsvdecoder"));

static void gst_qsv_decoder_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
static void gst_qsv_decoder_dispose (GObject * object);
static void gst_qsv_decoder_finalize (GObject * object);

static void gst_qsv_decoder_set_context (GstElement * element,
    GstContext * context);

static gboolean gst_qsv_decoder_open (GstVideoDecoder * decoder);
static gboolean gst_qsv_decoder_stop (GstVideoDecoder * decoder);
static gboolean gst_qsv_decoder_close (GstVideoDecoder * decoder);
static gboolean gst_qsv_decoder_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state);
static gboolean gst_qsv_decoder_negotiate (GstVideoDecoder * decoder);
static gboolean gst_qsv_decoder_decide_allocation (GstVideoDecoder * decoder,
    GstQuery * query);
static gboolean gst_qsv_decoder_sink_query (GstVideoDecoder * decoder,
    GstQuery * query);
static gboolean gst_qsv_decoder_src_query (GstVideoDecoder * decoder,
    GstQuery * query);
static GstFlowReturn gst_qsv_decoder_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame);
static gboolean gst_qsv_decoder_flush (GstVideoDecoder * decoder);
static GstFlowReturn gst_qsv_decoder_finish (GstVideoDecoder * decoder);
static GstFlowReturn gst_qsv_decoder_drain (GstVideoDecoder * decoder);

static void gst_qsv_decoder_surface_clear (GstQsvDecoderSurface * surface);
static void gst_qsv_decoder_task_clear (GstQsvDecoderTask * task);
static gboolean gst_qsv_decoder_negotiate_internal (GstVideoDecoder * decoder,
    const mfxFrameInfo * frame_info);

static void
gst_qsv_decoder_class_init (GstQsvDecoderClass * klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstVideoDecoderClass *videodec_class = GST_VIDEO_DECODER_CLASS (klass);
  GParamFlags param_flags = (GParamFlags) (GST_PARAM_DOC_SHOW_DEFAULT |
      GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_READABLE |
      G_PARAM_STATIC_STRINGS);

  object_class->get_property = gst_qsv_decoder_get_property;
  object_class->dispose = gst_qsv_decoder_dispose;
  object_class->finalize = gst_qsv_decoder_finalize;

#ifdef G_OS_WIN32
  g_object_class_install_property (object_class, PROP_ADAPTER_LUID,
      g_param_spec_int64 ("adapter-luid", "Adapter LUID",
          "DXGI Adapter LUID (Locally Unique Identifier) of created device",
          G_MININT64, G_MAXINT64, 0, param_flags));
#else
  g_object_class_install_property (object_class, PROP_DEVICE_PATH,
      g_param_spec_string ("device-path", "Device Path",
          "DRM device path", nullptr, param_flags));
#endif

  element_class->set_context = GST_DEBUG_FUNCPTR (gst_qsv_decoder_set_context);

  videodec_class->open = GST_DEBUG_FUNCPTR (gst_qsv_decoder_open);
  videodec_class->stop = GST_DEBUG_FUNCPTR (gst_qsv_decoder_stop);
  videodec_class->close = GST_DEBUG_FUNCPTR (gst_qsv_decoder_close);
  videodec_class->negotiate = GST_DEBUG_FUNCPTR (gst_qsv_decoder_negotiate);
  videodec_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_qsv_decoder_decide_allocation);
  videodec_class->sink_query = GST_DEBUG_FUNCPTR (gst_qsv_decoder_sink_query);
  videodec_class->src_query = GST_DEBUG_FUNCPTR (gst_qsv_decoder_src_query);
  videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_qsv_decoder_set_format);
  videodec_class->handle_frame =
      GST_DEBUG_FUNCPTR (gst_qsv_decoder_handle_frame);
  videodec_class->drain = GST_DEBUG_FUNCPTR (gst_qsv_decoder_drain);
  videodec_class->finish = GST_DEBUG_FUNCPTR (gst_qsv_decoder_finish);
  videodec_class->flush = GST_DEBUG_FUNCPTR (gst_qsv_decoder_flush);

  gst_type_mark_as_plugin_api (GST_TYPE_QSV_DECODER, (GstPluginAPIFlags) 0);
}

static void
gst_qsv_decoder_init (GstQsvDecoder * self)
{
  GstQsvDecoderPrivate *priv;

  priv = self->priv =
      (GstQsvDecoderPrivate *) gst_qsv_decoder_get_instance_private (self);

  priv->surface_pool = g_array_new (FALSE, TRUE, sizeof (GstQsvDecoderSurface));
  g_array_set_clear_func (priv->surface_pool,
      (GDestroyNotify) gst_qsv_decoder_surface_clear);

  priv->task_pool = g_array_new (FALSE, TRUE, sizeof (GstQsvDecoderTask));
  g_array_set_clear_func (priv->task_pool,
      (GDestroyNotify) gst_qsv_decoder_task_clear);

  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE);
}

static void
gst_qsv_decoder_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (object);

  switch (prop_id) {
    case PROP_ADAPTER_LUID:
      g_value_set_int64 (value, klass->adapter_luid);
      break;
    case PROP_DEVICE_PATH:
      g_value_set_string (value, klass->display_path);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_qsv_decoder_dispose (GObject * object)
{
  GstQsvDecoder *self = GST_QSV_DECODER (object);
  GstQsvDecoderPrivate *priv = self->priv;

  gst_clear_object (&priv->device);

  G_OBJECT_CLASS (parent_class)->dispose (object);
}

static void
gst_qsv_decoder_finalize (GObject * object)
{
  GstQsvDecoder *self = GST_QSV_DECODER (object);
  GstQsvDecoderPrivate *priv = self->priv;

  g_array_unref (priv->task_pool);
  g_array_unref (priv->surface_pool);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_qsv_decoder_set_context (GstElement * element, GstContext * context)
{
  GstQsvDecoder *self = GST_QSV_DECODER (element);
  GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (element);
  GstQsvDecoderPrivate *priv = self->priv;

#ifdef G_OS_WIN32
  gst_d3d11_handle_set_context_for_adapter_luid (element,
      context, klass->adapter_luid, (GstD3D11Device **) & priv->device);
#else
  gst_va_handle_set_context (element, context, klass->display_path,
      (GstVaDisplay **) & priv->device);
#endif

  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}

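/* Platform-specific device setup: attach the native device handle (D3D11
 * device on Windows, VA display elsewhere) and our GstQsvAllocator to the
 * MFX session before any decoding can happen */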
#ifdef G_OS_WIN32
static gboolean
gst_qsv_decoder_open_platform_device (GstQsvDecoder * self)
{
  GstQsvDecoderPrivate *priv = self->priv;
  GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
  ComPtr < ID3D10Multithread > multi_thread;
  HRESULT hr;
  ID3D11Device *device_handle;
  mfxStatus status;
  GstD3D11Device *device;

  if (!gst_d3d11_ensure_element_data_for_adapter_luid (GST_ELEMENT (self),
          klass->adapter_luid, (GstD3D11Device **) & priv->device)) {
    GST_ERROR_OBJECT (self, "d3d11 device is unavailable");
    return FALSE;
  }

  device = GST_D3D11_DEVICE_CAST (priv->device);
  priv->allocator = gst_qsv_d3d11_allocator_new (device);

  /* For the D3D11 device handle to be used by QSV, the multithread protection
   * layer must be enabled before the MFXVideoCORE_SetHandle() call.
   *
   * TODO: Need to check the performance impact of this multithread protection
   * layer, since it may have a negative impact on overall pipeline
   * performance. If so, we should create a d3d11 device dedicated to the
   * decoding session and make use of shared resources */
  device_handle = gst_d3d11_device_get_device_handle (device);
  hr = device_handle->QueryInterface (IID_PPV_ARGS (&multi_thread));
  if (!gst_d3d11_result (hr, device)) {
    GST_ERROR_OBJECT (self, "ID3D10Multithread interface is unavailable");
    return FALSE;
  }

  multi_thread->SetMultithreadProtected (TRUE);
  status = MFXVideoCORE_SetHandle (priv->session, MFX_HANDLE_D3D11_DEVICE,
      device_handle);
  if (status != MFX_ERR_NONE) {
    GST_ERROR_OBJECT (self, "Failed to set d3d11 device handle");
    return FALSE;
  }

  /* Similar to the QSV encoder, we don't use this allocator for actual
   * D3D11 texture allocation. But it is still required because of the QSV API
   * design. */
  status = MFXVideoCORE_SetFrameAllocator (priv->session,
      gst_qsv_allocator_get_allocator_handle (priv->allocator));
  if (status != MFX_ERR_NONE) {
    GST_ERROR_OBJECT (self, "Failed to set frame allocator %d", status);
    return FALSE;
  }

  return TRUE;
}
#else
static gboolean
gst_qsv_decoder_open_platform_device (GstQsvDecoder * self)
{
  GstQsvDecoderPrivate *priv = self->priv;
  GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
  mfxStatus status;
  GstVaDisplay *display;

  if (!gst_va_ensure_element_data (GST_ELEMENT (self), klass->display_path,
          (GstVaDisplay **) & priv->device)) {
    GST_ERROR_OBJECT (self, "VA display is unavailable");
    return FALSE;
  }

  display = GST_VA_DISPLAY (priv->device);

  priv->allocator = gst_qsv_va_allocator_new (display);

  status = MFXVideoCORE_SetHandle (priv->session, MFX_HANDLE_VA_DISPLAY,
      gst_va_display_get_va_dpy (display));
  if (status != MFX_ERR_NONE) {
    GST_ERROR_OBJECT (self, "Failed to set VA display handle");
    return FALSE;
  }

  status = MFXVideoCORE_SetFrameAllocator (priv->session,
      gst_qsv_allocator_get_allocator_handle (priv->allocator));
  if (status != MFX_ERR_NONE) {
    GST_ERROR_OBJECT (self, "Failed to set frame allocator %d", status);
    return FALSE;
  }

  return TRUE;
}
#endif

static gboolean
gst_qsv_decoder_open (GstVideoDecoder * decoder)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
  GstQsvDecoderPrivate *priv = self->priv;
  GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
  mfxStatus status;

  status = MFXCreateSession (gst_qsv_get_loader (), klass->impl_index,
      &priv->session);
  if (status != MFX_ERR_NONE) {
    GST_ERROR_OBJECT (self, "Failed to create session");
    return FALSE;
  }

  if (!gst_qsv_decoder_open_platform_device (self)) {
    g_clear_pointer (&priv->session, MFXClose);
    gst_clear_object (&priv->allocator);
    gst_clear_object (&priv->device);

    return FALSE;
  }

  return TRUE;
}

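/* Release the MFX decoder, internal buffer pool, allocator response and
 * surface/task pools so that a new sequence can be configured from scratch */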
static gboolean
gst_qsv_decoder_reset (GstQsvDecoder * self)
{
  GstQsvDecoderPrivate *priv = self->priv;

  GST_DEBUG_OBJECT (self, "Reset");

  if (priv->decoder) {
    delete priv->decoder;
    priv->decoder = nullptr;
  }

  if (priv->internal_pool) {
    gst_buffer_pool_set_active (priv->internal_pool, FALSE);
    gst_clear_object (&priv->internal_pool);
  }

  if (priv->allocator) {
    mfxFrameAllocator *alloc =
        gst_qsv_allocator_get_allocator_handle (priv->allocator);
    alloc->Free ((mfxHDL) priv->allocator, &priv->response);
  }
  memset (&priv->response, 0, sizeof (mfxFrameAllocResponse));

  g_array_set_size (priv->surface_pool, 0);
  g_array_set_size (priv->task_pool, 0);

  return TRUE;
}

static gboolean
gst_qsv_decoder_stop (GstVideoDecoder * decoder)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
  GstQsvDecoderPrivate *priv = self->priv;

  g_clear_pointer (&priv->input_state, gst_video_codec_state_unref);
  g_clear_pointer (&priv->output_state, gst_video_codec_state_unref);

  return gst_qsv_decoder_reset (self);
}

static gboolean
gst_qsv_decoder_close (GstVideoDecoder * decoder)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
  GstQsvDecoderPrivate *priv = self->priv;

  g_clear_pointer (&priv->session, MFXClose);
  gst_clear_object (&priv->allocator);
  gst_clear_object (&priv->device);

  return TRUE;
}

static void
gst_qsv_decoder_surface_clear (GstQsvDecoderSurface * surface)
{
  if (!surface)
    return;

  memset (surface, 0, sizeof (GstQsvDecoderSurface));
}

static void
gst_qsv_decoder_task_clear (GstQsvDecoderTask * task)
{
  if (!task)
    return;

  if (task->surface) {
    task->surface->need_output = FALSE;
    if (task->surface->frame && task->surface->surface.Data.Locked == 0)
      gst_qsv_frame_set_buffer (task->surface->frame, nullptr);
  }

  task->surface = nullptr;
  task->sync_point = nullptr;
}

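/* Pick an unlocked surface that is not pending output from the pool, and
 * back it with a fresh GstBuffer acquired from our internal pool */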
static GstQsvDecoderSurface *
gst_qsv_decoder_get_next_surface (GstQsvDecoder * self)
{
  GstQsvDecoderPrivate *priv = self->priv;
  GstQsvDecoderSurface *surface = nullptr;
  GstBuffer *buffer;
  GstFlowReturn ret;

  /* Clear unlocked frames as well */
  for (guint i = 0; i < priv->surface_pool->len; i++) {
    GstQsvDecoderSurface *iter =
        &g_array_index (priv->surface_pool, GstQsvDecoderSurface, i);

    if (iter->surface.Data.Locked > 0 || iter->need_output)
      continue;

    gst_qsv_frame_set_buffer (iter->frame, nullptr);

    if (!surface)
      surface = iter;
  }

  if (!surface) {
    GST_ERROR_OBJECT (self, "Failed to find unlocked surface");
    return nullptr;
  }

  ret = gst_buffer_pool_acquire_buffer (priv->internal_pool, &buffer, nullptr);
  if (ret != GST_FLOW_OK) {
    GST_ERROR_OBJECT (self, "Failed to allocate buffer");
    return nullptr;
  }

  gst_qsv_frame_set_buffer (surface->frame, buffer);

  return surface;
}

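/* Tasks are used in round-robin order; the returned slot may still hold a
 * pending sync point which the caller must finish before reusing it */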
static GstQsvDecoderTask *
gst_qsv_decoder_get_next_task (GstQsvDecoder * self)
{
  GstQsvDecoderPrivate *priv = self->priv;
  GstQsvDecoderTask *task;

  task = &g_array_index (priv->task_pool,
      GstQsvDecoderTask, priv->next_task_index);
  priv->next_task_index++;
  priv->next_task_index %= priv->task_pool->len;

  return task;
}

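/* Map a decoded timestamp back to a pending GstVideoCodecFrame: prefer an
 * exact PTS match, otherwise take the closest one, and fall back to the
 * oldest frame when no usable PTS is available */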
static GstVideoCodecFrame *
gst_qsv_decoder_find_output_frame (GstQsvDecoder * self, GstClockTime pts)
{
  GList *frames, *iter;
  GstVideoCodecFrame *ret = nullptr;
  GstVideoCodecFrame *closest = nullptr;
  guint64 min_pts_abs_diff = 0;

  /* Give up and just return the oldest frame */
  if (!GST_CLOCK_TIME_IS_VALID (pts))
    return gst_video_decoder_get_oldest_frame (GST_VIDEO_DECODER (self));

  frames = gst_video_decoder_get_frames (GST_VIDEO_DECODER (self));

  for (iter = frames; iter; iter = g_list_next (iter)) {
    GstVideoCodecFrame *frame = (GstVideoCodecFrame *) iter->data;
    guint64 abs_diff;

    if (!GST_CLOCK_TIME_IS_VALID (frame->pts))
      continue;

    if (pts == frame->pts) {
      ret = frame;
      break;
    }

    if (pts >= frame->pts)
      abs_diff = pts - frame->pts;
    else
      abs_diff = frame->pts - pts;

    if (!closest || abs_diff < min_pts_abs_diff) {
      closest = frame;
      min_pts_abs_diff = abs_diff;
    }
  }

  if (!ret && closest)
    ret = closest;

  if (ret) {
    gst_video_codec_frame_ref (ret);

    /* Release older frames; this can happen if an input buffer holds only a
     * single field in case of H264 */
    for (iter = frames; iter; iter = g_list_next (iter)) {
      GstVideoCodecFrame *frame = (GstVideoCodecFrame *) iter->data;

      if (frame == ret)
        continue;

      if (!GST_CLOCK_TIME_IS_VALID (frame->pts))
        continue;

      if (frame->pts < ret->pts) {
        gst_video_decoder_release_frame (GST_VIDEO_DECODER (self),
            gst_video_codec_frame_ref (frame));
      }
    }
  } else {
    ret = gst_video_decoder_get_oldest_frame (GST_VIDEO_DECODER (self));
  }

  if (frames)
    g_list_free_full (frames, (GDestroyNotify) gst_video_codec_frame_unref);

  return ret;
}

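/* Wait for the task's sync point, handle VP9 dynamic resolution change,
 * download the decoded surface into an output GstBuffer and push it
 * downstream via the matching codec frame */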
static GstFlowReturn
gst_qsv_decoder_finish_frame (GstQsvDecoder * self, GstQsvDecoderTask * task,
    gboolean flushing)
{
  GstVideoDecoder *vdec = GST_VIDEO_DECODER (self);
  GstQsvDecoderPrivate *priv = self->priv;
  GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
  mfxStatus status;
  GstVideoCodecFrame *frame;
  GstClockTime pts = GST_CLOCK_TIME_NONE;
  GstBuffer *buffer;
  guint retry_count = 0;
  /* magic number */
  const guint retry_threshold = 100;
  GstQsvDecoderSurface *surface = task->surface;
  GstBufferPool *pool;
  gboolean force_copy = FALSE;

  g_assert (surface != nullptr);
  g_assert (task->sync_point != nullptr);

  status = MFX_ERR_NONE;
  do {
    /* magic number 100 ms */
    status = MFXVideoCORE_SyncOperation (priv->session, task->sync_point, 100);

    /* Retry up to 10 sec (100 ms x 100 times), that should be enough time for
     * decoding a frame using hardware */
    if (status == MFX_WRN_IN_EXECUTION && retry_count < retry_threshold) {
      GST_DEBUG_OBJECT (self,
          "Operation is still in execution, retry count (%d/%d)",
          retry_count, retry_threshold);
      retry_count++;
      continue;
    }

    break;
  } while (TRUE);

  if (flushing) {
    gst_qsv_decoder_task_clear (task);
    return GST_FLOW_OK;
  }

  if (status != MFX_ERR_NONE) {
    gst_qsv_decoder_task_clear (task);

    if (status == MFX_ERR_ABORTED) {
      GST_INFO_OBJECT (self, "Operation was aborted");
      return GST_FLOW_FLUSHING;
    }

    GST_WARNING_OBJECT (self, "SyncOperation returned %d (%s)",
        QSV_STATUS_ARGS (status));

    return GST_FLOW_ERROR;
  }

  /* Handle non-keyframe resolution change */
  if (klass->codec_id == MFX_CODEC_VP9) {
    guint width, height;

    if (surface->surface.Info.CropW > 0 && surface->surface.Info.CropH > 0) {
      width = surface->surface.Info.CropW;
      height = surface->surface.Info.CropH;
    } else {
      width = surface->surface.Info.Width;
      height = surface->surface.Info.Height;
    }

    if (width != (guint) priv->output_state->info.width ||
        height != (guint) priv->output_state->info.height) {
      GST_DEBUG_OBJECT (self,
          "VP9 resolution change %dx%d -> %dx%d, negotiate again",
          priv->output_state->info.width, priv->output_state->info.height,
          width, height);
      if (!gst_qsv_decoder_negotiate_internal (vdec, &surface->surface.Info)) {
        GST_ERROR_OBJECT (self, "Could not negotiate with downstream");
        return GST_FLOW_NOT_NEGOTIATED;
      }
    }

    /* TODO: Use crop meta if supported by downstream.
     * Most d3d11 elements support crop meta */
    if (width != (guint) priv->info.width
        || height != (guint) priv->info.height)
      force_copy = TRUE;
  }

  pts = gst_qsv_timestamp_to_gst (surface->surface.Data.TimeStamp);
  pool = gst_video_decoder_get_buffer_pool (vdec);
  if (!pool) {
    GST_ERROR_OBJECT (self, "Decoder doesn't hold buffer pool");
    gst_qsv_decoder_task_clear (task);
    return GST_FLOW_ERROR;
  }

  if (priv->use_video_memory) {
    /* Copy the decoded frame in case of reverse playback; too many frames
     * bound to the decoder may make the driver unhappy */
    if (vdec->input_segment.rate < 0.0)
      force_copy = TRUE;
  } else if (!priv->have_video_meta) {
    /* downstream does not support video meta, need copy */
    force_copy = TRUE;
  }

  /* TODO: Handle non-zero crop-{x,y} position via crop meta or similar */
  buffer = gst_qsv_allocator_download_frame (priv->allocator, force_copy,
      surface->frame, &priv->output_state->info, pool);
  gst_object_unref (pool);
  gst_qsv_decoder_task_clear (task);

  if (!buffer) {
    GST_ERROR_OBJECT (self, "No output buffer");
    return GST_FLOW_ERROR;
  }

  if (priv->aligned_info.interlace_mode == GST_VIDEO_INTERLACE_MODE_MIXED) {
    if ((surface->surface.Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF) != 0) {
      GST_BUFFER_FLAG_SET (buffer,
          GST_VIDEO_BUFFER_FLAG_TFF | GST_VIDEO_BUFFER_FLAG_INTERLACED);
    } else if ((surface->surface.Info.PicStruct & MFX_PICSTRUCT_FIELD_BFF) != 0) {
      GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
      GST_BUFFER_FLAG_UNSET (buffer, GST_VIDEO_BUFFER_FLAG_TFF);
    }
  }

  frame = gst_qsv_decoder_find_output_frame (self, pts);
  if (frame) {
    frame->pts = pts;
    frame->output_buffer = buffer;

    return gst_video_decoder_finish_frame (vdec, frame);
  }

  /* No matching frame is available; something went wrong, but we can still
   * push this buffer */
  GST_WARNING_OBJECT (self, "Failed to find corresponding frame");
  GST_BUFFER_PTS (buffer) = pts;

  return gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), buffer);
}

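/* Feed the bitstream to DecodeFrameAsync() and map MFX status codes to flow
 * returns. A nullptr bitstream drains the decoder. Finished tasks are flushed
 * as their slots get reused */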
static GstFlowReturn
gst_qsv_decoder_decode_frame (GstQsvDecoder * self, mfxBitstream * bitstream,
    gboolean flushing)
{
  GstQsvDecoderPrivate *priv = self->priv;
  mfxStatus status;
  guint retry_count = 0;
  /* magic number */
  const guint retry_threshold = 1000;
  GstQsvDecoderSurface *surface = nullptr;
  GstFlowReturn ret;

  do {
    mfxFrameSurface1 *out_surface = nullptr;
    GstQsvDecoderTask *task = gst_qsv_decoder_get_next_task (self);
    if (task->sync_point) {
      ret = gst_qsv_decoder_finish_frame (self, task, flushing);

      if (ret != GST_FLOW_OK)
        return ret;
    }

    if (!surface)
      surface = gst_qsv_decoder_get_next_surface (self);

    if (!surface) {
      GST_ERROR_OBJECT (self, "No available surface");
      return GST_FLOW_ERROR;
    }

    status = priv->decoder->DecodeFrameAsync (bitstream, &surface->surface,
        &out_surface, &task->sync_point);

    if (status != MFX_ERR_NONE) {
      GST_LOG_OBJECT (self, "DecodeFrameAsync returned %d (%s)",
          QSV_STATUS_ARGS (status));
    }

    if (out_surface) {
      g_assert (task->sync_point != nullptr);

      for (guint i = 0; i < priv->surface_pool->len; i++) {
        GstQsvDecoderSurface *iter =
            &g_array_index (priv->surface_pool, GstQsvDecoderSurface, i);

        if (iter->surface.Data.MemId == out_surface->Data.MemId) {
          task->surface = iter;
          break;
        }
      }

      if (!task->surface) {
        GST_ERROR_OBJECT (self, "Failed to find surface");
        gst_qsv_decoder_task_clear (task);
        return GST_FLOW_ERROR;
      }

      /* Set need-output so that the underlying GstBuffer is held until output
       * happens */
      task->surface->need_output = TRUE;
    }

    switch (status) {
      case MFX_ERR_NONE:
      case MFX_WRN_VIDEO_PARAM_CHANGED:
        if (surface->surface.Data.Locked > 0)
          surface = nullptr;

        if (bitstream && bitstream->DataLength == 0)
          return GST_FLOW_OK;
        break;
      case MFX_ERR_MORE_SURFACE:
        return GST_FLOW_OK;
      case MFX_ERR_INCOMPATIBLE_VIDEO_PARAM:
        GST_DEBUG_OBJECT (self, "Found new sequence");
        return GST_QSV_DECODER_FLOW_NEW_SEQUENCE;
      case MFX_ERR_MORE_DATA:
        return GST_VIDEO_DECODER_FLOW_NEED_DATA;
      case MFX_WRN_DEVICE_BUSY:
        GST_LOG_OBJECT (self, "GPU is busy, retry count (%d/%d)",
            retry_count, retry_threshold);

        if (retry_count > retry_threshold) {
          GST_ERROR_OBJECT (self, "Give up");
          return GST_FLOW_ERROR;
        }

        retry_count++;

        /* Magic number 1ms */
        g_usleep (1000);
        break;
      default:
        if (status < MFX_ERR_NONE) {
          GST_ERROR_OBJECT (self, "Got error %d (%s)",
              QSV_STATUS_ARGS (status));
          return GST_FLOW_ERROR;
        }
        break;
    }
  } while (TRUE);

  return GST_FLOW_ERROR;
}

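/* Drain: keep calling the decoder with a nullptr bitstream until it reports
 * that it needs more data, then finish all outstanding tasks */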
static GstFlowReturn
gst_qsv_decoder_drain_internal (GstQsvDecoder * self, gboolean flushing)
{
  GstQsvDecoderPrivate *priv = self->priv;
  GstFlowReturn ret = GST_FLOW_OK;

  if (!priv->session || !priv->decoder)
    return GST_FLOW_OK;

  do {
    ret = gst_qsv_decoder_decode_frame (self, nullptr, flushing);
  } while (ret != GST_VIDEO_DECODER_FLOW_NEED_DATA && ret >= GST_FLOW_OK);

  for (guint i = 0; i < priv->task_pool->len; i++) {
    GstQsvDecoderTask *task = gst_qsv_decoder_get_next_task (self);

    if (!task->sync_point)
      continue;

    ret = gst_qsv_decoder_finish_frame (self, task, flushing);
  }

  switch (ret) {
    case GST_VIDEO_DECODER_FLOW_NEED_DATA:
    case GST_QSV_DECODER_FLOW_NEW_SEQUENCE:
      return GST_FLOW_OK;
    default:
      break;
  }

  return ret;
}

static gboolean
gst_qsv_decoder_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
  GstQsvDecoderPrivate *priv = self->priv;
  GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
  GstQuery *query;

  GST_DEBUG_OBJECT (self, "Set format with %" GST_PTR_FORMAT, state->caps);

  gst_qsv_decoder_drain_internal (self, FALSE);

  g_clear_pointer (&priv->input_state, gst_video_codec_state_unref);

  priv->input_state = gst_video_codec_state_ref (state);

  memset (&priv->video_param, 0, sizeof (mfxVideoParam));
  priv->video_param.mfx.CodecId = klass->codec_id;

  /* If upstream is live, we will use single async-depth for low-latency
   * decoding */
  query = gst_query_new_latency ();
  if (gst_pad_peer_query (GST_VIDEO_DECODER_SINK_PAD (self), query))
    gst_query_parse_latency (query, &priv->is_live, nullptr, nullptr);
  gst_query_unref (query);

  /* We will open decoder later once sequence header is parsed */
  if (klass->set_format)
    return klass->set_format (self, state);

  return TRUE;
}

#ifdef G_OS_WIN32
static gboolean
gst_qsv_decoder_prepare_d3d11_pool (GstQsvDecoder * self,
    GstCaps * caps, GstVideoInfo * info, GstVideoAlignment * align)
{
  GstQsvDecoderPrivate *priv = self->priv;
  GstStructure *config;
  GstD3D11AllocationParams *params;
  GstD3D11Device *device = GST_D3D11_DEVICE_CAST (priv->device);
  guint bind_flags = 0;
  GstD3D11Format d3d11_format;

  GST_DEBUG_OBJECT (self, "Use d3d11 memory pool");

  priv->internal_pool = gst_d3d11_buffer_pool_new (device);
  config = gst_buffer_pool_get_config (priv->internal_pool);

  gst_d3d11_device_get_format (device, GST_VIDEO_INFO_FORMAT (info),
      &d3d11_format);

  /* May not support DOV (decoder output view), specifically in the RGB
   * output case */
  if ((d3d11_format.format_support[0] &
          (guint) D3D11_FORMAT_SUPPORT_DECODER_OUTPUT) != 0) {
    bind_flags |= D3D11_BIND_DECODER;
  } else if ((d3d11_format.format_support[0] &
          (guint) D3D11_FORMAT_SUPPORT_RENDER_TARGET) != 0) {
    bind_flags |= D3D11_BIND_RENDER_TARGET;
  }
  /* Bind to shader resource as well so that this texture can be used
   * in a generic pixel shader */
  if ((d3d11_format.format_support[0] & (guint)
          D3D11_FORMAT_SUPPORT_SHADER_SAMPLE) != 0)
    bind_flags |= D3D11_BIND_SHADER_RESOURCE;

  params = gst_d3d11_allocation_params_new (device, info,
      GST_D3D11_ALLOCATION_FLAG_DEFAULT, bind_flags, 0);
  gst_d3d11_allocation_params_alignment (params, align);
  gst_buffer_pool_config_set_d3d11_allocation_params (config, params);
  gst_d3d11_allocation_params_free (params);
  gst_buffer_pool_config_set_params (config, caps,
      GST_VIDEO_INFO_SIZE (info), 0, 0);
  gst_buffer_pool_set_config (priv->internal_pool, config);
  gst_buffer_pool_set_active (priv->internal_pool, TRUE);

  return TRUE;
}
#endif

static gboolean
gst_qsv_decoder_prepare_system_pool (GstQsvDecoder * self,
    GstCaps * caps, GstVideoInfo * info, GstVideoAlignment * align)
{
  GstQsvDecoderPrivate *priv = self->priv;
  GstStructure *config;

  GST_DEBUG_OBJECT (self, "Use system memory pool");

  priv->internal_pool = gst_video_buffer_pool_new ();
  config = gst_buffer_pool_get_config (priv->internal_pool);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
  gst_buffer_pool_config_add_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
  gst_buffer_pool_config_set_video_alignment (config, align);
  gst_buffer_pool_config_set_params (config,
      caps, GST_VIDEO_INFO_SIZE (info), 0, 0);

  gst_buffer_pool_set_config (priv->internal_pool, config);
  gst_buffer_pool_set_active (priv->internal_pool, TRUE);

  return TRUE;
}

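/* Choose the internal pool flavor (d3d11 video memory where possible,
 * otherwise system memory) and report the matching MFX IOPattern */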
static gboolean
gst_qsv_decoder_prepare_pool (GstQsvDecoder * self, mfxU16 * io_pattern)
{
  GstQsvDecoderPrivate *priv = self->priv;
  gboolean ret = FALSE;
  GstCaps *caps;
  GstVideoAlignment align;

  if (priv->internal_pool) {
    gst_buffer_pool_set_active (priv->internal_pool, FALSE);
    gst_clear_object (&priv->internal_pool);
  }

  caps = gst_video_info_to_caps (&priv->info);
  if (!caps) {
    GST_ERROR_OBJECT (self, "Failed to convert video-info to caps");
    return FALSE;
  }

  gst_video_alignment_reset (&align);
  align.padding_right = priv->aligned_info.width - priv->info.width;
  align.padding_bottom = priv->aligned_info.height - priv->info.height;

  /* TODO: Add Linux video memory (VA/DMABuf) support */
#ifdef G_OS_WIN32
  if (priv->use_video_memory) {
    priv->mem_type = GST_QSV_VIDEO_MEMORY | GST_QSV_DECODER_OUT_MEMORY;
    *io_pattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;

    ret = gst_qsv_decoder_prepare_d3d11_pool (self, caps, &priv->info, &align);
  }
#endif

  if (!ret) {
    priv->mem_type = GST_QSV_SYSTEM_MEMORY | GST_QSV_DECODER_OUT_MEMORY;
    *io_pattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;

    ret = gst_qsv_decoder_prepare_system_pool (self, caps, &priv->info, &align);
  }
  gst_caps_unref (caps);

  return ret;
}

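/* Configure the MFXVideoDECODE object from the parsed stream parameters:
 * prepare the internal pool, query/init the decoder, then pre-allocate the
 * surface and task pools sized from the allocator response and async depth */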
static gboolean
gst_qsv_decoder_init_session (GstQsvDecoder * self)
{
  GstQsvDecoderPrivate *priv = self->priv;
  mfxVideoParam *param = &priv->video_param;
  mfxFrameInfo *frame_info = &param->mfx.FrameInfo;
  MFXVideoDECODE *decoder_handle = nullptr;
  mfxFrameAllocRequest request;
  mfxStatus status;

  GST_DEBUG_OBJECT (self, "Init session");

  memset (&request, 0, sizeof (mfxFrameAllocRequest));

  gst_qsv_decoder_reset (self);

  if (!gst_qsv_decoder_prepare_pool (self, &param->IOPattern)) {
    GST_ERROR_OBJECT (self, "Failed to prepare pool");
    goto error;
  }

  param->AsyncDepth = priv->is_live ? 1 : 4;

  decoder_handle = new MFXVideoDECODE (priv->session);

  /* Additional 4 frames for margin. Actually a large pool size would be fine
   * because we don't pre-allocate underlying output memory objects */
  gst_qsv_allocator_set_options (priv->allocator, param->AsyncDepth + 4, TRUE);

  status = decoder_handle->Query (param, param);
  QSV_CHECK_STATUS (self, status, MFXVideoDECODE::Query);

  status = decoder_handle->QueryIOSurf (param, &request);
  QSV_CHECK_STATUS (self, status, MFXVideoDECODE::QueryIOSurf);

  status = decoder_handle->Init (param);
  QSV_CHECK_STATUS (self, status, MFXVideoDECODE::Init);

  status = decoder_handle->GetVideoParam (param);
  QSV_CHECK_STATUS (self, status, MFXVideoDECODE::GetVideoParam);

  /* In case we use video memory, MFXVideoDECODE::Init() will invoke
   * mfxFrameAllocator::Alloc(). Pull the pre-allocated dummy GstQsvFrame
   * objects here and fill them with a GstBuffer later when needed */
  if (GST_QSV_MEM_TYPE_IS_SYSTEM (priv->mem_type)) {
    mfxFrameAllocator *alloc_handle =
        gst_qsv_allocator_get_allocator_handle (priv->allocator);

    request.Type |= MFX_MEMTYPE_EXTERNAL_FRAME;
    status = alloc_handle->Alloc ((mfxHDL) priv->allocator, &request,
        &priv->response);
    if (status != MFX_ERR_NONE) {
      GST_ERROR_OBJECT (self, "Failed to allocate system memory frames");
      goto error;
    }
  } else if (!gst_qsv_allocator_get_cached_response (priv->allocator,
          &priv->response)) {
    GST_ERROR_OBJECT (self, "Failed to get cached response");
    goto error;
  }

  g_array_set_size (priv->surface_pool, priv->response.NumFrameActual);
  for (guint i = 0; i < priv->surface_pool->len; i++) {
    GstQsvDecoderSurface *surface = &g_array_index (priv->surface_pool,
        GstQsvDecoderSurface, i);
    GstBuffer *buf;

    gst_qsv_decoder_surface_clear (surface);
    surface->surface.Info = *frame_info;
    surface->surface.Data.MemId = priv->response.mids[i];

    /* holds the cast object without a ref, to make the code cleaner */
    surface->frame = (GstQsvFrame *) surface->surface.Data.MemId;

    /* This frame must not hold a buffer at this moment */
    buf = gst_qsv_frame_peek_buffer (surface->frame);
    g_assert (buf == nullptr);
  }

  g_array_set_size (priv->task_pool, param->AsyncDepth);
  for (guint i = 0; i < priv->task_pool->len; i++) {
    GstQsvDecoderTask *task = &g_array_index (priv->task_pool,
        GstQsvDecoderTask, i);

    gst_qsv_decoder_task_clear (task);
  }
  priv->next_task_index = 0;

  priv->decoder = decoder_handle;

  return TRUE;

error:
  if (decoder_handle)
    delete decoder_handle;

  gst_qsv_decoder_reset (self);

  return FALSE;
}

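/* Update the output state from the (cropped) mfxFrameInfo, regenerate output
 * caps and, on Windows, pick D3D11 memory caps features when downstream
 * advertises support for them */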
static gboolean
gst_qsv_decoder_negotiate_internal (GstVideoDecoder * decoder,
    const mfxFrameInfo * frame_info)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
  GstQsvDecoderPrivate *priv = self->priv;
  guint width, height;

  width = frame_info->Width;
  height = frame_info->Height;

  if (frame_info->CropW > 0 && frame_info->CropH > 0) {
    width = frame_info->CropW;
    height = frame_info->CropH;
  }

  g_clear_pointer (&priv->output_state, gst_video_codec_state_unref);
  priv->output_state =
      gst_video_decoder_set_interlaced_output_state (GST_VIDEO_DECODER (self),
      GST_VIDEO_INFO_FORMAT (&priv->info),
      GST_VIDEO_INFO_INTERLACE_MODE (&priv->info),
      width, height, priv->input_state);

  priv->output_state->caps = gst_video_info_to_caps (&priv->output_state->info);
  priv->use_video_memory = FALSE;

#ifdef G_OS_WIN32
  GstCaps *peer_caps =
      gst_pad_get_allowed_caps (GST_VIDEO_DECODER_SRC_PAD (self));
  GST_DEBUG_OBJECT (self, "Allowed caps %" GST_PTR_FORMAT, peer_caps);

  if (!peer_caps || gst_caps_is_any (peer_caps)) {
    GST_DEBUG_OBJECT (self,
        "cannot determine output format, use system memory");
  } else {
    GstCapsFeatures *features;
    guint size = gst_caps_get_size (peer_caps);

    for (guint i = 0; i < size; i++) {
      features = gst_caps_get_features (peer_caps, i);

      if (!features)
        continue;

      if (gst_caps_features_contains (features,
              GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY)) {
        priv->use_video_memory = TRUE;
        break;
      }
    }
  }
  gst_clear_caps (&peer_caps);

  if (priv->use_video_memory) {
    GST_DEBUG_OBJECT (self, "Downstream supports D3D11 memory");
    gst_caps_set_features (priv->output_state->caps, 0,
        gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, nullptr));
  }
#endif

  GST_DEBUG_OBJECT (self,
      "Negotiating with %" GST_PTR_FORMAT, priv->output_state->caps);

  return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
}

static gboolean
gst_qsv_decoder_negotiate (GstVideoDecoder * decoder)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
  GstQsvDecoderPrivate *priv = self->priv;
  GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
  guint width, height;
  guint coded_width, coded_height;
  guint aligned_width, aligned_height;
  mfxVideoParam *param = &priv->video_param;
  mfxFrameInfo *frame_info = &param->mfx.FrameInfo;
  GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
  GstVideoInterlaceMode interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

  width = coded_width = frame_info->Width;
  height = coded_height = frame_info->Height;

  if (frame_info->CropW > 0 && frame_info->CropH > 0) {
    width = frame_info->CropW;
    height = frame_info->CropH;
  }

  switch (frame_info->FourCC) {
    case MFX_FOURCC_NV12:
      format = GST_VIDEO_FORMAT_NV12;
      break;
    case MFX_FOURCC_P010:
      format = GST_VIDEO_FORMAT_P010_10LE;
      break;
    case MFX_FOURCC_P016:
      format = GST_VIDEO_FORMAT_P016_LE;
      break;
    case MFX_FOURCC_RGB4:
      format = GST_VIDEO_FORMAT_BGRA;
      break;
    default:
      break;
  }

  if (klass->codec_id == MFX_CODEC_JPEG) {
    if (param->mfx.JPEGChromaFormat == MFX_CHROMAFORMAT_YUV422) {
      format = GST_VIDEO_FORMAT_YUY2;
      frame_info->FourCC = MFX_FOURCC_YUY2;
      frame_info->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
    } else if (param->mfx.JPEGColorFormat == MFX_JPEG_COLORFORMAT_RGB) {
      format = GST_VIDEO_FORMAT_BGRA;
      frame_info->FourCC = MFX_FOURCC_RGB4;
      frame_info->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
    }
  }

  if (format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_ERROR_OBJECT (self, "Unknown video format");
    return FALSE;
  }

  aligned_width = GST_ROUND_UP_16 (coded_width);
  if (klass->codec_id == MFX_CODEC_AVC) {
    if (frame_info->PicStruct == MFX_PICSTRUCT_PROGRESSIVE) {
      aligned_height = GST_ROUND_UP_16 (coded_height);
    } else {
      aligned_height = GST_ROUND_UP_32 (coded_height);
      /* In theory, tff/bff can be altered in a sequence */
      interlace_mode = GST_VIDEO_INTERLACE_MODE_MIXED;
    }
  } else {
    aligned_height = GST_ROUND_UP_16 (coded_height);
  }

  frame_info->Width = aligned_width;
  frame_info->Height = aligned_height;

  gst_video_info_set_interlaced_format (&priv->info, format,
      interlace_mode, width, height);
  gst_video_info_set_interlaced_format (&priv->aligned_info, format,
      interlace_mode, aligned_width, aligned_height);

  return gst_qsv_decoder_negotiate_internal (decoder,
      &priv->video_param.mfx.FrameInfo);
}

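/* Configure the downstream allocation query: reuse the proposed pool when it
 * is a d3d11 pool on our device, otherwise create a suitable pool, and record
 * whether downstream supports GstVideoMeta */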
#ifdef G_OS_WIN32
static gboolean
gst_qsv_decoder_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
  GstQsvDecoderPrivate *priv = self->priv;
  GstCaps *outcaps;
  GstBufferPool *pool = nullptr;
  guint n, size, min = 0, max = 0;
  GstVideoInfo vinfo;
  GstStructure *config;
  GstD3D11AllocationParams *d3d11_params;
  gboolean use_d3d11_pool;
  GstD3D11Device *device = GST_D3D11_DEVICE (priv->device);

  gst_query_parse_allocation (query, &outcaps, nullptr);

  if (!outcaps) {
    GST_DEBUG_OBJECT (decoder, "No output caps");
    return FALSE;
  }

  priv->have_video_meta = gst_query_find_allocation_meta (query,
      GST_VIDEO_META_API_TYPE, nullptr);
  use_d3d11_pool = priv->use_video_memory;

  gst_video_info_from_caps (&vinfo, outcaps);
  n = gst_query_get_n_allocation_pools (query);
  if (n > 0)
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

  if (pool && use_d3d11_pool) {
    if (!GST_IS_D3D11_BUFFER_POOL (pool)) {
      GST_DEBUG_OBJECT (self,
          "Downstream pool is not d3d11, will create new one");
      gst_clear_object (&pool);
    } else {
      GstD3D11BufferPool *dpool = GST_D3D11_BUFFER_POOL (pool);
      if (dpool->device != device) {
        GST_DEBUG_OBJECT (self, "Different device, will create new one");
        gst_clear_object (&pool);
      }
    }
  }

  if (!pool) {
    if (use_d3d11_pool)
      pool = gst_d3d11_buffer_pool_new (device);
    else
      pool = gst_video_buffer_pool_new ();

    size = (guint) vinfo.size;
  }

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, outcaps, size, min, max);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

  /* Decoder will use internal pool to output but this pool is required for
   * copying in case of reverse playback */
  if (use_d3d11_pool) {
    guint bind_flags = 0;
    GstD3D11Format d3d11_format;

    d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config);
    if (!d3d11_params) {
      d3d11_params = gst_d3d11_allocation_params_new (device, &vinfo,
          GST_D3D11_ALLOCATION_FLAG_DEFAULT, 0, 0);
    }

    gst_d3d11_device_get_format (device, GST_VIDEO_INFO_FORMAT (&vinfo),
        &d3d11_format);

    /* Use both render target (for videoprocessor) and shader resource
     * (for pixel shader) bind flags so that downstream can use a consistent
     * conversion path even when we copy textures */
    if ((d3d11_format.format_support[0] &
            (guint) D3D11_FORMAT_SUPPORT_RENDER_TARGET) != 0) {
      bind_flags |= D3D11_BIND_RENDER_TARGET;
    }

    if ((d3d11_format.format_support[0] & (guint)
            D3D11_FORMAT_SUPPORT_SHADER_SAMPLE) != 0)
      bind_flags |= D3D11_BIND_SHADER_RESOURCE;

    gst_d3d11_allocation_params_set_bind_flags (d3d11_params, bind_flags);
    gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params);
    gst_d3d11_allocation_params_free (d3d11_params);
  }

  gst_buffer_pool_set_config (pool, config);
  /* d3d11 buffer pool will update buffer size based on allocated texture,
   * get size from config again */
  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_get_params (config, nullptr, &size, nullptr, nullptr);
  gst_structure_free (config);

  if (n > 0)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);
  gst_object_unref (pool);

  return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
      query);
}
#else
static gboolean
gst_qsv_decoder_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  /* TODO: add VA support */
  return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
      query);
}
#endif /* G_OS_WIN32 */

static gboolean
gst_qsv_decoder_handle_context_query (GstQsvDecoder * self, GstQuery * query)
{
  GstQsvDecoderPrivate *priv = self->priv;

#ifdef G_OS_WIN32
  return gst_d3d11_handle_context_query (GST_ELEMENT (self), query,
      (GstD3D11Device *) priv->device);
#else
  return gst_va_handle_context_query (GST_ELEMENT (self), query,
      (GstVaDisplay *) priv->device);
#endif
}

static gboolean
gst_qsv_decoder_sink_query (GstVideoDecoder * decoder, GstQuery * query)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONTEXT:
      if (gst_qsv_decoder_handle_context_query (self, query))
        return TRUE;
      break;
    default:
      break;
  }

  return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
}

static gboolean
gst_qsv_decoder_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONTEXT:
      if (gst_qsv_decoder_handle_context_query (self, query))
        return TRUE;
      break;
    default:
      break;
  }

  return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
}

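/* Main decoding entry point: on the first buffer (or after a reset) parse the
 * sequence header, negotiate with downstream and initialize the MFX session;
 * a new-sequence condition triggers one drain/reset/reconfigure cycle */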
static GstFlowReturn
gst_qsv_decoder_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);
  GstQsvDecoderClass *klass = GST_QSV_DECODER_GET_CLASS (self);
  GstQsvDecoderPrivate *priv = self->priv;
  mfxBitstream bs;
  GstMapInfo info;
  mfxStatus status;
  GstFlowReturn ret = GST_FLOW_ERROR;
  gboolean was_reconfigured = FALSE;
  GstBuffer *input_buf = nullptr;

  if (klass->process_input) {
    input_buf = klass->process_input (self, priv->decoder ? FALSE : TRUE,
        frame->input_buffer);
  } else {
    input_buf = gst_buffer_ref (frame->input_buffer);
  }

  if (!input_buf || !gst_buffer_map (input_buf, &info, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map input buffer");
    gst_clear_buffer (&input_buf);
    gst_video_decoder_release_frame (decoder, frame);
    return GST_FLOW_ERROR;
  }

  memset (&bs, 0, sizeof (mfxBitstream));

  bs.Data = (mfxU8 *) info.data;
  bs.DataLength = bs.MaxLength = (mfxU32) info.size;
  bs.TimeStamp = gst_qsv_timestamp_from_gst (frame->pts);

new_sequence:
  if (!priv->decoder) {
    status = MFXVideoDECODE_DecodeHeader (priv->session,
        &bs, &priv->video_param);

    if (status != MFX_ERR_NONE) {
      if (status == MFX_ERR_MORE_DATA) {
        GST_WARNING_OBJECT (self, "Need more data to parse header");
        ret = GST_FLOW_OK;
      } else {
        GST_ERROR_OBJECT (self, "Failed to parse header %d (%s)",
            QSV_STATUS_ARGS (status));
      }

      goto unmap_and_error;
    }

    if (!gst_video_decoder_negotiate (decoder)) {
      GST_ERROR_OBJECT (self, "Failed to negotiate");
      ret = GST_FLOW_NOT_NEGOTIATED;
      goto unmap_and_error;
    }

    if (!gst_qsv_decoder_init_session (self)) {
      GST_ERROR_OBJECT (self, "Failed to init session");
      return GST_FLOW_ERROR;
    }
  }

  if (!priv->decoder) {
    GST_ERROR_OBJECT (self, "Decoder object was not configured");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto unmap_and_error;
  }

  ret = gst_qsv_decoder_decode_frame (self, &bs, FALSE);

  switch (ret) {
    case GST_QSV_DECODER_FLOW_NEW_SEQUENCE:
      if (!was_reconfigured) {
        gst_qsv_decoder_drain_internal (self, FALSE);
        gst_qsv_decoder_reset (self);
        was_reconfigured = TRUE;

        goto new_sequence;
      }

      ret = GST_FLOW_ERROR;
      break;
    case GST_VIDEO_DECODER_FLOW_NEED_DATA:
      ret = GST_FLOW_OK;
      break;
    default:
      break;
  }

  gst_buffer_unmap (input_buf, &info);
  gst_buffer_unref (input_buf);
  gst_video_codec_frame_unref (frame);

  return ret;

unmap_and_error:
  gst_buffer_unmap (input_buf, &info);
  gst_buffer_unref (input_buf);
  gst_video_decoder_release_frame (decoder, frame);

  return ret;
}

static gboolean
gst_qsv_decoder_flush (GstVideoDecoder * decoder)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);

  GST_DEBUG_OBJECT (self, "Flush");

  gst_qsv_decoder_drain_internal (self, TRUE);

  return TRUE;
}

static GstFlowReturn
gst_qsv_decoder_finish (GstVideoDecoder * decoder)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);

  GST_DEBUG_OBJECT (self, "Finish");

  return gst_qsv_decoder_drain_internal (self, FALSE);
}

static GstFlowReturn
gst_qsv_decoder_drain (GstVideoDecoder * decoder)
{
  GstQsvDecoder *self = GST_QSV_DECODER (decoder);

  GST_DEBUG_OBJECT (self, "Drain");

  return gst_qsv_decoder_drain_internal (self, FALSE);
}