d3d11videosink: Add support for drawing on application's own texture

Add a way to support drawing on application's texture instead of
usual window handle.
To make use of this new feature, the application should follow the steps below.
1) Enable this feature by using "draw-on-shared-texture" property
2) Watch "begin-draw" signal
3) On "begin-draw" signal handler, application can request drawing
   by using "draw" signal action. Note that "draw" signal action
   should happen before the "begin-draw" signal handler returns

NOTE 1) For texture sharing, creating a texture with
D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX flag is strongly recommended
if possible, because we cannot ensure synchronization of a texture
which was created with D3D11_RESOURCE_MISC_SHARED
and it would cause glitch with ID3D11VideoProcessor use case.

NOTE 2) Direct9Ex doesn't support texture sharing which was
created with D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX. In other words,
D3D11_RESOURCE_MISC_SHARED is the only option for Direct3D11/Direct9Ex interop.

NOTE 3) Because of missing synchronization around ID3D11VideoProcessor,
If shared texture was created with D3D11_RESOURCE_MISC_SHARED,
d3d11videosink might use fallback texture to convert DXVA texture
to normal Direct3D texture. Then converted texture will be
copied to user-provided shared texture.

* Why not use generic appsink approach?
In order for application to be able to store video data
which was produced by GStreamer in application's own texture,
there would be two possible approaches,
one is copying our texture into application's own texture,
and the other is drawing on application's own texture directly.
The former (appsink way) cannot be a zero-copy by nature.
In order to support zero-copy processing, we need to draw on
application's own texture directly.

For example, assume that application wants RGBA texture.
Then we can imagine following case.

"d3d11h264dec ! d3d11convert ! video/x-raw(memory:D3D11Memory),format=RGBA ! appsink"
                             ^
                             |_ allocate new Direct3D texture for RGBA format

In above case, d3d11convert will allocate new texture(s) for RGBA format
and then the application will copy our RGBA texture into
its own texture once again. One extra texture allocation plus a per-frame
GPU copy will therefore happen in that case.
Moreover, in order for application to be able to access
our texture, we need to allocate texture with additional flags for
application's Direct3D11 device to be able to read texture data.
That would be another implementation burden on our side.

But with this MR, we can configure pipeline in this way
"d3d11h264dec ! d3d11videosink".

In that way, we can save at least one texture allocation and
per-frame texture copy since d3d11videosink will convert incoming texture
into application's texture format directly without copy.

* What if we expose texture without conversion and application does
  conversion by itself?
As mentioned above, for application to be able to access our texture
from application's Direct3D11 device, we need to allocate texture
in a special form. But in some case, that might not be possible.
Also, if a texture belongs to decoder DPB, exposing such texture
to application is unsafe and usual Direct3D11 shader cannot handle
such texture. To convert format, ID3D11VideoProcessor API needs to
be used but that would be an implementation burden for application.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1873>
This commit is contained in:
Seungha Yang 2020-12-23 23:49:12 +09:00 committed by GStreamer Merge Bot
parent 66788366a0
commit 60e223f4fd
9 changed files with 1097 additions and 35 deletions

View file

@ -1,5 +1,6 @@
/* GStreamer /* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com> * Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
* *
* This library is free software; you can redistribute it and/or * This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public * modify it under the terms of the GNU Library General Public
@ -32,6 +33,7 @@
#if (!GST_D3D11_WINAPI_ONLY_APP) #if (!GST_D3D11_WINAPI_ONLY_APP)
#include "gstd3d11window_win32.h" #include "gstd3d11window_win32.h"
#endif #endif
#include "gstd3d11window_dummy.h"
enum enum
{ {
@ -42,6 +44,7 @@ enum
PROP_FULLSCREEN_TOGGLE_MODE, PROP_FULLSCREEN_TOGGLE_MODE,
PROP_FULLSCREEN, PROP_FULLSCREEN,
PROP_RENDER_STATS, PROP_RENDER_STATS,
PROP_DRAW_ON_SHARED_TEXTURE,
}; };
#define DEFAULT_ADAPTER -1 #define DEFAULT_ADAPTER -1
@ -50,6 +53,20 @@ enum
#define DEFAULT_FULLSCREEN_TOGGLE_MODE GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_NONE #define DEFAULT_FULLSCREEN_TOGGLE_MODE GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_NONE
#define DEFAULT_FULLSCREEN FALSE #define DEFAULT_FULLSCREEN FALSE
#define DEFAULT_RENDER_STATS FALSE #define DEFAULT_RENDER_STATS FALSE
#define DEFAULT_DRAW_ON_SHARED_TEXTURE FALSE
enum
{
/* signals */
SIGNAL_BEGIN_DRAW,
/* actions */
SIGNAL_DRAW,
LAST_SIGNAL
};
static guint gst_d3d11_video_sink_signals[LAST_SIGNAL] = { 0, };
static GstStaticCaps pad_template_caps = static GstStaticCaps pad_template_caps =
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES
@ -81,6 +98,7 @@ struct _GstD3D11VideoSink
GstD3D11WindowFullscreenToggleMode fullscreen_toggle_mode; GstD3D11WindowFullscreenToggleMode fullscreen_toggle_mode;
gboolean fullscreen; gboolean fullscreen;
gboolean render_stats; gboolean render_stats;
gboolean draw_on_shared_texture;
/* saved render rectangle until we have a window */ /* saved render rectangle until we have a window */
GstVideoRectangle render_rect; GstVideoRectangle render_rect;
@ -89,12 +107,24 @@ struct _GstD3D11VideoSink
GstBufferPool *fallback_pool; GstBufferPool *fallback_pool;
gboolean can_convert; gboolean can_convert;
gboolean have_video_processor; gboolean have_video_processor;
/* For drawing on user texture */
GstD3D11VideoSinkCallbacks callbacks;
gpointer user_data;
gboolean drawing;
GstBuffer *current_buffer;
GRecMutex draw_lock;
}; };
static void gst_d3d11_videosink_set_property (GObject * object, guint prop_id, static void gst_d3d11_videosink_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec); const GValue * value, GParamSpec * pspec);
static void gst_d3d11_videosink_get_property (GObject * object, guint prop_id, static void gst_d3d11_videosink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec); GValue * value, GParamSpec * pspec);
static void gst_d3d11_video_sink_finalize (GObject * object);
static gboolean
gst_d3d11_video_sink_draw_action (GstD3D11VideoSink * self,
gpointer shared_handle, guint texture_misc_flags, guint64 acquire_key,
guint64 release_key);
static void static void
gst_d3d11_video_sink_video_overlay_init (GstVideoOverlayInterface * iface); gst_d3d11_video_sink_video_overlay_init (GstVideoOverlayInterface * iface);
@ -142,6 +172,7 @@ gst_d3d11_video_sink_class_init (GstD3D11VideoSinkClass * klass)
gobject_class->set_property = gst_d3d11_videosink_set_property; gobject_class->set_property = gst_d3d11_videosink_set_property;
gobject_class->get_property = gst_d3d11_videosink_get_property; gobject_class->get_property = gst_d3d11_videosink_get_property;
gobject_class->finalize = gst_d3d11_video_sink_finalize;
g_object_class_install_property (gobject_class, PROP_ADAPTER, g_object_class_install_property (gobject_class, PROP_ADAPTER,
g_param_spec_int ("adapter", "Adapter", g_param_spec_int ("adapter", "Adapter",
@ -187,6 +218,74 @@ gst_d3d11_video_sink_class_init (GstD3D11VideoSinkClass * klass)
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
#endif #endif
/**
* GstD3D11VideoSink:draw-on-shared-texture:
*
* Instrcut the sink to draw on a shared texture provided by user.
* User must watch "begin-draw" signal and should call "draw" method
* on the "begin-draw" signal handler.
*
* Currently supported formats for user texture are:
* - DXGI_FORMAT_R8G8B8A8_UNORM
* - DXGI_FORMAT_B8G8R8A8_UNORM
* - DXGI_FORMAT_R10G10B10A2_UNORM
*
* Since: 1.20
*/
g_object_class_install_property (gobject_class, PROP_DRAW_ON_SHARED_TEXTURE,
g_param_spec_boolean ("draw-on-shared-texture",
"Draw on shared texture",
"Draw on user provided shared texture instead of window. "
"When enabled, user can pass application's own texture to sink "
"by using \"draw\" action signal on \"begin-draw\" signal handler, "
"so that sink can draw video data on application's texture. "
"Supported texture formats for user texture are "
"DXGI_FORMAT_R8G8B8A8_UNORM, DXGI_FORMAT_B8G8R8A8_UNORM, and "
"DXGI_FORMAT_R10G10B10A2_UNORM.",
DEFAULT_DRAW_ON_SHARED_TEXTURE,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
G_PARAM_STATIC_STRINGS));
/**
* GstD3D11VideoSink:begin-draw:
* @videosink: the d3d11videosink
*
* Signal that sink has a texture to draw. Application needs to invoke "draw"
* action signal before returning from "begin-draw" signal handler.
*
* Since: 1.20
*/
gst_d3d11_video_sink_signals[SIGNAL_BEGIN_DRAW] =
g_signal_new ("begin-draw", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
G_STRUCT_OFFSET (GstD3D11VideoSinkClass, begin_draw),
NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
/**
* GstD3D11VideoSink:draw:
* @videosink: the d3d11videosink
* @shard_handle: a pointer to HANDLE
* @texture_misc_flags: a D3D11_RESOURCE_MISC_FLAG value
* @acquire_key: a key value used for IDXGIKeyedMutex::AcquireSync
* @release_key: a key value used for IDXGIKeyedMutex::ReleaseSync
*
* Draws on shared texture. @shard_handle must be a valid pointer to HANDLE
* which was obtained via IDXGIResource::GetSharedHandle or
* IDXGIResource1::CreateSharedHandle.
*
* If texture was created with D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX flag,
* caller must specify valid @acquire_key and @release_key.
* Otherwise (i.e., created with D3D11_RESOURCE_MISC_SHARED flag),
* @acquire_key and @release_key will be ignored.
*
* Since: 1.20
*/
gst_d3d11_video_sink_signals[SIGNAL_DRAW] =
g_signal_new ("draw", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
G_STRUCT_OFFSET (GstD3D11VideoSinkClass, draw), NULL, NULL, NULL,
G_TYPE_BOOLEAN, 4, G_TYPE_POINTER, G_TYPE_UINT, G_TYPE_UINT64,
G_TYPE_UINT64);
element_class->set_context = element_class->set_context =
GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_set_context); GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_set_context);
@ -214,6 +313,8 @@ gst_d3d11_video_sink_class_init (GstD3D11VideoSinkClass * klass)
videosink_class->show_frame = videosink_class->show_frame =
GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_show_frame); GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_show_frame);
klass->draw = gst_d3d11_video_sink_draw_action;
gst_type_mark_as_plugin_api (GST_D3D11_WINDOW_TOGGLE_MODE_GET_TYPE, 0); gst_type_mark_as_plugin_api (GST_D3D11_WINDOW_TOGGLE_MODE_GET_TYPE, 0);
} }
@ -226,6 +327,9 @@ gst_d3d11_video_sink_init (GstD3D11VideoSink * self)
self->fullscreen_toggle_mode = DEFAULT_FULLSCREEN_TOGGLE_MODE; self->fullscreen_toggle_mode = DEFAULT_FULLSCREEN_TOGGLE_MODE;
self->fullscreen = DEFAULT_FULLSCREEN; self->fullscreen = DEFAULT_FULLSCREEN;
self->render_stats = DEFAULT_RENDER_STATS; self->render_stats = DEFAULT_RENDER_STATS;
self->draw_on_shared_texture = DEFAULT_DRAW_ON_SHARED_TEXTURE;
g_rec_mutex_init (&self->draw_lock);
} }
static void static void
@ -270,6 +374,9 @@ gst_d3d11_videosink_set_property (GObject * object, guint prop_id,
self->render_stats = g_value_get_boolean (value); self->render_stats = g_value_get_boolean (value);
break; break;
#endif #endif
case PROP_DRAW_ON_SHARED_TEXTURE:
self->draw_on_shared_texture = g_value_get_boolean (value);
break;
default: default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break; break;
@ -308,12 +415,25 @@ gst_d3d11_videosink_get_property (GObject * object, guint prop_id,
g_value_set_boolean (value, self->render_stats); g_value_set_boolean (value, self->render_stats);
break; break;
#endif #endif
case PROP_DRAW_ON_SHARED_TEXTURE:
g_value_set_boolean (value, self->draw_on_shared_texture);
break;
default: default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break; break;
} }
} }
static void
gst_d3d11_video_sink_finalize (GObject * object)
{
GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (object);
g_rec_mutex_clear (&self->draw_lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void static void
gst_d3d11_video_sink_set_context (GstElement * element, GstContext * context) gst_d3d11_video_sink_set_context (GstElement * element, GstContext * context)
{ {
@ -622,6 +742,7 @@ gst_d3d11_video_sink_start (GstBaseSink * sink)
} }
g_object_get (self->device, "hardware", &is_hardware, NULL); g_object_get (self->device, "hardware", &is_hardware, NULL);
if (!is_hardware) { if (!is_hardware) {
GST_WARNING_OBJECT (self, "D3D11 device is running on software emulation"); GST_WARNING_OBJECT (self, "D3D11 device is running on software emulation");
self->can_convert = FALSE; self->can_convert = FALSE;
@ -640,6 +761,13 @@ gst_d3d11_video_sink_prepare_window (GstD3D11VideoSink * self)
if (self->window) if (self->window)
return TRUE; return TRUE;
if (self->draw_on_shared_texture) {
GST_INFO_OBJECT (self,
"Create dummy window for rendering on shared texture");
self->window = gst_d3d11_window_dummy_new (self->device);
return TRUE;
}
if (!self->window_id) if (!self->window_id)
gst_video_overlay_prepare_window_handle (GST_VIDEO_OVERLAY (self)); gst_video_overlay_prepare_window_handle (GST_VIDEO_OVERLAY (self));
@ -936,7 +1064,7 @@ static GstFlowReturn
gst_d3d11_video_sink_show_frame (GstVideoSink * sink, GstBuffer * buf) gst_d3d11_video_sink_show_frame (GstVideoSink * sink, GstBuffer * buf)
{ {
GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (sink); GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (sink);
GstFlowReturn ret; GstFlowReturn ret = GST_FLOW_OK;
GstVideoRectangle rect = { 0, }; GstVideoRectangle rect = { 0, };
GstBuffer *fallback_buf = NULL; GstBuffer *fallback_buf = NULL;
GstStructure *stats = NULL; GstStructure *stats = NULL;
@ -988,11 +1116,33 @@ gst_d3d11_video_sink_show_frame (GstVideoSink * sink, GstBuffer * buf)
rect.w = self->video_width; rect.w = self->video_width;
rect.h = self->video_height; rect.h = self->video_height;
if (self->render_stats) if (self->draw_on_shared_texture) {
stats = gst_base_sink_get_stats (GST_BASE_SINK_CAST (self)); g_rec_mutex_lock (&self->draw_lock);
self->current_buffer = fallback_buf ? fallback_buf : buf;
self->drawing = TRUE;
GST_LOG_OBJECT (self, "Begin drawing");
/* Application should call draw method on this callback */
if (self->callbacks.begin_draw) {
self->callbacks.begin_draw (self, self->user_data);
} else {
g_signal_emit (self, gst_d3d11_video_sink_signals[SIGNAL_BEGIN_DRAW], 0,
NULL);
}
GST_LOG_OBJECT (self, "End drawing");
self->drawing = FALSE;
self->current_buffer = NULL;
g_rec_mutex_unlock (&self->draw_lock);
} else {
if (self->render_stats)
stats = gst_base_sink_get_stats (GST_BASE_SINK_CAST (self));
ret = gst_d3d11_window_render (self->window,
fallback_buf ? fallback_buf : buf, &rect, stats);
}
ret = gst_d3d11_window_render (self->window,
fallback_buf ? fallback_buf : buf, &rect, stats);
gst_clear_buffer (&fallback_buf); gst_clear_buffer (&fallback_buf);
if (ret == GST_D3D11_WINDOW_FLOW_CLOSED) { if (ret == GST_D3D11_WINDOW_FLOW_CLOSED) {
@ -1129,3 +1279,62 @@ gst_d3d11_video_sink_navigation_init (GstNavigationInterface * iface)
{ {
iface->send_event = gst_d3d11_video_sink_navigation_send_event; iface->send_event = gst_d3d11_video_sink_navigation_send_event;
} }
static gboolean
gst_d3d11_video_sink_draw_action (GstD3D11VideoSink * self,
gpointer shared_handle, guint texture_misc_flags,
guint64 acquire_key, guint64 release_key)
{
GstFlowReturn ret;
g_return_val_if_fail (shared_handle != NULL, FALSE);
if (!self->draw_on_shared_texture) {
GST_ERROR_OBJECT (self, "Invalid draw call, we are drawing on window");
return FALSE;
}
if (!shared_handle) {
GST_ERROR_OBJECT (self, "Invalid handle");
return FALSE;
}
g_rec_mutex_lock (&self->draw_lock);
if (!self->drawing || !self->current_buffer) {
GST_WARNING_OBJECT (self, "Nothing to draw");
g_rec_mutex_unlock (&self->draw_lock);
return FALSE;
}
GST_LOG_OBJECT (self, "Drawing on shared handle %p, MiscFlags: 0x%x"
", acquire key: %" G_GUINT64_FORMAT ", release key: %"
G_GUINT64_FORMAT, shared_handle, texture_misc_flags, acquire_key,
release_key);
ret = gst_d3d11_window_render_on_shared_handle (self->window,
self->current_buffer, shared_handle, texture_misc_flags, acquire_key,
release_key);
g_rec_mutex_unlock (&self->draw_lock);
return ret == GST_FLOW_OK;
}
void
gst_d3d11_video_sink_set_callbacks (GstD3D11VideoSink * videosink,
GstD3D11VideoSinkCallbacks * callbacks, gpointer user_data)
{
g_return_if_fail (GST_IS_D3D11_VIDEO_SINK (videosink));
videosink->callbacks = *callbacks;
videosink->user_data = user_data;
}
gboolean
gst_d3d11_video_sink_draw (GstD3D11VideoSink * videosink,
gpointer shared_handle, guint texture_misc_flags, guint64 acquire_key,
guint64 release_key)
{
g_return_val_if_fail (GST_IS_D3D11_VIDEO_SINK (videosink), FALSE);
return gst_d3d11_video_sink_draw_action (videosink, shared_handle,
texture_misc_flags, acquire_key, release_key);
}

View file

@ -1,5 +1,6 @@
/* GStreamer /* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com> * Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
* *
* This library is free software; you can redistribute it and/or * This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public * modify it under the terms of the GNU Library General Public
@ -30,9 +31,52 @@
G_BEGIN_DECLS G_BEGIN_DECLS
#define GST_TYPE_D3D11_VIDEO_SINK (gst_d3d11_video_sink_get_type()) #define GST_TYPE_D3D11_VIDEO_SINK (gst_d3d11_video_sink_get_type())
G_DECLARE_FINAL_TYPE (GstD3D11VideoSink, #define GST_D3D11_VIDEO_SINK(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_D3D11_VIDEO_SINK, GstD3D11VideoSink))
gst_d3d11_video_sink, GST, D3D11_VIDEO_SINK, GstVideoSink); #define GST_D3D11_VIDEO_SINK_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_D3D11_VIDEO_SINK, GstD3D11VideoSinkClass))
#define GST_IS_D3D11_VIDEO_SINK(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_D3D11_VIDEO_SINK))
#define GST_IS_D3D11_VIDEO_SINK_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_D3D11_VIDEO_SINK))
#define GST_D3D11_VIDEO_SINK_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_D3D11_VIDEO_SINK, GstD3D11VideoSinkClass))
typedef struct _GstD3D11VideoSink GstD3D11VideoSink;
typedef struct _GstD3D11VideoSinkClass GstD3D11VideoSinkClass;
typedef struct
{
void (*begin_draw) (GstD3D11VideoSink * videosink,
gpointer user_data);
} GstD3D11VideoSinkCallbacks;
struct _GstD3D11VideoSinkClass
{
GstVideoSinkClass parent_class;
/* signals */
void (*begin_draw) (GstD3D11VideoSink * videosink);
/* actions */
gboolean (*draw) (GstD3D11VideoSink * videosink,
gpointer shared_handle,
guint texture_misc_flags,
guint64 acquire_key,
guint64 release_key);
};
GType gst_d3d11_video_sink_get_type (void);
/* Internal methods, called by d3d11videosinkbin */
void
gst_d3d11_video_sink_set_callbacks (GstD3D11VideoSink * videosink,
GstD3D11VideoSinkCallbacks * callbacks,
gpointer user_data);
gboolean
gst_d3d11_video_sink_draw (GstD3D11VideoSink * videosink,
gpointer shared_handle,
guint texture_misc_flags,
guint64 acquire_key,
guint64 release_key);
G_END_DECLS G_END_DECLS

View file

@ -1,5 +1,6 @@
/* GStreamer /* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com> * Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
* *
* This library is free software; you can redistribute it and/or * This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public * modify it under the terms of the GNU Library General Public
@ -56,6 +57,7 @@ enum
PROP_FULLSCREEN_TOGGLE_MODE, PROP_FULLSCREEN_TOGGLE_MODE,
PROP_FULLSCREEN, PROP_FULLSCREEN,
PROP_RENDER_STATS, PROP_RENDER_STATS,
PROP_DRAW_ON_SHARED_TEXTURE,
}; };
/* basesink */ /* basesink */
@ -82,6 +84,20 @@ enum
#define DEFAULT_FULLSCREEN_TOGGLE_MODE GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_NONE #define DEFAULT_FULLSCREEN_TOGGLE_MODE GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_NONE
#define DEFAULT_FULLSCREEN FALSE #define DEFAULT_FULLSCREEN FALSE
#define DEFAULT_RENDER_STATS FALSE #define DEFAULT_RENDER_STATS FALSE
#define DEFAULT_DRAW_ON_SHARED_TEXTURE FALSE
enum
{
/* signals */
SIGNAL_BEGIN_DRAW,
/* actions */
SIGNAL_DRAW,
LAST_SIGNAL
};
static guint gst_d3d11_video_sink_bin_signals[LAST_SIGNAL] = { 0, };
static GstStaticCaps pad_template_caps = static GstStaticCaps pad_template_caps =
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES
@ -118,6 +134,12 @@ static void
gst_d3d11_video_sink_bin_video_overlay_init (GstVideoOverlayInterface * iface); gst_d3d11_video_sink_bin_video_overlay_init (GstVideoOverlayInterface * iface);
static void static void
gst_d3d11_video_sink_bin_navigation_init (GstNavigationInterface * iface); gst_d3d11_video_sink_bin_navigation_init (GstNavigationInterface * iface);
static void gst_d311_video_sink_bin_on_begin_draw (GstD3D11VideoSink * sink,
gpointer self);
static gboolean
gst_d3d11_video_sink_bin_draw_action (GstD3D11VideoSinkBin * self,
gpointer shared_handle, guint texture_misc_flags, guint64 acquire_key,
guint64 release_key);
#define gst_d3d11_video_sink_bin_parent_class parent_class #define gst_d3d11_video_sink_bin_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstD3D11VideoSinkBin, gst_d3d11_video_sink_bin, G_DEFINE_TYPE_WITH_CODE (GstD3D11VideoSinkBin, gst_d3d11_video_sink_bin,
@ -242,6 +264,33 @@ gst_d3d11_video_sink_bin_class_init (GstD3D11VideoSinkBinClass * klass)
GST_PARAM_CONDITIONALLY_AVAILABLE | GST_PARAM_MUTABLE_READY | GST_PARAM_CONDITIONALLY_AVAILABLE | GST_PARAM_MUTABLE_READY |
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
#endif #endif
g_object_class_install_property (gobject_class, PROP_DRAW_ON_SHARED_TEXTURE,
g_param_spec_boolean ("draw-on-shared-texture",
"Draw on shared texture",
"Draw on user provided shared texture instead of window. "
"When enabled, user can pass application's own texture to sink "
"by using \"draw\" action signal on \"begin-draw\" signal handler, "
"so that sink can draw video data on application's texture. "
"Supported texture formats for user texture are "
"DXGI_FORMAT_R8G8B8A8_UNORM, DXGI_FORMAT_B8G8R8A8_UNORM, and "
"DXGI_FORMAT_R10G10B10A2_UNORM.",
DEFAULT_DRAW_ON_SHARED_TEXTURE,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
G_PARAM_STATIC_STRINGS));
gst_d3d11_video_sink_bin_signals[SIGNAL_BEGIN_DRAW] =
g_signal_new ("begin-draw", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
G_STRUCT_OFFSET (GstD3D11VideoSinkBinClass, begin_draw),
NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
gst_d3d11_video_sink_bin_signals[SIGNAL_DRAW] =
g_signal_new ("draw", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
G_STRUCT_OFFSET (GstD3D11VideoSinkBinClass, draw), NULL, NULL, NULL,
G_TYPE_BOOLEAN, 4, G_TYPE_POINTER, G_TYPE_UINT, G_TYPE_UINT64,
G_TYPE_UINT64);
klass->draw = gst_d3d11_video_sink_bin_draw_action;
gst_element_class_set_static_metadata (element_class, gst_element_class_set_static_metadata (element_class,
"Direct3D11 video sink bin", "Sink/Video", "Direct3D11 video sink bin", "Sink/Video",
@ -258,6 +307,7 @@ static void
gst_d3d11_video_sink_bin_init (GstD3D11VideoSinkBin * self) gst_d3d11_video_sink_bin_init (GstD3D11VideoSinkBin * self)
{ {
GstPad *pad; GstPad *pad;
GstD3D11VideoSinkCallbacks callbacks;
self->upload = gst_element_factory_make ("d3d11upload", NULL); self->upload = gst_element_factory_make ("d3d11upload", NULL);
if (!self->upload) { if (!self->upload) {
@ -272,6 +322,10 @@ gst_d3d11_video_sink_bin_init (GstD3D11VideoSinkBin * self)
return; return;
} }
callbacks.begin_draw = gst_d311_video_sink_bin_on_begin_draw;
gst_d3d11_video_sink_set_callbacks (GST_D3D11_VIDEO_SINK (self->sink),
&callbacks, self);
gst_bin_add_many (GST_BIN (self), self->upload, self->sink, NULL); gst_bin_add_many (GST_BIN (self), self->upload, self->sink, NULL);
gst_element_link_many (self->upload, self->sink, NULL); gst_element_link_many (self->upload, self->sink, NULL);
@ -309,6 +363,28 @@ gst_d3d11_video_sink_bin_get_property (GObject * object, guint prop_id,
g_object_get_property (G_OBJECT (self->sink), pspec->name, value); g_object_get_property (G_OBJECT (self->sink), pspec->name, value);
} }
static void
gst_d311_video_sink_bin_on_begin_draw (GstD3D11VideoSink * sink, gpointer self)
{
g_signal_emit (self, gst_d3d11_video_sink_bin_signals[SIGNAL_BEGIN_DRAW], 0,
NULL);
}
static gboolean
gst_d3d11_video_sink_bin_draw_action (GstD3D11VideoSinkBin * self,
gpointer shared_handle, guint texture_misc_flags, guint64 acquire_key,
guint64 release_key)
{
if (!self->sink) {
GST_ELEMENT_ERROR (self, RESOURCE, NOT_FOUND,
("D3D11VideoSink element wasn't configured"), (NULL));
return FALSE;
}
return gst_d3d11_video_sink_draw (GST_D3D11_VIDEO_SINK (self->sink),
shared_handle, texture_misc_flags, acquire_key, release_key);
}
/* VideoOverlay interface */ /* VideoOverlay interface */
static void static void
gst_d3d11_video_sink_bin_set_window_handle (GstVideoOverlay * overlay, gst_d3d11_video_sink_bin_set_window_handle (GstVideoOverlay * overlay,

View file

@ -1,5 +1,6 @@
/* GStreamer /* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com> * Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
* *
* This library is free software; you can redistribute it and/or * This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public * modify it under the terms of the GNU Library General Public
@ -24,9 +25,32 @@
G_BEGIN_DECLS G_BEGIN_DECLS
#define GST_TYPE_D3D11_VIDEO_SINK_BIN (gst_d3d11_video_sink_bin_get_type()) #define GST_TYPE_D3D11_VIDEO_SINK_BIN (gst_d3d11_video_sink_bin_get_type())
G_DECLARE_FINAL_TYPE (GstD3D11VideoSinkBin, #define GST_D3D11_VIDEO_SINK_BIN(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_D3D11_VIDEO_SINK_BIN, GstD3D11VideoSinkBin))
gst_d3d11_video_sink_bin, GST, D3D11_VIDEO_SINK_BIN, GstBin); #define GST_D3D11_VIDEO_SINK_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_D3D11_VIDEO_SINK_BIN, GstD3D11VideoSinkBinClass))
#define GST_IS_D3D11_VIDEO_SINK_BIN(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_D3D11_VIDEO_SINK_BIN))
#define GST_IS_D3D11_VIDEO_SINK_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_D3D11_VIDEO_SINK_BIN))
#define GST_D3D11_VIDEO_SINK_BIN_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_D3D11_VIDEO_SINK_BIN, GstD3D11VideoSinkBinClass))
typedef struct _GstD3D11VideoSinkBin GstD3D11VideoSinkBin;
typedef struct _GstD3D11VideoSinkBinClass GstD3D11VideoSinkBinClass;
struct _GstD3D11VideoSinkBinClass
{
GstBinClass parent_class;
/* signals */
void (*begin_draw) (GstD3D11VideoSinkBin * videosink);
/* actions */
gboolean (*draw) (GstD3D11VideoSinkBin * videosink,
gpointer shared_handle,
guint texture_misc_flags,
guint64 acquire_key,
guint64 release_key);
};
GType gst_d3d11_video_sink_bin_get_type (void);
G_END_DECLS G_END_DECLS

View file

@ -126,9 +126,13 @@ static void gst_d3d11_window_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec); GValue * value, GParamSpec * pspec);
static void gst_d3d11_window_dispose (GObject * object); static void gst_d3d11_window_dispose (GObject * object);
static GstFlowReturn gst_d3d111_window_present (GstD3D11Window * self, static GstFlowReturn gst_d3d111_window_present (GstD3D11Window * self,
GstBuffer * buffer, GstStructure * stats); GstBuffer * buffer, GstStructure * stats,
ID3D11VideoProcessorOutputView *pov, ID3D11RenderTargetView * rtv);
static void gst_d3d11_window_on_resize_default (GstD3D11Window * window, static void gst_d3d11_window_on_resize_default (GstD3D11Window * window,
guint width, guint height); guint width, guint height);
static gboolean gst_d3d11_window_prepare_default (GstD3D11Window * window,
guint display_width, guint display_height, GstCaps * caps,
gboolean * video_processor_available, GError ** error);
static void static void
gst_d3d11_window_class_init (GstD3D11WindowClass * klass) gst_d3d11_window_class_init (GstD3D11WindowClass * klass)
@ -140,6 +144,7 @@ gst_d3d11_window_class_init (GstD3D11WindowClass * klass)
gobject_class->dispose = gst_d3d11_window_dispose; gobject_class->dispose = gst_d3d11_window_dispose;
klass->on_resize = GST_DEBUG_FUNCPTR (gst_d3d11_window_on_resize_default); klass->on_resize = GST_DEBUG_FUNCPTR (gst_d3d11_window_on_resize_default);
klass->prepare = GST_DEBUG_FUNCPTR (gst_d3d11_window_prepare_default);
g_object_class_install_property (gobject_class, PROP_D3D11_DEVICE, g_object_class_install_property (gobject_class, PROP_D3D11_DEVICE,
g_param_spec_object ("d3d11device", "D3D11 Device", g_param_spec_object ("d3d11device", "D3D11 Device",
@ -582,7 +587,10 @@ gst_d3d11_window_on_resize_default (GstD3D11Window * window, guint width,
window->first_present = TRUE; window->first_present = TRUE;
/* redraw the last scene if cached buffer exits */ /* redraw the last scene if cached buffer exits */
gst_d3d111_window_present (window, NULL, NULL); if (window->cached_buffer) {
gst_d3d111_window_present (window, window->cached_buffer, NULL,
window->pov, window->rtv);
}
done: done:
if (backbuffer) if (backbuffer)
@ -623,13 +631,31 @@ typedef struct
gboolean supported; gboolean supported;
} GstD3D11WindowDisplayFormat; } GstD3D11WindowDisplayFormat;
gboolean gboolean
gst_d3d11_window_prepare (GstD3D11Window * window, guint display_width, gst_d3d11_window_prepare (GstD3D11Window * window, guint display_width,
guint display_height, GstCaps * caps, gboolean * video_processor_available, guint display_height, GstCaps * caps, gboolean * video_processor_available,
GError ** error) GError ** error)
{ {
GstD3D11WindowClass *klass; GstD3D11WindowClass *klass;
g_return_val_if_fail (GST_IS_D3D11_WINDOW (window), FALSE);
klass = GST_D3D11_WINDOW_GET_CLASS (window);
g_assert (klass->prepare != NULL);
GST_DEBUG_OBJECT (window, "Prepare window, display resolution %dx%d, caps %"
GST_PTR_FORMAT, display_width, display_height, caps);
return klass->prepare (window, display_width, display_height, caps,
video_processor_available, error);
}
static gboolean
gst_d3d11_window_prepare_default (GstD3D11Window * window, guint display_width,
guint display_height, GstCaps * caps, gboolean * video_processor_available,
GError ** error)
{
GstD3D11WindowClass *klass;
guint swapchain_flags = 0; guint swapchain_flags = 0;
ID3D11Device *device_handle; ID3D11Device *device_handle;
guint i; guint i;
@ -654,11 +680,6 @@ gst_d3d11_window_prepare (GstD3D11Window * window, guint display_width,
DXGI_HDR_METADATA_HDR10 hdr10_metadata = { 0, }; DXGI_HDR_METADATA_HDR10 hdr10_metadata = { 0, };
#endif #endif
g_return_val_if_fail (GST_IS_D3D11_WINDOW (window), FALSE);
GST_DEBUG_OBJECT (window, "Prepare window, display resolution %dx%d, caps %"
GST_PTR_FORMAT, display_width, display_height, caps);
/* Step 1: Clear old resources and objects */ /* Step 1: Clear old resources and objects */
gst_clear_buffer (&window->cached_buffer); gst_clear_buffer (&window->cached_buffer);
g_clear_pointer (&window->processor, gst_d3d11_video_processor_free); g_clear_pointer (&window->processor, gst_d3d11_video_processor_free);
@ -1079,17 +1100,17 @@ gst_d3d11_window_present_d2d (GstD3D11Window * self, GstStructure * stats)
static GstFlowReturn static GstFlowReturn
gst_d3d111_window_present (GstD3D11Window * self, GstBuffer * buffer, gst_d3d111_window_present (GstD3D11Window * self, GstBuffer * buffer,
GstStructure * stats) GstStructure * stats, ID3D11VideoProcessorOutputView *pov,
ID3D11RenderTargetView * rtv)
{ {
GstD3D11WindowClass *klass = GST_D3D11_WINDOW_GET_CLASS (self); GstD3D11WindowClass *klass = GST_D3D11_WINDOW_GET_CLASS (self);
GstFlowReturn ret = GST_FLOW_OK; GstFlowReturn ret = GST_FLOW_OK;
guint present_flags = 0; guint present_flags = 0;
if (buffer) { if (!buffer)
gst_buffer_replace (&self->cached_buffer, buffer); return GST_FLOW_OK;
}
if (self->cached_buffer) { {
GstMapInfo infos[GST_VIDEO_MAX_PLANES]; GstMapInfo infos[GST_VIDEO_MAX_PLANES];
ID3D11ShaderResourceView *srv[GST_VIDEO_MAX_PLANES]; ID3D11ShaderResourceView *srv[GST_VIDEO_MAX_PLANES];
ID3D11VideoProcessorInputView *piv = NULL; ID3D11VideoProcessorInputView *piv = NULL;
@ -1097,16 +1118,16 @@ gst_d3d111_window_present (GstD3D11Window * self, GstBuffer * buffer,
gst_d3d11_device_get_device_handle (self->device); gst_d3d11_device_get_device_handle (self->device);
/* Map memory in any case so that we can upload pending stage texture */ /* Map memory in any case so that we can upload pending stage texture */
if (!gst_d3d11_buffer_map (self->cached_buffer, device_handle, if (!gst_d3d11_buffer_map (buffer, device_handle,
infos, GST_MAP_READ)) { infos, GST_MAP_READ)) {
GST_ERROR_OBJECT (self, "Couldn't map buffer"); GST_ERROR_OBJECT (self, "Couldn't map buffer");
return GST_FLOW_ERROR; return GST_FLOW_ERROR;
} }
if (!gst_d3d11_buffer_get_shader_resource_view (self->cached_buffer, srv)) { if (!gst_d3d11_buffer_get_shader_resource_view (buffer, srv)) {
if (!gst_d3d11_window_buffer_ensure_processor_input (self, if (!gst_d3d11_window_buffer_ensure_processor_input (self,
self->cached_buffer, &piv)) { buffer, &piv)) {
GST_ERROR_OBJECT (self, "Input texture cannot be used for converter"); GST_ERROR_OBJECT (self, "Input texture cannot be used for converter");
return GST_FLOW_ERROR; return GST_FLOW_ERROR;
} }
@ -1127,9 +1148,9 @@ gst_d3d111_window_present (GstD3D11Window * self, GstBuffer * buffer,
&viewport); &viewport);
} }
if (self->processor && piv && self->pov) { if (self->processor && piv && pov) {
if (!gst_d3d11_video_processor_render_unlocked (self->processor, if (!gst_d3d11_video_processor_render_unlocked (self->processor,
&self->input_rect, piv, &self->render_rect, self->pov)) { &self->input_rect, piv, &self->render_rect, pov)) {
GST_ERROR_OBJECT (self, "Couldn't render to backbuffer using processor"); GST_ERROR_OBJECT (self, "Couldn't render to backbuffer using processor");
ret = GST_FLOW_ERROR; ret = GST_FLOW_ERROR;
goto unmap_and_out; goto unmap_and_out;
@ -1138,7 +1159,7 @@ gst_d3d111_window_present (GstD3D11Window * self, GstBuffer * buffer,
} }
} else { } else {
if (!gst_d3d11_color_converter_convert_unlocked (self->converter, if (!gst_d3d11_color_converter_convert_unlocked (self->converter,
srv, &self->rtv, NULL, NULL)) { srv, &rtv, NULL, NULL)) {
GST_ERROR_OBJECT (self, "Couldn't render to backbuffer using converter"); GST_ERROR_OBJECT (self, "Couldn't render to backbuffer using converter");
ret = GST_FLOW_ERROR; ret = GST_FLOW_ERROR;
goto unmap_and_out; goto unmap_and_out;
@ -1147,8 +1168,8 @@ gst_d3d111_window_present (GstD3D11Window * self, GstBuffer * buffer,
} }
} }
gst_d3d11_overlay_compositor_upload (self->compositor, self->cached_buffer); gst_d3d11_overlay_compositor_upload (self->compositor, buffer);
gst_d3d11_overlay_compositor_draw_unlocked (self->compositor, &self->rtv); gst_d3d11_overlay_compositor_draw_unlocked (self->compositor, &rtv);
#if (DXGI_HEADER_VERSION >= 5) #if (DXGI_HEADER_VERSION >= 5)
if (self->allow_tearing && self->fullscreen) { if (self->allow_tearing && self->fullscreen) {
@ -1160,12 +1181,13 @@ gst_d3d111_window_present (GstD3D11Window * self, GstBuffer * buffer,
gst_d3d11_window_present_d2d (self, stats); gst_d3d11_window_present_d2d (self, stats);
#endif #endif
ret = klass->present (self, present_flags); if (klass->present)
ret = klass->present (self, present_flags);
self->first_present = FALSE; self->first_present = FALSE;
unmap_and_out: unmap_and_out:
gst_d3d11_buffer_unmap (self->cached_buffer, infos); gst_d3d11_buffer_unmap (buffer, infos);
} }
return ret; return ret;
@ -1192,7 +1214,10 @@ gst_d3d11_window_render (GstD3D11Window * window, GstBuffer * buffer,
} }
gst_d3d11_device_lock (window->device); gst_d3d11_device_lock (window->device);
ret = gst_d3d111_window_present (window, buffer, stats); gst_buffer_replace (&window->cached_buffer, buffer);
ret = gst_d3d111_window_present (window, window->cached_buffer, stats,
window->pov, window->rtv);
gst_d3d11_device_unlock (window->device); gst_d3d11_device_unlock (window->device);
if (stats) if (stats)
@ -1201,6 +1226,61 @@ gst_d3d11_window_render (GstD3D11Window * window, GstBuffer * buffer,
return ret; return ret;
} }
GstFlowReturn
gst_d3d11_window_render_on_shared_handle (GstD3D11Window * window,
GstBuffer * buffer, HANDLE shared_handle, guint texture_misc_flags,
guint64 acquire_key, guint64 release_key)
{
GstD3D11WindowClass *klass;
GstMemory *mem;
GstFlowReturn ret = GST_FLOW_OK;
GstD3D11WindowSharedHandleData data = { NULL, };
ID3D11VideoProcessorOutputView *pov = NULL;
ID3D11RenderTargetView *rtv = NULL;
g_return_val_if_fail (GST_IS_D3D11_WINDOW (window), GST_FLOW_ERROR);
klass = GST_D3D11_WINDOW_GET_CLASS (window);
g_assert (klass->open_shared_handle != NULL);
g_assert (klass->release_shared_handle != NULL);
mem = gst_buffer_peek_memory (buffer, 0);
if (!gst_is_d3d11_memory (mem)) {
GST_ERROR_OBJECT (window, "Invalid buffer");
return GST_FLOW_ERROR;
}
data.shared_handle = shared_handle;
data.texture_misc_flags = texture_misc_flags;
data.acquire_key = acquire_key;
data.release_key = release_key;
gst_d3d11_device_lock (window->device);
if (!klass->open_shared_handle (window, &data)) {
GST_ERROR_OBJECT (window, "Couldn't open shared handle");
gst_d3d11_device_unlock (window->device);
return GST_FLOW_OK;
}
if (data.fallback_rtv) {
rtv = data.fallback_rtv;
pov = data.fallback_pov;
} else {
rtv = data.rtv;
pov = data.pov;
}
ret = gst_d3d111_window_present (window, buffer, NULL,
pov, rtv);
klass->release_shared_handle (window, &data);
gst_d3d11_device_unlock (window->device);
return ret;
}
gboolean gboolean
gst_d3d11_window_unlock (GstD3D11Window * window) gst_d3d11_window_unlock (GstD3D11Window * window)
{ {

View file

@ -63,6 +63,22 @@ typedef enum
GST_D3D11_WINDOW_NATIVE_TYPE_SWAP_CHAIN_PANEL, GST_D3D11_WINDOW_NATIVE_TYPE_SWAP_CHAIN_PANEL,
} GstD3D11WindowNativeType; } GstD3D11WindowNativeType;
typedef struct
{
HANDLE shared_handle;
guint texture_misc_flags;
guint64 acquire_key;
guint64 release_key;
ID3D11Texture2D *texture;
IDXGIKeyedMutex *keyed_mutex;
ID3D11VideoProcessorOutputView *pov;
ID3D11RenderTargetView *rtv;
ID3D11VideoProcessorOutputView *fallback_pov;
ID3D11RenderTargetView *fallback_rtv;
} GstD3D11WindowSharedHandleData;
struct _GstD3D11Window struct _GstD3D11Window
{ {
GstObject parent; GstObject parent;
@ -138,7 +154,20 @@ struct _GstD3D11WindowClass
guint width, guint width,
guint height); guint height);
gboolean (*prepare) (GstD3D11Window * window,
guint display_width,
guint display_height,
GstCaps * caps,
gboolean * video_processor_available,
GError ** error);
void (*unprepare) (GstD3D11Window * window); void (*unprepare) (GstD3D11Window * window);
gboolean (*open_shared_handle) (GstD3D11Window * window,
GstD3D11WindowSharedHandleData * data);
gboolean (*release_shared_handle) (GstD3D11Window * window,
GstD3D11WindowSharedHandleData * data);
}; };
GType gst_d3d11_window_get_type (void); GType gst_d3d11_window_get_type (void);
@ -161,6 +190,13 @@ GstFlowReturn gst_d3d11_window_render (GstD3D11Window * window,
GstVideoRectangle * src_rect, GstVideoRectangle * src_rect,
GstStructure * stats); GstStructure * stats);
GstFlowReturn gst_d3d11_window_render_on_shared_handle (GstD3D11Window * window,
GstBuffer * buffer,
HANDLE shared_handle,
guint texture_misc_flags,
guint64 acquire_key,
guint64 release_key);
gboolean gst_d3d11_window_unlock (GstD3D11Window * window); gboolean gst_d3d11_window_unlock (GstD3D11Window * window);
gboolean gst_d3d11_window_unlock_stop (GstD3D11Window * window); gboolean gst_d3d11_window_unlock_stop (GstD3D11Window * window);

View file

@ -0,0 +1,554 @@
/*
* GStreamer
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstd3d11window_dummy.h"
#include <wrl.h>
using namespace Microsoft::WRL;
G_BEGIN_DECLS
GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_window_debug);
#define GST_CAT_DEFAULT gst_d3d11_window_debug
G_END_DECLS
/* Window implementation with no on-screen output: rendering goes to an
 * application-provided shared texture (draw-on-shared-texture mode). */
struct _GstD3D11WindowDummy
{
  GstD3D11Window parent;
  /* Intermediate render target used when the application's texture was
   * created with D3D11_RESOURCE_MISC_SHARED (no keyed mutex) and a
   * ID3D11VideoProcessor is in use: we render here first, then copy to the
   * shared texture (see release_shared_handle) */
  ID3D11Texture2D *fallback_texture;
  /* Processor output view on fallback_texture; NULL if no processor */
  ID3D11VideoProcessorOutputView *fallback_pov;
  /* Render target view on fallback_texture */
  ID3D11RenderTargetView *fallback_rtv;
};
#define gst_d3d11_window_dummy_parent_class parent_class
G_DEFINE_TYPE (GstD3D11WindowDummy, gst_d3d11_window_dummy,
GST_TYPE_D3D11_WINDOW);
static void gst_d3d11_window_dummy_on_resize (GstD3D11Window * window,
guint width, guint height);
static gboolean gst_d3d11_window_dummy_prepare (GstD3D11Window * window,
guint display_width, guint display_height, GstCaps * caps,
gboolean * video_processor_available, GError ** error);
static void gst_d3d11_window_dummy_unprepare (GstD3D11Window * window);
static gboolean
gst_d3d11_window_dummy_open_shared_handle (GstD3D11Window * window,
GstD3D11WindowSharedHandleData * data);
static gboolean
gst_d3d11_window_dummy_release_shared_handle (GstD3D11Window * window,
GstD3D11WindowSharedHandleData * data);
static void
gst_d3d11_window_dummy_class_init (GstD3D11WindowDummyClass * klass)
{
  GstD3D11WindowClass *window_class = GST_D3D11_WINDOW_CLASS (klass);

  /* Install the vfuncs this windowless implementation overrides */
  window_class->prepare = GST_DEBUG_FUNCPTR (gst_d3d11_window_dummy_prepare);
  window_class->unprepare =
      GST_DEBUG_FUNCPTR (gst_d3d11_window_dummy_unprepare);
  window_class->on_resize =
      GST_DEBUG_FUNCPTR (gst_d3d11_window_dummy_on_resize);
  window_class->open_shared_handle =
      GST_DEBUG_FUNCPTR (gst_d3d11_window_dummy_open_shared_handle);
  window_class->release_shared_handle =
      GST_DEBUG_FUNCPTR (gst_d3d11_window_dummy_release_shared_handle);
}
/* No per-instance state to set up; all resources are created lazily in
 * prepare() and open_shared_handle() */
static void
gst_d3d11_window_dummy_init (GstD3D11WindowDummy * self)
{
}
/* GstD3D11WindowClass::prepare vfunc.
 *
 * Configures the conversion pipeline for rendering onto an
 * application-provided texture: an optional ID3D11VideoProcessor (only when
 * the device is a hardware one and the format conversion is supported), a
 * shader-based color converter, and an overlay compositor.
 *
 * Returns FALSE and sets @error if converter/compositor creation fails;
 * @video_processor_available reports whether the processor path was set up.
 */
static gboolean
gst_d3d11_window_dummy_prepare (GstD3D11Window * window,
    guint display_width, guint display_height, GstCaps * caps,
    gboolean * video_processor_available, GError ** error)
{
  /* Drop objects from any previous prepare() call */
  g_clear_pointer (&window->processor, gst_d3d11_video_processor_free);
  g_clear_pointer (&window->converter, gst_d3d11_color_converter_free);
  g_clear_pointer (&window->compositor, gst_d3d11_overlay_compositor_free);

  /* We are supporting only RGBA, BGRA or RGB10A2_LE formats but we don't know
   * which format texture will be used at this moment */
  gst_video_info_from_caps (&window->info, caps);

  /* Output covers the full display area until on_resize() recomputes it */
  window->render_rect.left = 0;
  window->render_rect.top = 0;
  window->render_rect.right = display_width;
  window->render_rect.bottom = display_height;

  /* Input is the full video frame */
  window->input_rect.left = 0;
  window->input_rect.top = 0;
  window->input_rect.right = GST_VIDEO_INFO_WIDTH (&window->info);
  window->input_rect.bottom = GST_VIDEO_INFO_HEIGHT (&window->info);

  gst_video_info_set_format (&window->render_info,
      GST_VIDEO_FORMAT_BGRA, display_width, display_height);

  /* TODO: not sure which colorspace should be used, let's use BT709 since
   * it's default and most common one */
  window->render_info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
  window->render_info.colorimetry.transfer = GST_VIDEO_TRANSFER_BT709;
  window->render_info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;

  gst_d3d11_device_lock (window->device);

#if (DXGI_HEADER_VERSION >= 4)
  {
    const GstDxgiColorSpace *in_color_space =
        gst_d3d11_video_info_to_dxgi_color_space (&window->info);
    const GstD3D11Format *in_format =
        gst_d3d11_device_format_from_gst (window->device,
        GST_VIDEO_INFO_FORMAT (&window->info));
    gboolean hardware = FALSE;
    GstD3D11VideoProcessor *processor = NULL;
    guint i;
    /* The set of output formats an application texture may use */
    DXGI_FORMAT formats_to_check[] = {
      DXGI_FORMAT_R8G8B8A8_UNORM,
      DXGI_FORMAT_B8G8R8A8_UNORM,
      DXGI_FORMAT_R10G10B10A2_UNORM
    };

    /* Video processor requires a known input format/colorspace and a
     * hardware device */
    if (in_color_space && in_format &&
        in_format->dxgi_format != DXGI_FORMAT_UNKNOWN) {
      g_object_get (window->device, "hardware", &hardware, NULL);
    }

    if (hardware) {
      processor =
          gst_d3d11_video_processor_new (window->device,
          GST_VIDEO_INFO_WIDTH (&window->info),
          GST_VIDEO_INFO_HEIGHT (&window->info),
          display_width, display_height);
    }

    /* Check if video processor can support all possible output dxgi formats
     * (loop body only runs while processor is non-NULL, so the
     * in_format/in_color_space dereferences below are safe) */
    for (i = 0; i < G_N_ELEMENTS (formats_to_check) && processor; i++) {
      DXGI_FORMAT in_dxgi_format = in_format->dxgi_format;
      DXGI_FORMAT out_dxgi_format = formats_to_check[i];
      DXGI_COLOR_SPACE_TYPE in_dxgi_color_space =
          (DXGI_COLOR_SPACE_TYPE) in_color_space->dxgi_color_space_type;

      if (!gst_d3d11_video_processor_check_format_conversion (processor,
              in_dxgi_format, in_dxgi_color_space, out_dxgi_format,
              DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709)) {
        GST_DEBUG_OBJECT (window, "Conversion is not supported by device");
        g_clear_pointer (&processor, gst_d3d11_video_processor_free);
        break;
      }
    }

    if (processor) {
      gst_d3d11_video_processor_set_input_dxgi_color_space (processor,
          (DXGI_COLOR_SPACE_TYPE) in_color_space->dxgi_color_space_type);
      gst_d3d11_video_processor_set_output_dxgi_color_space (processor,
          DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709);
    }

    window->processor = processor;
  }
#endif

  *video_processor_available = !!window->processor;

  /* Shader-based converter is the mandatory fallback path */
  window->converter =
      gst_d3d11_color_converter_new (window->device, &window->info,
      &window->render_info);

  if (!window->converter) {
    GST_ERROR_OBJECT (window, "Cannot create converter");
    g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_FAILED,
        "Cannot create converter");
    goto error;
  }

  window->compositor =
      gst_d3d11_overlay_compositor_new (window->device, &window->render_info);
  if (!window->compositor) {
    GST_ERROR_OBJECT (window, "Cannot create overlay compositor");
    g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_FAILED,
        "Cannot create overlay compositor");
    goto error;
  }

  gst_d3d11_device_unlock (window->device);

  return TRUE;

error:
  /* Keep the lock/unlock pairing on the failure path */
  gst_d3d11_device_unlock (window->device);

  return FALSE;
}
/* Drops the fallback texture and its views. Safe to call when nothing was
 * allocated; all pointers end up nullptr. */
static void
gst_d3d11_window_dummy_clear_resources (GstD3D11WindowDummy * self)
{
  ID3D11VideoProcessorOutputView *pov = self->fallback_pov;
  ID3D11RenderTargetView *rtv = self->fallback_rtv;
  ID3D11Texture2D *texture = self->fallback_texture;

  /* Detach first so the object never holds a dangling pointer */
  self->fallback_pov = nullptr;
  self->fallback_rtv = nullptr;
  self->fallback_texture = nullptr;

  /* Views are released before the texture they were created from */
  if (pov)
    pov->Release ();
  if (rtv)
    rtv->Release ();
  if (texture)
    texture->Release ();
}
/* GstD3D11WindowClass::unprepare vfunc: releases the fallback texture and
 * views that may have been created for shared-handle rendering */
static void
gst_d3d11_window_dummy_unprepare (GstD3D11Window * window)
{
  gst_d3d11_window_dummy_clear_resources (GST_D3D11_WINDOW_DUMMY (window));
}
/* GstD3D11WindowClass::on_resize vfunc: recomputes render_rect for the new
 * output size, honoring force-aspect-ratio, and forces the next present to
 * re-setup its viewport */
static void
gst_d3d11_window_dummy_on_resize (GstD3D11Window * window,
    guint width, guint height)
{
  GstVideoRectangle output = { 0, };
  GstVideoRectangle result;

  output.w = width;
  output.h = height;

  if (!window->force_aspect_ratio) {
    /* Stretch: fill the whole output area */
    result = output;
  } else {
    /* Letterbox/pillarbox: center the render_info aspect inside the output */
    GstVideoRectangle video = { 0, };

    video.w = GST_VIDEO_INFO_WIDTH (&window->render_info);
    video.h = GST_VIDEO_INFO_HEIGHT (&window->render_info);

    gst_video_sink_center_rect (video, output, &result, TRUE);
  }

  window->render_rect.left = result.x;
  window->render_rect.top = result.y;
  window->render_rect.right = result.x + result.w;
  window->render_rect.bottom = result.y + result.h;

  window->first_present = TRUE;
}
/* Ensures a fallback texture (plus RTV and, when a video processor is in
 * use, a processor output view) exists that matches @shared_desc's format
 * and is at least as large. Reuses the cached one when possible; otherwise
 * clears and recreates all fallback resources. Returns FALSE on failure
 * (with all fallback resources cleared). */
static gboolean
gst_d3d11_window_dummy_setup_fallback_texture (GstD3D11Window * window,
    D3D11_TEXTURE2D_DESC * shared_desc)
{
  GstD3D11WindowDummy *self = GST_D3D11_WINDOW_DUMMY (window);
  D3D11_TEXTURE2D_DESC desc = { 0, };
  D3D11_RENDER_TARGET_VIEW_DESC rtv_desc;
  ID3D11Device *device_handle =
      gst_d3d11_device_get_device_handle (window->device);
  gboolean need_new_texture = FALSE;
  HRESULT hr;

  if (!self->fallback_texture) {
    GST_DEBUG_OBJECT (self,
        "We have no configured fallback texture, create new one");
    need_new_texture = TRUE;
  } else {
    /* NOTE: this fills desc with the old texture's description; fields not
     * overwritten below (MiscFlags, CPUAccessFlags) were zero when the old
     * texture was created, so reusing desc is safe */
    self->fallback_texture->GetDesc (&desc);
    if (shared_desc->Format != desc.Format) {
      GST_DEBUG_OBJECT (self, "Texture formats are different, create new one");
      need_new_texture = TRUE;
    } else if (shared_desc->Width > desc.Width ||
        shared_desc->Height > desc.Height) {
      GST_DEBUG_OBJECT (self, "Needs larger size of fallback texture");
      need_new_texture = TRUE;
    }
  }

  if (!need_new_texture)
    return TRUE;

  gst_d3d11_window_dummy_clear_resources (self);

  desc.Width = shared_desc->Width;
  desc.Height = shared_desc->Height;
  desc.MipLevels = 1;
  desc.ArraySize = 1;
  desc.Format = shared_desc->Format;
  desc.SampleDesc.Count = 1;
  desc.SampleDesc.Quality = 0;
  desc.Usage = D3D11_USAGE_DEFAULT;
  /* Needs to be both a converter source (SRV) and a render target */
  desc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;

  hr = device_handle->CreateTexture2D (&desc, NULL, &self->fallback_texture);
  if (!gst_d3d11_result (hr, window->device)) {
    GST_ERROR_OBJECT (self, "Couldn't create fallback texture");
    return FALSE;
  }

  /* DXGI_FORMAT_UNKNOWN makes the view inherit the texture's format */
  rtv_desc.Format = DXGI_FORMAT_UNKNOWN;
  rtv_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
  rtv_desc.Texture2D.MipSlice = 0;

  hr = device_handle->CreateRenderTargetView (self->fallback_texture, &rtv_desc,
      &self->fallback_rtv);
  if (!gst_d3d11_result (hr, window->device)) {
    GST_ERROR_OBJECT (self,
        "Couldn't get render target view from fallback texture");
    gst_d3d11_window_dummy_clear_resources (self);
    return FALSE;
  }

  /* Only needed when the video processor path renders into the fallback */
  if (window->processor) {
    D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC pov_desc;

    pov_desc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;
    pov_desc.Texture2D.MipSlice = 0;

    if (!gst_d3d11_video_processor_create_output_view (window->processor,
            &pov_desc, (ID3D11Resource *) self->fallback_texture,
            &self->fallback_pov)) {
      GST_ERROR_OBJECT (window,
          "ID3D11VideoProcessorOutputView is unavailable");
      gst_d3d11_window_dummy_clear_resources (self);
      return FALSE;
    }
  }

  return TRUE;
}
/* GstD3D11WindowClass::open_shared_handle vfunc.
 *
 * Opens the application's shared texture handle on our device, creates the
 * views needed for rendering, and acquires the keyed mutex (if the texture
 * has one). On success ownership of the opened COM objects is moved into
 * @data for release_shared_handle() to release. Returns FALSE with nothing
 * leaked on failure. */
static gboolean
gst_d3d11_window_dummy_open_shared_handle (GstD3D11Window * window,
    GstD3D11WindowSharedHandleData * data)
{
  GstD3D11WindowDummy *self = GST_D3D11_WINDOW_DUMMY (window);
  GstD3D11Device *device = window->device;
  ID3D11Device *device_handle;
  HRESULT hr;
  ID3D11Texture2D *texture = NULL;
  IDXGIKeyedMutex *keyed_mutex = NULL;
  ID3D11VideoProcessorOutputView *pov = NULL;
  ID3D11RenderTargetView *rtv = NULL;
  D3D11_TEXTURE2D_DESC desc;
  gboolean use_keyed_mutex = FALSE;
  gboolean need_fallback_texture = FALSE;

  device_handle = gst_d3d11_device_get_device_handle (device);

  /* NT handles must be opened via ID3D11Device1::OpenSharedResource1 */
  if ((data->texture_misc_flags & D3D11_RESOURCE_MISC_SHARED_NTHANDLE) ==
      D3D11_RESOURCE_MISC_SHARED_NTHANDLE) {
    ComPtr < ID3D11Device1 > device1_handle;

    hr = device_handle->QueryInterface (IID_PPV_ARGS (&device1_handle));
    if (!gst_d3d11_result (hr, device))
      return FALSE;

    hr = device1_handle->OpenSharedResource1 (data->shared_handle,
        IID_PPV_ARGS (&texture));
  } else {
    hr = device_handle->OpenSharedResource (data->shared_handle,
        IID_PPV_ARGS (&texture));
  }

  if (!gst_d3d11_result (hr, device))
    return FALSE;

  /* Trust the opened texture's actual MiscFlags rather than what the
   * application claimed in texture_misc_flags */
  texture->GetDesc (&desc);
  use_keyed_mutex = (desc.MiscFlags & D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX) ==
      D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX;

  if (use_keyed_mutex) {
    hr = texture->QueryInterface (IID_PPV_ARGS (&keyed_mutex));
    if (!gst_d3d11_result (hr, device))
      goto out;
  }

  if (window->processor) {
    if (use_keyed_mutex) {
      /* With a keyed mutex the processor can write the shared texture
       * directly */
      D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC pov_desc;

      pov_desc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;
      pov_desc.Texture2D.MipSlice = 0;

      if (!gst_d3d11_video_processor_create_output_view (window->processor,
              &pov_desc, (ID3D11Resource *) texture, &pov)) {
        /* Non-fatal: present falls back to the shader converter path */
        GST_WARNING_OBJECT (window,
            "ID3D11VideoProcessorOutputView is unavailable");
      }
    } else {
      /* HACK: If the external texture was created without a keyed mutex
       * and we need to use the video processor to convert the decoder
       * output texture to the external texture, the texture converted by
       * the video processor seems to be broken.
       * Probably that's because of missing flush/sync API around the video
       * processor (e.g., ID3D11VideoContext and ID3D11VideoProcessor have
       * no flushing api such as ID3D11DeviceContext::Flush).
       * To workaround the case, we need to use a fallback texture and copy
       * back to the external texture
       */
      need_fallback_texture = TRUE;

      GST_TRACE_OBJECT (window,
          "We are using video processor but keyed mutex is unavailable");
      if (!gst_d3d11_window_dummy_setup_fallback_texture (window, &desc))
        goto out;
    }
  }

  hr = device_handle->CreateRenderTargetView ((ID3D11Resource *) texture,
      NULL, &rtv);
  if (!gst_d3d11_result (hr, device))
    goto out;

  if (keyed_mutex) {
    /* Block until the application hands the texture over to us */
    hr = keyed_mutex->AcquireSync (data->acquire_key, INFINITE);
    if (!gst_d3d11_result (hr, device))
      goto out;
  }

  /* Everything is prepared now */
  gst_d3d11_window_dummy_on_resize (window, desc.Width, desc.Height);

  /* Move owned resources */
  data->texture = texture;
  data->keyed_mutex = keyed_mutex;
  data->pov = pov;
  data->rtv = rtv;
  if (need_fallback_texture) {
    data->fallback_pov = self->fallback_pov;
    data->fallback_rtv = self->fallback_rtv;
  } else {
    data->fallback_pov = nullptr;
    data->fallback_rtv = nullptr;
  }

  return TRUE;

out:
  if (texture)
    texture->Release ();
  if (keyed_mutex)
    keyed_mutex->Release ();
  if (pov)
    pov->Release ();
  if (rtv)
    rtv->Release ();

  return FALSE;
}
/* GstD3D11WindowClass::release_shared_handle vfunc.
 *
 * Hands the shared texture back to the application. With a keyed mutex this
 * is ReleaseSync(); without one we copy the fallback texture into the shared
 * texture (if the fallback path was used) and then busy-wait on a
 * D3D11_QUERY_EVENT until all issued GPU commands have finished. Releases
 * every COM object that open_shared_handle() stored in @data, on both
 * success and failure paths. Returns FALSE if the event query cannot be
 * created or GPU sync fails. */
static gboolean
gst_d3d11_window_dummy_release_shared_handle (GstD3D11Window * window,
    GstD3D11WindowSharedHandleData * data)
{
  GstD3D11WindowDummy *self = GST_D3D11_WINDOW_DUMMY (window);
  GstD3D11Device *device = window->device;
  HRESULT hr;
  gboolean ret = TRUE;

  /* TODO: cache owned resource for the later reuse? */
  if (data->keyed_mutex) {
    hr = data->keyed_mutex->ReleaseSync (data->release_key);
    gst_d3d11_result (hr, device);

    data->keyed_mutex->Release ();
  } else {
    ComPtr < ID3D11Query > query;
    D3D11_QUERY_DESC query_desc;
    ID3D11Device *device_handle = gst_d3d11_device_get_device_handle (device);
    ID3D11DeviceContext *context_handle =
        gst_d3d11_device_get_device_context_handle (device);
    BOOL sync_done = FALSE;

    /* If keyed mutex is not used, let's handle sync manually by using
     * ID3D11Query. Issued GPU commands might not be finished yet */
    query_desc.Query = D3D11_QUERY_EVENT;
    query_desc.MiscFlags = 0;

    hr = device_handle->CreateQuery (&query_desc, &query);
    if (!gst_d3d11_result (hr, device)) {
      GST_ERROR_OBJECT (self, "Couldn't Create event query");
      /* Was an early return that leaked data->rtv/pov/texture; fall through
       * to the common cleanup instead */
      ret = FALSE;
      goto out;
    }

    /* Copy from fallback texture to user's texture */
    if (data->fallback_rtv) {
      D3D11_BOX src_box;
      D3D11_TEXTURE2D_DESC desc;

      data->texture->GetDesc (&desc);

      src_box.left = 0;
      src_box.top = 0;
      src_box.front = 0;
      src_box.back = 1;
      src_box.right = desc.Width;
      src_box.bottom = desc.Height;

      context_handle->CopySubresourceRegion (data->texture, 0, 0, 0, 0,
          self->fallback_texture, 0, &src_box);
    }

    context_handle->End (query.Get ());

    /* Wait until all issued GPU commands are finished.
     * FIX: capture GetData()'s return value; previously it was discarded so
     * hr kept the stale CreateQuery() result, the error check below could
     * never trigger, and a device-removed error would spin forever */
    do {
      hr = context_handle->GetData (query.Get (), &sync_done, sizeof (BOOL),
          0);
    } while (!sync_done && (hr == S_OK || hr == S_FALSE));

    if (!gst_d3d11_result (hr, device)) {
      GST_ERROR_OBJECT (self, "Couldn't sync GPU operation");
      ret = FALSE;
    }
  }

out:
  /* Release resources transferred from open_shared_handle() */
  if (data->rtv)
    data->rtv->Release ();
  if (data->pov)
    data->pov->Release ();
  if (data->texture)
    data->texture->Release ();

  return ret;
}
/* Creates a windowless (dummy) GstD3D11Window bound to @device, with the
 * floating reference sunk. Returns NULL only on precondition failure. */
GstD3D11Window *
gst_d3d11_window_dummy_new (GstD3D11Device * device)
{
  GstD3D11Window *self;

  g_return_val_if_fail (GST_IS_D3D11_DEVICE (device), NULL);

  self = (GstD3D11Window *) g_object_new (GST_TYPE_D3D11_WINDOW_DUMMY,
      "d3d11device", device, NULL);
  g_object_ref_sink (self);

  /* No native window to create, so this window is usable immediately */
  self->initialized = TRUE;

  return self;
}

View file

@ -0,0 +1,38 @@
/*
* GStreamer
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_D3D11_WINDOW_DUMMY_H__
#define __GST_D3D11_WINDOW_DUMMY_H__
#include <gst/gst.h>
#include <gst/video/video.h>
#include "gstd3d11window.h"
G_BEGIN_DECLS
#define GST_TYPE_D3D11_WINDOW_DUMMY (gst_d3d11_window_dummy_get_type())
G_DECLARE_FINAL_TYPE (GstD3D11WindowDummy,
gst_d3d11_window_dummy, GST, D3D11_WINDOW_DUMMY, GstD3D11Window);
GstD3D11Window * gst_d3d11_window_dummy_new (GstD3D11Device * device);
G_END_DECLS
#endif /* __GST_D3D11_WINDOW_DUMMY_H__ */

View file

@ -13,6 +13,7 @@ d3d11_sources = [
'gstd3d11videosink.c', 'gstd3d11videosink.c',
'gstd3d11videosinkbin.c', 'gstd3d11videosinkbin.c',
'gstd3d11window.cpp', 'gstd3d11window.cpp',
'gstd3d11window_dummy.cpp',
'plugin.c', 'plugin.c',
] ]