/* GStreamer
 * Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
 * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-d3d11videosinkelement
 * @title: d3d11videosinkelement
 *
 * Direct3D11 based video render element. This element accepts only Direct3D11
 * textures as input. Use #d3d11videosink instead, which is a convenience
 * wrapper around #d3d11videosinkelement with #d3d11upload.
 *
 * ## Example launch line
 * ```
 * gst-launch-1.0 videotestsrc ! d3d11upload ! d3d11videosinkelement
 * ```
 * This pipeline will display a test video stream on screen via
 * d3d11videosinkelement.
 *
 * Since: 1.18
 *
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstd3d11videosink.h"
#include "gstd3d11videoprocessor.h"
#include "gstd3d11pluginutils.h"

#if GST_D3D11_WINAPI_APP
#include "gstd3d11window_corewindow.h"
#include "gstd3d11window_swapchainpanel.h"
#endif
#if (!GST_D3D11_WINAPI_ONLY_APP)
#include "gstd3d11window_win32.h"
#endif

/* Drawing on an application's own texture ("draw-on-shared-texture") instead
 * of a window handle. To use this feature, an application should:
 *
 *   1) Enable it with the "draw-on-shared-texture" property
 *   2) Watch the "begin-draw" signal
 *   3) In the "begin-draw" signal handler, request drawing via the "draw"
 *      action signal. The "draw" action must be emitted before the
 *      "begin-draw" handler returns.
 *
 * NOTE 1) For texture sharing, creating the texture with the
 * D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX flag is strongly recommended when
 * possible, because we cannot ensure synchronization of a texture created
 * with D3D11_RESOURCE_MISC_SHARED, which can cause glitches in the
 * ID3D11VideoProcessor use case.
 *
 * NOTE 2) Direct3D9Ex does not support sharing textures created with
 * D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX. In other words,
 * D3D11_RESOURCE_MISC_SHARED is the only option for Direct3D11/Direct3D9Ex
 * interop.
 *
 * NOTE 3) Because of the missing synchronization around ID3D11VideoProcessor,
 * if the shared texture was created with D3D11_RESOURCE_MISC_SHARED,
 * d3d11videosink might use a fallback texture to convert the DXVA texture
 * into a normal Direct3D texture; the converted texture is then copied into
 * the user-provided shared texture.
 *
 * Why not use the generic appsink approach? For an application to store video
 * data produced by GStreamer in its own texture, there are two possible
 * approaches: copying our texture into the application's texture, or drawing
 * on the application's texture directly. The former (the appsink way) cannot
 * be zero-copy by nature; to support zero-copy processing, we need to draw on
 * the application's texture directly. For example, assume the application
 * wants an RGBA texture and uses
 * "d3d11h264dec ! d3d11convert ! video/x-raw(memory:D3D11Memory),format=RGBA ! appsink".
 * In that case d3d11convert allocates new texture(s) for the RGBA format, and
 * the application then copies our RGBA texture into its own texture, so one
 * extra texture allocation plus a per-frame GPU copy happen. Moreover, for
 * the application's Direct3D11 device to be able to read our texture data, we
 * would need to allocate the texture with additional flags, which would be
 * another implementation burden on our side. With this feature, the pipeline
 * can be configured simply as "d3d11h264dec ! d3d11videosink", saving at
 * least one texture allocation and a per-frame texture copy, since
 * d3d11videosink converts the incoming texture into the application's texture
 * format directly, without a copy.
 *
 * What if we exposed the texture without conversion and the application did
 * the conversion by itself? As mentioned above, for the application to be
 * able to access our texture from its Direct3D11 device, we need to allocate
 * the texture in a special form, which is not always possible. Also, if a
 * texture belongs to the decoder DPB, exposing it to the application is
 * unsafe, and a usual Direct3D11 shader cannot handle such a texture; the
 * ID3D11VideoProcessor API would have to be used for format conversion, which
 * would be an implementation burden for the application.
 *
 * Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1873>
 */
#include "gstd3d11window_dummy.h"

enum
{
  PROP_0,
  PROP_ADAPTER,
  PROP_FORCE_ASPECT_RATIO,
  PROP_ENABLE_NAVIGATION_EVENTS,
  PROP_FULLSCREEN_TOGGLE_MODE,
  PROP_FULLSCREEN,
  PROP_RENDER_STATS,
  PROP_DRAW_ON_SHARED_TEXTURE,
};

#define DEFAULT_ADAPTER -1
#define DEFAULT_FORCE_ASPECT_RATIO TRUE
#define DEFAULT_ENABLE_NAVIGATION_EVENTS TRUE
#define DEFAULT_FULLSCREEN_TOGGLE_MODE GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_NONE
#define DEFAULT_FULLSCREEN FALSE
#define DEFAULT_DRAW_ON_SHARED_TEXTURE FALSE

enum
{
  /* signals */
  SIGNAL_BEGIN_DRAW,

  /* actions */
  SIGNAL_DRAW,

  LAST_SIGNAL
};

static guint gst_d3d11_video_sink_signals[LAST_SIGNAL] = { 0, };
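
/* Template caps of the sink pad: GST_D3D11_SINK_FORMATS carried in D3D11
 * memory, optionally together with the GstVideoOverlayCompositionMeta caps
 * feature. */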
static GstStaticCaps pad_template_caps =
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES
    (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, GST_D3D11_SINK_FORMATS) "; "
    GST_VIDEO_CAPS_MAKE_WITH_FEATURES
    (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY ","
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
        GST_D3D11_SINK_FORMATS));

GST_DEBUG_CATEGORY (d3d11_video_sink_debug);
#define GST_CAT_DEFAULT d3d11_video_sink_debug

struct _GstD3D11VideoSink
{
  GstVideoSink parent;
  GstD3D11Device *device;
  GstD3D11Window *window;
  gint video_width;
  gint video_height;

  GstVideoInfo info;

  guintptr window_id;

  /* properties */
  gint adapter;
  gboolean force_aspect_ratio;
  gboolean enable_navigation_events;
  GstD3D11WindowFullscreenToggleMode fullscreen_toggle_mode;
  gboolean fullscreen;
  gboolean draw_on_shared_texture;

  /* saved render rectangle until we have a window */
  GstVideoRectangle render_rect;
  gboolean pending_render_rect;

  GstBufferPool *fallback_pool;
  gboolean can_convert;
  gboolean have_video_processor;
  gboolean processor_in_use;

  /* For drawing on user texture */
  GstD3D11VideoSinkCallbacks callbacks;
  gpointer user_data;
  gboolean drawing;
  GstBuffer *current_buffer;
  GRecMutex draw_lock;
};

static void gst_d3d11_videosink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_d3d11_videosink_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
static void gst_d3d11_video_sink_finalize (GObject * object);
static gboolean
gst_d3d11_video_sink_draw_action (GstD3D11VideoSink * self,
    gpointer shared_handle, guint texture_misc_flags, guint64 acquire_key,
    guint64 release_key);

static void
gst_d3d11_video_sink_video_overlay_init (GstVideoOverlayInterface * iface);
static void
gst_d3d11_video_sink_navigation_init (GstNavigationInterface * iface);

static void gst_d3d11_video_sink_set_context (GstElement * element,
    GstContext * context);
static GstCaps *gst_d3d11_video_sink_get_caps (GstBaseSink * sink,
    GstCaps * filter);
static gboolean gst_d3d11_video_sink_set_caps (GstBaseSink * sink,
    GstCaps * caps);

static gboolean gst_d3d11_video_sink_start (GstBaseSink * sink);
static gboolean gst_d3d11_video_sink_stop (GstBaseSink * sink);
static gboolean gst_d3d11_video_sink_propose_allocation (GstBaseSink * sink,
    GstQuery * query);
static gboolean gst_d3d11_video_sink_query (GstBaseSink * sink,
    GstQuery * query);
static gboolean gst_d3d11_video_sink_unlock (GstBaseSink * sink);
static gboolean gst_d3d11_video_sink_unlock_stop (GstBaseSink * sink);

static GstFlowReturn
gst_d3d11_video_sink_show_frame (GstVideoSink * sink, GstBuffer * buf);
static gboolean gst_d3d11_video_sink_prepare_window (GstD3D11VideoSink * self);

#define gst_d3d11_video_sink_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstD3D11VideoSink, gst_d3d11_video_sink,
    GST_TYPE_VIDEO_SINK,
    G_IMPLEMENT_INTERFACE (GST_TYPE_VIDEO_OVERLAY,
        gst_d3d11_video_sink_video_overlay_init);
    G_IMPLEMENT_INTERFACE (GST_TYPE_NAVIGATION,
        gst_d3d11_video_sink_navigation_init);
    GST_DEBUG_CATEGORY_INIT (d3d11_video_sink_debug,
        "d3d11videosink", 0, "Direct3D11 Video Sink"));

static void
gst_d3d11_video_sink_class_init (GstD3D11VideoSinkClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS (klass);
  GstVideoSinkClass *videosink_class = GST_VIDEO_SINK_CLASS (klass);
  GstCaps *caps;

  gobject_class->set_property = gst_d3d11_videosink_set_property;
  gobject_class->get_property = gst_d3d11_videosink_get_property;
  gobject_class->finalize = gst_d3d11_video_sink_finalize;

  g_object_class_install_property (gobject_class, PROP_ADAPTER,
      g_param_spec_int ("adapter", "Adapter",
          "Adapter index for creating device (-1 for default)",
          -1, G_MAXINT32, DEFAULT_ADAPTER,
          (GParamFlags) (G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
              G_PARAM_STATIC_STRINGS)));

  g_object_class_install_property (gobject_class, PROP_FORCE_ASPECT_RATIO,
      g_param_spec_boolean ("force-aspect-ratio",
          "Force aspect ratio",
          "When enabled, scaling will respect original aspect ratio",
          DEFAULT_FORCE_ASPECT_RATIO,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  g_object_class_install_property (gobject_class,
      PROP_ENABLE_NAVIGATION_EVENTS,
      g_param_spec_boolean ("enable-navigation-events",
          "Enable navigation events",
          "When enabled, navigation events are sent upstream",
          DEFAULT_ENABLE_NAVIGATION_EVENTS,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  g_object_class_install_property (gobject_class, PROP_FULLSCREEN_TOGGLE_MODE,
      g_param_spec_flags ("fullscreen-toggle-mode",
          "Full screen toggle mode",
          "Full screen toggle mode used to trigger fullscreen mode change",
          GST_D3D11_WINDOW_TOGGLE_MODE_GET_TYPE, DEFAULT_FULLSCREEN_TOGGLE_MODE,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  g_object_class_install_property (gobject_class, PROP_FULLSCREEN,
      g_param_spec_boolean ("fullscreen",
          "fullscreen",
          "Ignored when \"fullscreen-toggle-mode\" does not include \"property\"",
          DEFAULT_FULLSCREEN,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  /**
   * GstD3D11VideoSink:draw-on-shared-texture:
   *
   * Instruct the sink to draw on a shared texture provided by the user.
   * The user must watch the #d3d11videosinkelement::begin-draw signal and
   * should emit the #d3d11videosinkelement::draw action signal from the
   * #d3d11videosinkelement::begin-draw signal handler.
   *
   * Currently supported formats for the user texture are:
   * - DXGI_FORMAT_R8G8B8A8_UNORM
   * - DXGI_FORMAT_B8G8R8A8_UNORM
   * - DXGI_FORMAT_R10G10B10A2_UNORM
   *
   * Since: 1.20
   */
  g_object_class_install_property (gobject_class, PROP_DRAW_ON_SHARED_TEXTURE,
      g_param_spec_boolean ("draw-on-shared-texture",
          "Draw on shared texture",
          "Draw on user provided shared texture instead of window. "
          "When enabled, user can pass application's own texture to sink "
          "by using \"draw\" action signal on \"begin-draw\" signal handler, "
          "so that sink can draw video data on application's texture. "
          "Supported texture formats for user texture are "
          "DXGI_FORMAT_R8G8B8A8_UNORM, DXGI_FORMAT_B8G8R8A8_UNORM, and "
          "DXGI_FORMAT_R10G10B10A2_UNORM.",
          DEFAULT_DRAW_ON_SHARED_TEXTURE,
          (GParamFlags) (G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
              G_PARAM_STATIC_STRINGS)));
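
  /* Usage sketch (an illustration, not code from this file): the property is
   * GST_PARAM_MUTABLE_READY, so an application would enable it before the
   * pipeline starts, e.g.:
   *
   *   g_object_set (sink, "draw-on-shared-texture", TRUE, NULL);
   */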
d3d11videosink: Add support for drawing on application's own texture
Add a way to support drawing on application's texture instead of
usual window handle.
To make use of this new feature, application should follow below step.
1) Enable this feature by using "draw-on-shared-texture" property
2) Watch "begin-draw" signal
3) On "begin-draw" signal handler, application can request drawing
by using "draw" signal action. Note that "draw" signal action
should be happen before "begin-draw" signal handler is returned
NOTE 1) For texture sharing, creating a texture with
D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX flag is strongly recommend
if possible because we cannot ensure sync a texture
which was created with D3D11_RESOURCE_MISC_SHARED
and it would cause glitch with ID3D11VideoProcessor use case.
NOTE 2) Direct9Ex doesn't support texture sharing which was
created with D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX. In other words,
D3D11_RESOURCE_MISC_SHARED is the only option for Direct3D11/Direct9Ex interop.
NOTE 3) Because of missing synchronization around ID3D11VideoProcessor,
If shared texture was created with D3D11_RESOURCE_MISC_SHARED,
d3d11videosink might use fallback texture to convert DXVA texture
to normal Direct3D texture. Then converted texture will be
copied to user-provided shared texture.
* Why not use generic appsink approach?
In order for application to be able to store video data
which was produced by GStreamer in application's own texture,
there would be two possible approaches,
one is copying our texture into application's own texture,
and the other is drawing on application's own texture directly.
The former (appsink way) cannot be a zero-copy by nature.
In order to support zero-copy processing, we need to draw on
application's own texture directly.
For example, assume that application wants RGBA texture.
Then we can imagine following case.
"d3d11h264dec ! d3d11convert ! video/x-raw(memory:D3D11Memory),format=RGBA ! appsink"
^
|_ allocate new Direct3D texture for RGBA format
In above case, d3d11convert will allocate new texture(s) for RGBA format
and then application will copy again the our RGBA texutre into
application's own texture. One texture allocation plus per frame GPU copy will hanppen
in that case therefore.
Moreover, in order for application to be able to access
our texture, we need to allocate texture with additional flags for
application's Direct3D11 device to be able to read texture data.
That would be another implementation burden on our side
But with this MR, we can configure pipeline in this way
"d3d11h264dec ! d3d11videosink".
In that way, we can save at least one texture allocation and
per frame texutre copy since d3d11videosink will convert incoming texture
into application's texture format directly without copy.
* What if we expose texture without conversion and application does
conversion by itself?
As mentioned above, for application to be able to access our texture
from application's Direct3D11 device, we need to allocate texture
in a special form. But in some case, that might not be possible.
Also, if a texture belongs to decoder DPB, exposing such texture
to application is unsafe and usual Direct3D11 shader cannot handle
such texture. To convert format, ID3D11VideoProcessor API needs to
be used but that would be a implementation burden for application.
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1873>
2020-12-23 14:49:12 +00:00
|
|
|
|
|
|
|
/**
|
2021-02-21 08:38:38 +00:00
|
|
|
* GstD3D11VideoSink::begin-draw:
|
|
|
|
* @videosink: the #d3d11videosinkelement
|
d3d11videosink: Add support for drawing on application's own texture
Add a way to support drawing on application's texture instead of
usual window handle.
To make use of this new feature, application should follow below step.
1) Enable this feature by using "draw-on-shared-texture" property
2) Watch "begin-draw" signal
3) On "begin-draw" signal handler, application can request drawing
by using "draw" signal action. Note that "draw" signal action
should be happen before "begin-draw" signal handler is returned
NOTE 1) For texture sharing, creating a texture with
D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX flag is strongly recommend
if possible because we cannot ensure sync a texture
which was created with D3D11_RESOURCE_MISC_SHARED
and it would cause glitch with ID3D11VideoProcessor use case.
NOTE 2) Direct9Ex doesn't support texture sharing which was
created with D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX. In other words,
D3D11_RESOURCE_MISC_SHARED is the only option for Direct3D11/Direct9Ex interop.
NOTE 3) Because of missing synchronization around ID3D11VideoProcessor,
If shared texture was created with D3D11_RESOURCE_MISC_SHARED,
d3d11videosink might use fallback texture to convert DXVA texture
to normal Direct3D texture. Then converted texture will be
copied to user-provided shared texture.
* Why not use generic appsink approach?
In order for application to be able to store video data
which was produced by GStreamer in application's own texture,
there would be two possible approaches,
one is copying our texture into application's own texture,
and the other is drawing on application's own texture directly.
The former (appsink way) cannot be a zero-copy by nature.
In order to support zero-copy processing, we need to draw on
application's own texture directly.
For example, assume that application wants RGBA texture.
Then we can imagine following case.
"d3d11h264dec ! d3d11convert ! video/x-raw(memory:D3D11Memory),format=RGBA ! appsink"
^
|_ allocate new Direct3D texture for RGBA format
In above case, d3d11convert will allocate new texture(s) for RGBA format
and then application will copy again the our RGBA texutre into
application's own texture. One texture allocation plus per frame GPU copy will hanppen
in that case therefore.
Moreover, in order for application to be able to access
our texture, we need to allocate texture with additional flags for
application's Direct3D11 device to be able to read texture data.
That would be another implementation burden on our side
But with this MR, we can configure pipeline in this way
"d3d11h264dec ! d3d11videosink".
In that way, we can save at least one texture allocation and
per frame texutre copy since d3d11videosink will convert incoming texture
into application's texture format directly without copy.
* What if we expose texture without conversion and application does
conversion by itself?
As mentioned above, for application to be able to access our texture
from application's Direct3D11 device, we need to allocate texture
in a special form. But in some case, that might not be possible.
Also, if a texture belongs to decoder DPB, exposing such texture
to application is unsafe and usual Direct3D11 shader cannot handle
such texture. To convert format, ID3D11VideoProcessor API needs to
be used but that would be a implementation burden for application.
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1873>
2020-12-23 14:49:12 +00:00
|
|
|
*
|
2021-02-21 08:38:38 +00:00
|
|
|
* Emitted when sink has a texture to draw. Application needs to invoke
|
|
|
|
* #d3d11videosinkelement::draw action signal before returning from
|
|
|
|
* #d3d11videosinkelement::begin-draw signal handler.
   *
   * Since: 1.20
   */
  gst_d3d11_video_sink_signals[SIGNAL_BEGIN_DRAW] =
      g_signal_new ("begin-draw", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
      G_STRUCT_OFFSET (GstD3D11VideoSinkClass, begin_draw),
      NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);

  /**
   * GstD3D11VideoSink::draw:
   * @videosink: the #d3d11videosinkelement
   * @shared_handle: a pointer to a HANDLE
   * @texture_misc_flags: a D3D11_RESOURCE_MISC_FLAG value
   * @acquire_key: a key value used for IDXGIKeyedMutex::AcquireSync
   * @release_key: a key value used for IDXGIKeyedMutex::ReleaseSync
   *
   * Draws on a shared texture. @shared_handle must be a valid pointer to
   * a HANDLE which was obtained via IDXGIResource::GetSharedHandle or
   * IDXGIResource1::CreateSharedHandle.
   *
   * If the texture was created with the D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX
   * flag, the caller must specify valid @acquire_key and @release_key values.
   * Otherwise (i.e., created with the D3D11_RESOURCE_MISC_SHARED flag),
   * @acquire_key and @release_key are ignored.
   *
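   * A sketch of one way the application might create a compatible shared
   * texture and obtain the handle to pass here (error handling omitted;
   * app_device, width, and height are illustrative):
   *
   * |[<!-- language="C" -->
   * D3D11_TEXTURE2D_DESC desc = { 0, };
   * ID3D11Texture2D *texture = NULL;
   * IDXGIResource *resource = NULL;
   * HANDLE shared_handle = NULL;
   *
   * desc.Width = width;
   * desc.Height = height;
   * desc.MipLevels = 1;
   * desc.ArraySize = 1;
   * desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
   * desc.SampleDesc.Count = 1;
   * desc.Usage = D3D11_USAGE_DEFAULT;
   * desc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
   * desc.MiscFlags = D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX;
   *
   * app_device->CreateTexture2D (&desc, NULL, &texture);
   * texture->QueryInterface (__uuidof (IDXGIResource), (void **) &resource);
   * resource->GetSharedHandle (&shared_handle);
   * ]|
   *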
   * Since: 1.20
   */
  gst_d3d11_video_sink_signals[SIGNAL_DRAW] =
      g_signal_new ("draw", G_TYPE_FROM_CLASS (klass),
      (GSignalFlags) (G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
      G_STRUCT_OFFSET (GstD3D11VideoSinkClass, draw), NULL, NULL, NULL,
      G_TYPE_BOOLEAN, 4, G_TYPE_POINTER, G_TYPE_UINT, G_TYPE_UINT64,
      G_TYPE_UINT64);

  element_class->set_context =
      GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_set_context);

  gst_element_class_set_static_metadata (element_class,
      "Direct3D11 video sink", "Sink/Video",
      "A Direct3D11 based videosink",
      "Seungha Yang <seungha.yang@navercorp.com>");

  caps = gst_d3d11_get_updated_template_caps (&pad_template_caps);
  gst_element_class_add_pad_template (element_class,
      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps));
  gst_caps_unref (caps);

  basesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_get_caps);
  basesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_set_caps);
  basesink_class->start = GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_start);
  basesink_class->stop = GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_stop);
  basesink_class->propose_allocation =
      GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_propose_allocation);
  basesink_class->query = GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_query);
  basesink_class->unlock = GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_unlock);
  basesink_class->unlock_stop =
      GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_unlock_stop);

  videosink_class->show_frame =
      GST_DEBUG_FUNCPTR (gst_d3d11_video_sink_show_frame);

  klass->draw = gst_d3d11_video_sink_draw_action;

  gst_type_mark_as_plugin_api (GST_D3D11_WINDOW_TOGGLE_MODE_GET_TYPE,
      (GstPluginAPIFlags) 0);
}

static void
gst_d3d11_video_sink_init (GstD3D11VideoSink * self)
{
  self->adapter = DEFAULT_ADAPTER;
  self->force_aspect_ratio = DEFAULT_FORCE_ASPECT_RATIO;
  self->enable_navigation_events = DEFAULT_ENABLE_NAVIGATION_EVENTS;
  self->fullscreen_toggle_mode = DEFAULT_FULLSCREEN_TOGGLE_MODE;
  self->fullscreen = DEFAULT_FULLSCREEN;
  self->draw_on_shared_texture = DEFAULT_DRAW_ON_SHARED_TEXTURE;

  g_rec_mutex_init (&self->draw_lock);
}

static void
gst_d3d11_videosink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (object);

  GST_OBJECT_LOCK (self);
  switch (prop_id) {
    case PROP_ADAPTER:
      self->adapter = g_value_get_int (value);
      break;
    case PROP_FORCE_ASPECT_RATIO:
      self->force_aspect_ratio = g_value_get_boolean (value);
      if (self->window)
        g_object_set (self->window,
            "force-aspect-ratio", self->force_aspect_ratio, NULL);
      break;
    case PROP_ENABLE_NAVIGATION_EVENTS:
      self->enable_navigation_events = g_value_get_boolean (value);
      if (self->window) {
        g_object_set (self->window,
            "enable-navigation-events", self->enable_navigation_events, NULL);
      }
      break;
    case PROP_FULLSCREEN_TOGGLE_MODE:
      self->fullscreen_toggle_mode =
          (GstD3D11WindowFullscreenToggleMode) g_value_get_flags (value);
      if (self->window) {
        g_object_set (self->window,
            "fullscreen-toggle-mode", self->fullscreen_toggle_mode, NULL);
      }
      break;
    case PROP_FULLSCREEN:
      self->fullscreen = g_value_get_boolean (value);
      if (self->window) {
        g_object_set (self->window, "fullscreen", self->fullscreen, NULL);
      }
      break;
    case PROP_DRAW_ON_SHARED_TEXTURE:
      self->draw_on_shared_texture = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (self);
}

static void
gst_d3d11_videosink_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (object);

  switch (prop_id) {
    case PROP_ADAPTER:
      g_value_set_int (value, self->adapter);
      break;
    case PROP_FORCE_ASPECT_RATIO:
      g_value_set_boolean (value, self->force_aspect_ratio);
      break;
    case PROP_ENABLE_NAVIGATION_EVENTS:
      g_value_set_boolean (value, self->enable_navigation_events);
      break;
    case PROP_FULLSCREEN_TOGGLE_MODE:
      g_value_set_flags (value, self->fullscreen_toggle_mode);
      break;
    case PROP_FULLSCREEN:
      if (self->window) {
        g_object_get_property (G_OBJECT (self->window), pspec->name, value);
      } else {
        g_value_set_boolean (value, self->fullscreen);
      }
      break;
    case PROP_DRAW_ON_SHARED_TEXTURE:
      g_value_set_boolean (value, self->draw_on_shared_texture);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_d3d11_video_sink_finalize (GObject * object)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (object);

  g_rec_mutex_clear (&self->draw_lock);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_d3d11_video_sink_set_context (GstElement * element, GstContext * context)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (element);

  gst_d3d11_handle_set_context (element, context, self->adapter, &self->device);

  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}

static GstCaps *
gst_d3d11_video_sink_get_supported_caps (GstD3D11VideoSink * self,
    D3D11_FORMAT_SUPPORT flags)
{
  GstD3D11Device *device;
  ID3D11Device *d3d11_device;
  HRESULT hr;
  guint i;
  GValue v_list = G_VALUE_INIT;
  GstCaps *supported_caps;
  static const GstVideoFormat format_list[] = {
    GST_VIDEO_FORMAT_BGRA,
    GST_VIDEO_FORMAT_RGBA,
    GST_VIDEO_FORMAT_RGB10A2_LE,
    GST_VIDEO_FORMAT_VUYA,
    GST_VIDEO_FORMAT_NV12,
    GST_VIDEO_FORMAT_P010_10LE,
    GST_VIDEO_FORMAT_P016_LE,
    GST_VIDEO_FORMAT_I420,
    GST_VIDEO_FORMAT_I420_10LE,
  };

  device = self->device;

  d3d11_device = gst_d3d11_device_get_device_handle (device);
  g_value_init (&v_list, GST_TYPE_LIST);

  for (i = 0; i < G_N_ELEMENTS (format_list); i++) {
    UINT format_support = 0;
    GstVideoFormat format;
    const GstD3D11Format *d3d11_format;

    d3d11_format = gst_d3d11_device_format_from_gst (device, format_list[i]);
    if (!d3d11_format || d3d11_format->dxgi_format == DXGI_FORMAT_UNKNOWN)
      continue;

    format = d3d11_format->format;
    hr = d3d11_device->CheckFormatSupport (d3d11_format->dxgi_format,
        &format_support);

    if (SUCCEEDED (hr) && ((format_support & flags) == flags)) {
      GValue v_str = G_VALUE_INIT;
      g_value_init (&v_str, G_TYPE_STRING);

      GST_LOG_OBJECT (self, "d3d11 device can support %s with flags 0x%x",
          gst_video_format_to_string (format), flags);
      g_value_set_string (&v_str, gst_video_format_to_string (format));
      gst_value_list_append_and_take_value (&v_list, &v_str);
    }
  }

  supported_caps = gst_caps_new_simple ("video/x-raw",
      "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
      "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
      "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
  gst_caps_set_value (supported_caps, "format", &v_list);
  g_value_unset (&v_list);

  gst_caps_set_features_simple (supported_caps,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY));

  return supported_caps;
}

static GstCaps *
gst_d3d11_video_sink_get_caps (GstBaseSink * sink, GstCaps * filter)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (sink);
  GstCaps *caps = NULL;

  if (self->device && !self->can_convert) {
    GstCaps *overlaycaps;
    GstCapsFeatures *features;

    caps = gst_d3d11_video_sink_get_supported_caps (self,
        (D3D11_FORMAT_SUPPORT) (D3D11_FORMAT_SUPPORT_TEXTURE2D |
        D3D11_FORMAT_SUPPORT_DISPLAY));
    overlaycaps = gst_caps_copy (caps);
    features = gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, NULL);
    gst_caps_set_features_simple (overlaycaps, features);
    gst_caps_append (caps, overlaycaps);
  }

  if (!caps)
    caps = gst_pad_get_pad_template_caps (GST_VIDEO_SINK_PAD (sink));

  if (caps && filter) {
    GstCaps *isect;
    isect = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = isect;
  }

  return caps;
}

static gboolean
gst_d3d11_video_sink_set_caps (GstBaseSink * sink, GstCaps * caps)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (sink);
  gint video_width, video_height;
  gint video_par_n, video_par_d;        /* video's PAR */
  gint display_par_n = 1, display_par_d = 1;    /* display's PAR */
  guint num, den;
  GError *error = NULL;

  GST_DEBUG_OBJECT (self, "set caps %" GST_PTR_FORMAT, caps);

  if (!gst_d3d11_video_sink_prepare_window (self))
    goto no_window;

  if (!gst_video_info_from_caps (&self->info, caps))
    goto invalid_format;

  video_width = GST_VIDEO_INFO_WIDTH (&self->info);
  video_height = GST_VIDEO_INFO_HEIGHT (&self->info);
  video_par_n = GST_VIDEO_INFO_PAR_N (&self->info);
  video_par_d = GST_VIDEO_INFO_PAR_D (&self->info);

  /* get aspect ratio from caps if it's present, and
   * convert video width and height to a display width and height
   * using wd / hd = wv / hv * PARv / PARd */
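  /* e.g., a 720x576 stream with 16:15 PAR on an assumed square-pixel (1:1)
   * display gives num / den = (720 * 16) / (576 * 15) = 4 / 3, so the code
   * below scales the output to 768x576 */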
  /* TODO: Get display PAR */

  if (!gst_video_calculate_display_ratio (&num, &den, video_width,
          video_height, video_par_n, video_par_d, display_par_n, display_par_d))
    goto no_disp_ratio;

  GST_DEBUG_OBJECT (sink,
      "video width/height: %dx%d, calculated display ratio: %d/%d format: %s",
      video_width, video_height, num, den,
      gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->info)));

  /* now find a width x height that respects this display ratio.
   * prefer those that have one of w/h the same as the incoming video
   * using wd / hd = num / den
   */

  /* start with same height, because of interlaced video
   * check hd / den is an integer scale factor, and scale wd with the PAR
   */
  if (video_height % den == 0) {
    GST_DEBUG_OBJECT (self, "keeping video height");
    GST_VIDEO_SINK_WIDTH (self) = (guint)
        gst_util_uint64_scale_int (video_height, num, den);
    GST_VIDEO_SINK_HEIGHT (self) = video_height;
  } else if (video_width % num == 0) {
    GST_DEBUG_OBJECT (self, "keeping video width");
    GST_VIDEO_SINK_WIDTH (self) = video_width;
    GST_VIDEO_SINK_HEIGHT (self) = (guint)
        gst_util_uint64_scale_int (video_width, den, num);
  } else {
    GST_DEBUG_OBJECT (self, "approximating while keeping video height");
    GST_VIDEO_SINK_WIDTH (self) = (guint)
        gst_util_uint64_scale_int (video_height, num, den);
    GST_VIDEO_SINK_HEIGHT (self) = video_height;
  }

  GST_DEBUG_OBJECT (self, "scaling to %dx%d",
      GST_VIDEO_SINK_WIDTH (self), GST_VIDEO_SINK_HEIGHT (self));
  self->video_width = video_width;
  self->video_height = video_height;

  if (GST_VIDEO_SINK_WIDTH (self) <= 0 || GST_VIDEO_SINK_HEIGHT (self) <= 0)
    goto no_display_size;

  GST_OBJECT_LOCK (self);
  if (!self->pending_render_rect) {
    self->render_rect.x = 0;
    self->render_rect.y = 0;
    self->render_rect.w = GST_VIDEO_SINK_WIDTH (self);
    self->render_rect.h = GST_VIDEO_SINK_HEIGHT (self);
  }

  gst_d3d11_window_set_render_rectangle (self->window,
      self->render_rect.x, self->render_rect.y, self->render_rect.w,
      self->render_rect.h);
  self->pending_render_rect = FALSE;
  GST_OBJECT_UNLOCK (self);

  self->have_video_processor = FALSE;
  if (!gst_d3d11_window_prepare (self->window, GST_VIDEO_SINK_WIDTH (self),
          GST_VIDEO_SINK_HEIGHT (self), caps, &self->have_video_processor,
          &error)) {
    GstMessage *error_msg;

    GST_ERROR_OBJECT (self, "cannot create swapchain");
    error_msg = gst_message_new_error (GST_OBJECT_CAST (self),
        error, "Failed to prepare d3d11window");
    g_clear_error (&error);
    gst_element_post_message (GST_ELEMENT (self), error_msg);

    return FALSE;
  }

  if (self->fallback_pool) {
    gst_buffer_pool_set_active (self->fallback_pool, FALSE);
    gst_clear_object (&self->fallback_pool);
  }

  {
    GstD3D11AllocationParams *d3d11_params;
    gint bind_flags = D3D11_BIND_SHADER_RESOURCE;

    if (self->have_video_processor) {
      /* To create a video processor input view, one of the following bind
       * flags is required.
       * NOTE: Any texture arrays which were created with the D3D11_BIND_DECODER
       * flag cannot be used for shader input.
       *
       * D3D11_BIND_DECODER
       * D3D11_BIND_VIDEO_ENCODER
       * D3D11_BIND_RENDER_TARGET
       * D3D11_BIND_UNORDERED_ACCESS
       */
      bind_flags |= D3D11_BIND_RENDER_TARGET;
    }

    d3d11_params = gst_d3d11_allocation_params_new (self->device,
        &self->info, (GstD3D11AllocationFlags) 0, bind_flags);

    self->fallback_pool = gst_d3d11_buffer_pool_new_with_options (self->device,
        caps, d3d11_params, 2, 0);
    gst_d3d11_allocation_params_free (d3d11_params);
  }

  if (!self->fallback_pool) {
    GST_ERROR_OBJECT (self, "Failed to configure fallback pool");
    return FALSE;
  }

  self->processor_in_use = FALSE;

  return TRUE;

  /* ERRORS */
invalid_format:
  {
    GST_DEBUG_OBJECT (sink,
        "Could not locate image format from caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
no_window:
  {
    GST_ELEMENT_ERROR (sink, RESOURCE, NOT_FOUND, (NULL),
        ("Failed to open window."));
    return FALSE;
  }
no_disp_ratio:
  {
    GST_ELEMENT_ERROR (sink, CORE, NEGOTIATION, (NULL),
        ("Error calculating the output display ratio of the video."));
    return FALSE;
  }
no_display_size:
  {
    GST_ELEMENT_ERROR (sink, CORE, NEGOTIATION, (NULL),
        ("Error calculating the output display size of the video."));
    return FALSE;
  }
}

static void
gst_d3d11_video_sink_key_event (GstD3D11Window * window, const gchar * event,
    const gchar * key, GstD3D11VideoSink * self)
{
  if (self->enable_navigation_events) {
    GST_LOG_OBJECT (self, "send key event %s, key %s", event, key);
    gst_navigation_send_key_event (GST_NAVIGATION (self), event, key);
  }
}

static void
gst_d3d11_video_mouse_key_event (GstD3D11Window * window, const gchar * event,
    gint button, gdouble x, gdouble y, GstD3D11VideoSink * self)
{
  if (self->enable_navigation_events) {
    GST_LOG_OBJECT (self,
        "send mouse event %s, button %d (%.1f, %.1f)", event, button, x, y);
    gst_navigation_send_mouse_event (GST_NAVIGATION (self), event, button, x,
        y);
  }
}

static gboolean
gst_d3d11_video_sink_start (GstBaseSink * sink)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (sink);
  gboolean is_hardware = TRUE;

  GST_DEBUG_OBJECT (self, "Start");

  if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), self->adapter,
          &self->device)) {
    GST_ERROR_OBJECT (sink, "Cannot create d3d11device");
    return FALSE;
  }

  g_object_get (self->device, "hardware", &is_hardware, NULL);

  if (!is_hardware) {
    GST_WARNING_OBJECT (self, "D3D11 device is running on software emulation");
    self->can_convert = FALSE;
  } else {
    self->can_convert = TRUE;
  }

  return TRUE;
}
|
|
|
|
|
|
|
|
static gboolean
|
|
|
|
gst_d3d11_video_sink_prepare_window (GstD3D11VideoSink * self)
|
|
|
|
{
|
|
|
|
GstD3D11WindowNativeType window_type = GST_D3D11_WINDOW_NATIVE_TYPE_HWND;
|
|
|
|
|
|
|
|
if (self->window)
|
|
|
|
return TRUE;
|
|
|
|
|
  if (self->draw_on_shared_texture) {
    GST_INFO_OBJECT (self,
        "Create dummy window for rendering on shared texture");
    self->window = gst_d3d11_window_dummy_new (self->device);
    return TRUE;
  }

  if (!self->window_id)
    gst_video_overlay_prepare_window_handle (GST_VIDEO_OVERLAY (self));

  if (self->window_id) {
    window_type =
        gst_d3d11_window_get_native_type_from_handle (self->window_id);

    if (window_type != GST_D3D11_WINDOW_NATIVE_TYPE_NONE) {
      GST_DEBUG_OBJECT (self, "Have window handle %" G_GUINTPTR_FORMAT,
          self->window_id);
      gst_video_overlay_got_window_handle (GST_VIDEO_OVERLAY (self),
          self->window_id);
    }
  }

  GST_DEBUG_OBJECT (self, "Create window (type: %s)",
      gst_d3d11_window_get_native_type_to_string (window_type));

#if GST_D3D11_WINAPI_ONLY_APP
  if (window_type != GST_D3D11_WINDOW_NATIVE_TYPE_CORE_WINDOW &&
      window_type != GST_D3D11_WINDOW_NATIVE_TYPE_SWAP_CHAIN_PANEL) {
    GST_ERROR_OBJECT (self, "Overlay handle must be set before READY state");
    return FALSE;
  }
#endif

  switch (window_type) {
#if (!GST_D3D11_WINAPI_ONLY_APP)
    case GST_D3D11_WINDOW_NATIVE_TYPE_HWND:
      self->window = gst_d3d11_window_win32_new (self->device, self->window_id);
      break;
#endif
#if GST_D3D11_WINAPI_APP
    case GST_D3D11_WINDOW_NATIVE_TYPE_CORE_WINDOW:
      self->window = gst_d3d11_window_core_window_new (self->device,
          self->window_id);
      break;
    case GST_D3D11_WINDOW_NATIVE_TYPE_SWAP_CHAIN_PANEL:
      self->window = gst_d3d11_window_swap_chain_panel_new (self->device,
          self->window_id);
      break;
#endif
    default:
      break;
  }

  if (!self->window) {
    GST_ERROR_OBJECT (self, "Cannot create d3d11window");
    return FALSE;
  }

  GST_OBJECT_LOCK (self);
  g_object_set (self->window,
      "force-aspect-ratio", self->force_aspect_ratio,
      "fullscreen-toggle-mode", self->fullscreen_toggle_mode,
      "fullscreen", self->fullscreen,
      "enable-navigation-events", self->enable_navigation_events, NULL);
  GST_OBJECT_UNLOCK (self);

  g_signal_connect (self->window, "key-event",
      G_CALLBACK (gst_d3d11_video_sink_key_event), self);
  g_signal_connect (self->window, "mouse-event",
      G_CALLBACK (gst_d3d11_video_mouse_key_event), self);

  return TRUE;
}
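
/* Example (illustrative sketch, not part of this element): an application
 * that owns the render target window would typically hand its handle to the
 * sink through the GstVideoOverlay interface before the state change, so
 * the HWND branch in prepare_window () above is taken instead of an
 * internal window; "hwnd" is an assumed application-owned Win32 handle:
 *
 *   GstElement *sink = gst_element_factory_make ("d3d11videosink", NULL);
 *
 *   gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink),
 *       (guintptr) hwnd);
 *
 * On GST_D3D11_WINAPI_ONLY_APP builds this is mandatory, since the function
 * above refuses to continue without an external CoreWindow or
 * SwapChainPanel handle. */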

static gboolean
gst_d3d11_video_sink_stop (GstBaseSink * sink)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (sink);

  GST_DEBUG_OBJECT (self, "Stop");

  if (self->fallback_pool) {
    gst_buffer_pool_set_active (self->fallback_pool, FALSE);
    gst_object_unref (self->fallback_pool);
    self->fallback_pool = NULL;
  }

  if (self->window)
    gst_d3d11_window_unprepare (self->window);

  gst_clear_object (&self->device);
  gst_clear_object (&self->window);

  return TRUE;
}

static gboolean
gst_d3d11_video_sink_propose_allocation (GstBaseSink * sink, GstQuery * query)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (sink);
  GstCaps *caps;
  GstBufferPool *pool = NULL;
  GstVideoInfo info;
  guint size;
  gboolean need_pool;

  if (!self->device || !self->window)
    return FALSE;

  gst_query_parse_allocation (query, &caps, &need_pool);

  if (caps == NULL)
    goto no_caps;

  if (!gst_video_info_from_caps (&info, caps))
    goto invalid_caps;

  /* the normal size of a frame */
  size = info.size;

  if (need_pool) {
    GstD3D11AllocationParams *d3d11_params;

    GST_DEBUG_OBJECT (self, "create new pool");

    d3d11_params = gst_d3d11_allocation_params_new (self->device, &info,
        (GstD3D11AllocationFlags) 0, D3D11_BIND_SHADER_RESOURCE);
    pool = gst_d3d11_buffer_pool_new_with_options (self->device, caps,
        d3d11_params, 2, 0);
    gst_d3d11_allocation_params_free (d3d11_params);

    if (!pool) {
      GST_ERROR_OBJECT (self, "Failed to create buffer pool");
      return FALSE;
    }

    size = GST_D3D11_BUFFER_POOL (pool)->buffer_size;
  }

  /* We need at least 2 buffers because we hold on to the last one for redrawing
   * on window-resize event */
  gst_query_add_allocation_pool (query, pool, size, 2, 0);
  if (pool)
    g_object_unref (pool);

  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
  gst_query_add_allocation_meta (query,
      GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL);

  return TRUE;

  /* ERRORS */
no_caps:
  {
    GST_WARNING_OBJECT (self, "no caps specified");
    return FALSE;
  }
invalid_caps:
  {
    GST_WARNING_OBJECT (self, "invalid caps specified");
    return FALSE;
  }

  return TRUE;
}
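
/* Example (illustrative sketch; "srcpad" is an assumed upstream source pad
 * already negotiated to matching caps): a peer consuming the allocation
 * proposal made by propose_allocation () above would do roughly:
 *
 *   GstQuery *query = gst_query_new_allocation (caps, TRUE);
 *
 *   if (gst_pad_peer_query (srcpad, query) &&
 *       gst_query_get_n_allocation_pools (query) > 0) {
 *     GstBufferPool *pool;
 *     guint size, min_bufs, max_bufs;
 *
 *     // The first proposed pool is the GstD3D11BufferPool created above,
 *     // with a minimum of 2 buffers for redraw-on-resize
 *     gst_query_parse_nth_allocation_pool (query, 0, &pool, &size,
 *         &min_bufs, &max_bufs);
 *     // ... configure the pool, set it active, use it, then unref ...
 *   }
 *   gst_query_unref (query);
 */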

static gboolean
gst_d3d11_video_sink_query (GstBaseSink * sink, GstQuery * query)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (sink);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONTEXT:
      if (gst_d3d11_handle_context_query (GST_ELEMENT (self), query,
              self->device)) {
        return TRUE;
      }
      break;
    default:
      break;
  }

  return GST_BASE_SINK_CLASS (parent_class)->query (sink, query);
}

static gboolean
gst_d3d11_video_sink_unlock (GstBaseSink * sink)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (sink);

  if (self->window)
    gst_d3d11_window_unlock (self->window);

  return TRUE;
}

static gboolean
gst_d3d11_video_sink_unlock_stop (GstBaseSink * sink)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (sink);

  if (self->window)
    gst_d3d11_window_unlock_stop (self->window);

  return TRUE;
}

static gboolean
gst_d3d11_video_sink_upload_frame (GstD3D11VideoSink * self, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstVideoFrame in_frame, out_frame;
  gboolean ret;

  GST_LOG_OBJECT (self, "Copy to fallback buffer");

  if (!gst_video_frame_map (&in_frame, &self->info, inbuf,
          (GstMapFlags) (GST_MAP_READ | GST_VIDEO_FRAME_MAP_FLAG_NO_REF)))
    goto invalid_buffer;

  if (!gst_video_frame_map (&out_frame, &self->info, outbuf,
          (GstMapFlags) (GST_MAP_WRITE | GST_VIDEO_FRAME_MAP_FLAG_NO_REF))) {
    gst_video_frame_unmap (&in_frame);
    goto invalid_buffer;
  }

  ret = gst_video_frame_copy (&out_frame, &in_frame);

  gst_video_frame_unmap (&in_frame);
  gst_video_frame_unmap (&out_frame);

  return ret;

  /* ERRORS */
invalid_buffer:
  {
    GST_ELEMENT_WARNING (self, CORE, NOT_IMPLEMENTED, (NULL),
        ("invalid video buffer received"));
    return FALSE;
  }
}

static gboolean
gst_d3d11_video_sink_copy_d3d11_to_d3d11 (GstD3D11VideoSink * self,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GST_LOG_OBJECT (self, "Copy to fallback buffer using device memory copy");

  return gst_d3d11_buffer_copy_into (outbuf, inbuf, &self->info);
}

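/* Note: get_fallback_buffer () below pulls a buffer from fallback_pool and
 * fills it from inbuf, either with a device-side copy (device_copy == TRUE,
 * when the incoming memory is accessible from our ID3D11Device) or with a
 * CPU map-and-copy via upload_frame () otherwise. */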
static gboolean
gst_d3d11_video_sink_get_fallback_buffer (GstD3D11VideoSink * self,
    GstBuffer * inbuf, GstBuffer ** fallback_buf, gboolean device_copy)
{
  GstBuffer *outbuf = NULL;
  ID3D11ShaderResourceView *view[GST_VIDEO_MAX_PLANES];
  GstVideoOverlayCompositionMeta *compo_meta;

  if (!self->fallback_pool ||
      !gst_buffer_pool_set_active (self->fallback_pool, TRUE) ||
      gst_buffer_pool_acquire_buffer (self->fallback_pool, &outbuf,
          NULL) != GST_FLOW_OK) {
    GST_ERROR_OBJECT (self, "fallback pool is unavailable");
    return FALSE;
  }

  /* Ensure SRV */
  if (!gst_d3d11_buffer_get_shader_resource_view (outbuf, view)) {
    GST_ERROR_OBJECT (self, "fallback SRV is unavailable");
    goto error;
  }

  if (device_copy) {
    if (!gst_d3d11_video_sink_copy_d3d11_to_d3d11 (self, inbuf, outbuf)) {
      GST_ERROR_OBJECT (self, "cannot copy frame");
      goto error;
    }
  } else if (!gst_d3d11_video_sink_upload_frame (self, inbuf, outbuf)) {
    GST_ERROR_OBJECT (self, "cannot upload frame");
    goto error;
  }

  /* Copy overlaycomposition meta if any */
  compo_meta = gst_buffer_get_video_overlay_composition_meta (inbuf);
  if (compo_meta)
    gst_buffer_add_video_overlay_composition_meta (outbuf, compo_meta->overlay);

  *fallback_buf = outbuf;

  return TRUE;

error:
  gst_buffer_unref (outbuf);
  return FALSE;
}

static GstFlowReturn
gst_d3d11_video_sink_show_frame (GstVideoSink * sink, GstBuffer * buf)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (sink);
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoRectangle rect = { 0, };
  GstBuffer *fallback_buf = NULL;
  ID3D11Device *device_handle =
      gst_d3d11_device_get_device_handle (self->device);
  ID3D11ShaderResourceView *view[GST_VIDEO_MAX_PLANES];

  if (!gst_d3d11_buffer_can_access_device (buf, device_handle)) {
    GST_LOG_OBJECT (self, "Need fallback buffer");

    if (!gst_d3d11_video_sink_get_fallback_buffer (self, buf, &fallback_buf,
            FALSE)) {
      return GST_FLOW_ERROR;
    }
  } else {
    gboolean direct_rendering = FALSE;

    /* Check if we can use video processor for conversion */
    if (gst_buffer_n_memory (buf) == 1 && self->have_video_processor) {
      GstD3D11Memory *mem = (GstD3D11Memory *) gst_buffer_peek_memory (buf, 0);
      D3D11_TEXTURE2D_DESC desc;

      gst_d3d11_memory_get_texture_desc (mem, &desc);
      if ((desc.BindFlags & D3D11_BIND_DECODER) == D3D11_BIND_DECODER) {
        GST_TRACE_OBJECT (self,
            "Got VideoProcessor compatible texture, do direct rendering");
        direct_rendering = TRUE;
        self->processor_in_use = TRUE;
      } else if (self->processor_in_use &&
          (desc.BindFlags & D3D11_BIND_RENDER_TARGET) ==
          D3D11_BIND_RENDER_TARGET) {
        direct_rendering = TRUE;
      }
    }

    /* Or, SRV should be available */
    if (!direct_rendering) {
      if (gst_d3d11_buffer_get_shader_resource_view (buf, view)) {
        GST_TRACE_OBJECT (self, "SRV is available, do direct rendering");
        direct_rendering = TRUE;
      }
    }

    if (!direct_rendering &&
        !gst_d3d11_video_sink_get_fallback_buffer (self, buf, &fallback_buf,
            TRUE)) {
      return GST_FLOW_ERROR;
    }
  }

  gst_d3d11_window_show (self->window);

  /* FIXME: add support crop meta */
  rect.w = self->video_width;
  rect.h = self->video_height;

  if (self->draw_on_shared_texture) {
    g_rec_mutex_lock (&self->draw_lock);
    self->current_buffer = fallback_buf ? fallback_buf : buf;
    self->drawing = TRUE;

    GST_LOG_OBJECT (self, "Begin drawing");

    /* Application should call draw method on this callback */
    if (self->callbacks.begin_draw) {
      self->callbacks.begin_draw (self, self->user_data);
    } else {
      g_signal_emit (self, gst_d3d11_video_sink_signals[SIGNAL_BEGIN_DRAW], 0,
          NULL);
    }

    GST_LOG_OBJECT (self, "End drawing");
    self->drawing = FALSE;
    self->current_buffer = NULL;
    g_rec_mutex_unlock (&self->draw_lock);
  } else {
    ret = gst_d3d11_window_render (self->window,
        fallback_buf ? fallback_buf : buf, &rect);
  }

  gst_clear_buffer (&fallback_buf);

  if (ret == GST_D3D11_WINDOW_FLOW_CLOSED) {
    GST_ELEMENT_ERROR (self, RESOURCE, NOT_FOUND,
        ("Output window was closed"), (NULL));

    ret = GST_FLOW_ERROR;
  }

  return ret;
}
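
/* Example (illustrative sketch) of the application side of the "begin-draw"
 * handshake driven by show_frame () above; App, app->shared_handle and
 * friends are assumed application state, not part of this element:
 *
 *   static void
 *   on_begin_draw (GstElement * sink, gpointer user_data)
 *   {
 *     App *app = user_data;
 *     gboolean drew = FALSE;
 *
 *     // Must happen before this handler returns: the sink only keeps
 *     // current_buffer alive while "drawing" is TRUE
 *     g_signal_emit_by_name (sink, "draw", app->shared_handle,
 *         app->misc_flags, app->acquire_key, app->release_key, &drew);
 *   }
 *
 *   g_object_set (sink, "draw-on-shared-texture", TRUE, NULL);
 *   g_signal_connect (sink, "begin-draw", G_CALLBACK (on_begin_draw), app);
 */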

/* VideoOverlay interface */
static void
gst_d3d11_video_sink_set_window_handle (GstVideoOverlay * overlay,
    guintptr window_id)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (overlay);

  GST_DEBUG ("set window handle %" G_GUINTPTR_FORMAT, window_id);

  self->window_id = window_id;
}

static void
gst_d3d11_video_sink_set_render_rectangle (GstVideoOverlay * overlay, gint x,
    gint y, gint width, gint height)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (overlay);

  GST_DEBUG_OBJECT (self,
      "render rect x: %d, y: %d, width: %d, height %d", x, y, width, height);

  GST_OBJECT_LOCK (self);
  if (self->window) {
    gst_d3d11_window_set_render_rectangle (self->window, x, y, width, height);
  } else {
    self->render_rect.x = x;
    self->render_rect.y = y;
    self->render_rect.w = width;
    self->render_rect.h = height;
    self->pending_render_rect = TRUE;
  }
  GST_OBJECT_UNLOCK (self);
}

static void
gst_d3d11_video_sink_expose (GstVideoOverlay * overlay)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (overlay);

  if (self->window && self->window->swap_chain) {
    GstVideoRectangle rect = { 0, };
    rect.w = GST_VIDEO_SINK_WIDTH (self);
    rect.h = GST_VIDEO_SINK_HEIGHT (self);

    gst_d3d11_window_render (self->window, NULL, &rect);
  }
}

static void
gst_d3d11_video_sink_video_overlay_init (GstVideoOverlayInterface * iface)
{
  iface->set_window_handle = gst_d3d11_video_sink_set_window_handle;
  iface->set_render_rectangle = gst_d3d11_video_sink_set_render_rectangle;
  iface->expose = gst_d3d11_video_sink_expose;
}

/* Navigation interface */
static void
gst_d3d11_video_sink_navigation_send_event (GstNavigation * navigation,
    GstStructure * structure)
{
  GstD3D11VideoSink *self = GST_D3D11_VIDEO_SINK (navigation);
  gboolean handled = FALSE;
  GstEvent *event = NULL;
  GstVideoRectangle src = { 0, };
  GstVideoRectangle dst = { 0, };
  GstVideoRectangle result;
  gdouble x, y, xscale = 1.0, yscale = 1.0;

  if (!self->window) {
    gst_structure_free (structure);
    return;
  }

  if (self->force_aspect_ratio) {
    /* We get the frame position using the calculated geometry from _setcaps
       that respect pixel aspect ratios */
    src.w = GST_VIDEO_SINK_WIDTH (self);
    src.h = GST_VIDEO_SINK_HEIGHT (self);
    dst.w = self->render_rect.w;
    dst.h = self->render_rect.h;

    gst_video_sink_center_rect (src, dst, &result, TRUE);
    result.x += self->render_rect.x;
    result.y += self->render_rect.y;
  } else {
    memcpy (&result, &self->render_rect, sizeof (GstVideoRectangle));
  }

  xscale = (gdouble) GST_VIDEO_INFO_WIDTH (&self->info) / result.w;
  yscale = (gdouble) GST_VIDEO_INFO_HEIGHT (&self->info) / result.h;

  /* Converting pointer coordinates to the non scaled geometry */
  if (gst_structure_get_double (structure, "pointer_x", &x)) {
    x = MIN (x, result.x + result.w);
    x = MAX (x - result.x, 0);
    gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE,
        (gdouble) x * xscale, NULL);
  }
  if (gst_structure_get_double (structure, "pointer_y", &y)) {
    y = MIN (y, result.y + result.h);
    y = MAX (y - result.y, 0);
    gst_structure_set (structure, "pointer_y", G_TYPE_DOUBLE,
        (gdouble) y * yscale, NULL);
  }

  event = gst_event_new_navigation (structure);
  if (event) {
    gst_event_ref (event);
    handled = gst_pad_push_event (GST_VIDEO_SINK_PAD (self), event);

    if (!handled)
      gst_element_post_message (GST_ELEMENT_CAST (self),
          gst_navigation_message_new_event (GST_OBJECT_CAST (self), event));

    gst_event_unref (event);
  }
}

static void
gst_d3d11_video_sink_navigation_init (GstNavigationInterface * iface)
{
  iface->send_event = gst_d3d11_video_sink_navigation_send_event;
}

static gboolean
gst_d3d11_video_sink_draw_action (GstD3D11VideoSink * self,
    gpointer shared_handle, guint texture_misc_flags,
    guint64 acquire_key, guint64 release_key)
{
  GstFlowReturn ret;
  g_return_val_if_fail (shared_handle != NULL, FALSE);

  if (!self->draw_on_shared_texture) {
    GST_ERROR_OBJECT (self, "Invalid draw call, we are drawing on window");
    return FALSE;
  }

  if (!shared_handle) {
    GST_ERROR_OBJECT (self, "Invalid handle");
    return FALSE;
  }

  g_rec_mutex_lock (&self->draw_lock);
  if (!self->drawing || !self->current_buffer) {
    GST_WARNING_OBJECT (self, "Nothing to draw");
    g_rec_mutex_unlock (&self->draw_lock);
    return FALSE;
  }

  GST_LOG_OBJECT (self, "Drawing on shared handle %p, MiscFlags: 0x%x"
      ", acquire key: %" G_GUINT64_FORMAT ", release key: %"
      G_GUINT64_FORMAT, shared_handle, texture_misc_flags, acquire_key,
      release_key);

  ret = gst_d3d11_window_render_on_shared_handle (self->window,
      self->current_buffer, shared_handle, texture_misc_flags, acquire_key,
      release_key);
  g_rec_mutex_unlock (&self->draw_lock);

  return ret == GST_FLOW_OK;
}

void
gst_d3d11_video_sink_set_callbacks (GstD3D11VideoSink * videosink,
    GstD3D11VideoSinkCallbacks * callbacks, gpointer user_data)
{
  g_return_if_fail (GST_IS_D3D11_VIDEO_SINK (videosink));

  videosink->callbacks = *callbacks;
  videosink->user_data = user_data;
}

gboolean
gst_d3d11_video_sink_draw (GstD3D11VideoSink * videosink,
    gpointer shared_handle, guint texture_misc_flags, guint64 acquire_key,
    guint64 release_key)
{
  g_return_val_if_fail (GST_IS_D3D11_VIDEO_SINK (videosink), FALSE);

  return gst_d3d11_video_sink_draw_action (videosink, shared_handle,
      texture_misc_flags, acquire_key, release_key);
}
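
/* Example (illustrative sketch): native callers can skip the GObject signal
 * machinery and register C callbacks instead; my_begin_draw and MyCtx are
 * assumptions for the example, and gst_d3d11_video_sink_draw () is the
 * direct counterpart of the "draw" action signal:
 *
 *   static void
 *   my_begin_draw (GstD3D11VideoSink * sink, gpointer user_data)
 *   {
 *     MyCtx *ctx = user_data;
 *
 *     gst_d3d11_video_sink_draw (sink, ctx->shared_handle, ctx->misc_flags,
 *         ctx->acquire_key, ctx->release_key);
 *   }
 *
 *   GstD3D11VideoSinkCallbacks callbacks = { NULL, };
 *   callbacks.begin_draw = my_begin_draw;
 *   gst_d3d11_video_sink_set_callbacks (sink, &callbacks, ctx);
 */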