/* GStreamer
 * Copyright (C) 2020 Seungha Yang <seungha.yang@navercorp.com>
 * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
#pragma once

#include "gstmfconfig.h"

#include <gst/gst.h>
#include <gst/video/video.h>
#include "gstmfutils.h"
#include "gstmftransform.h"

/* D3D11 interop support is compile-time optional */
#if GST_MF_HAVE_D3D11
#include <gst/d3d11/gstd3d11.h>
#endif

G_BEGIN_DECLS
/* Standard GObject boilerplate: type getter plus cast/type-check macros
 * for the GstMFVideoEncoder base class. */
#define GST_TYPE_MF_VIDEO_ENCODER           (gst_mf_video_encoder_get_type())
#define GST_MF_VIDEO_ENCODER(obj)           (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MF_VIDEO_ENCODER,GstMFVideoEncoder))
#define GST_MF_VIDEO_ENCODER_CLASS(klass)   (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_MF_VIDEO_ENCODER,GstMFVideoEncoderClass))
#define GST_MF_VIDEO_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_MF_VIDEO_ENCODER,GstMFVideoEncoderClass))
#define GST_IS_MF_VIDEO_ENCODER(obj)        (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MF_VIDEO_ENCODER))
/* Bug fix: parameter was named "obj" while the macro body referenced
 * "klass", so the expansion picked up whatever "klass" happened to be in
 * the caller's scope (or failed to compile). */
#define GST_IS_MF_VIDEO_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_MF_VIDEO_ENCODER))

typedef struct _GstMFVideoEncoder GstMFVideoEncoder;
typedef struct _GstMFVideoEncoderClass GstMFVideoEncoderClass;
typedef struct _GstMFVideoEncoderDeviceCaps GstMFVideoEncoderDeviceCaps;
typedef struct _GstMFVideoEncoderClassData GstMFVideoEncoderClassData;
2020-12-26 11:39:07 +00:00
|
|
|
|
2022-01-27 16:45:30 +00:00
|
|
|
struct _GstMFVideoEncoderDeviceCaps
|
2020-12-26 11:39:07 +00:00
|
|
|
{
|
|
|
|
gboolean rc_mode; /* AVEncCommonRateControlMode */
|
|
|
|
gboolean quality; /* AVEncCommonQuality */
|
|
|
|
|
|
|
|
gboolean adaptive_mode; /* AVEncAdaptiveMode */
|
|
|
|
gboolean buffer_size; /* AVEncCommonBufferSize */
|
2022-03-26 14:48:46 +00:00
|
|
|
gboolean mean_bitrate; /* AVEncCommonMeanBitRate */
|
2020-12-26 11:39:07 +00:00
|
|
|
gboolean max_bitrate; /* AVEncCommonMaxBitRate */
|
|
|
|
gboolean quality_vs_speed; /* AVEncCommonQualityVsSpeed */
|
|
|
|
gboolean cabac; /* AVEncH264CABACEnable */
|
|
|
|
gboolean sps_id; /* AVEncH264SPSID */
|
|
|
|
gboolean pps_id; /* AVEncH264PPSID */
|
|
|
|
gboolean bframes; /* AVEncMPVDefaultBPictureCount */
|
|
|
|
gboolean gop_size; /* AVEncMPVGOPSize */
|
|
|
|
gboolean threads; /* AVEncNumWorkerThreads */
|
|
|
|
gboolean content_type; /* AVEncVideoContentType */
|
|
|
|
gboolean qp; /* AVEncVideoEncodeQP */
|
|
|
|
gboolean force_keyframe; /* AVEncVideoForceKeyFrame */
|
|
|
|
gboolean low_latency; /* AVLowLatencyMode */
|
|
|
|
|
|
|
|
gboolean min_qp; /* AVEncVideoMinQP */
|
|
|
|
gboolean max_qp; /* AVEncVideoMaxQP */
|
|
|
|
gboolean frame_type_qp; /* AVEncVideoEncodeFrameTypeQP */
|
|
|
|
gboolean max_num_ref; /* AVEncVideoMaxNumRefFrame */
|
|
|
|
guint max_num_ref_high;
|
|
|
|
guint max_num_ref_low;
|
2020-12-20 20:11:03 +00:00
|
|
|
|
|
|
|
/* TRUE if MFT support d3d11 and also we can use d3d11 interop */
|
|
|
|
gboolean d3d11_aware;
|
2021-10-08 14:07:32 +00:00
|
|
|
/* DXGI adapter LUID, valid only when d3d11_aware == TRUE */
|
|
|
|
gint64 adapter_luid;
|
2020-12-26 11:39:07 +00:00
|
|
|
};
|
|
|
|
|
2022-01-27 16:45:30 +00:00
|
|
|
struct _GstMFVideoEncoderClassData
|
2020-12-26 11:39:07 +00:00
|
|
|
{
|
|
|
|
GstCaps *sink_caps;
|
|
|
|
GstCaps *src_caps;
|
|
|
|
gchar *device_name;
|
|
|
|
guint32 enum_flags;
|
|
|
|
guint device_index;
|
2022-01-27 16:45:30 +00:00
|
|
|
GstMFVideoEncoderDeviceCaps device_caps;
|
2020-12-26 11:39:07 +00:00
|
|
|
gboolean is_default;
|
|
|
|
};
|
2020-01-07 08:45:22 +00:00
|
|
|
|
2022-01-27 16:45:30 +00:00
|
|
|
struct _GstMFVideoEncoder
|
2020-01-07 08:45:22 +00:00
|
|
|
{
|
|
|
|
GstVideoEncoder parent;
|
|
|
|
|
|
|
|
GstMFTransform *transform;
|
mfvideoenc: Improve latency performance for hardware encoder
Unlike software MFT (Media Foundation Transform) which is synchronous
in terms of processing input and output data, hardware MFT works
in asynchronous mode. output data might not be available right after
we pushed one input data into MFT.
Note that async MFT will fire two events, one is "METransformNeedInput"
which happens when MFT can accept more input data,
and the other is "METransformHaveOutput", that's for signaling
there's pending data which can be outputted immediately.
To listen the events, we can wait synchronously via
IMFMediaEventGenerator::GetEvent() or make use of IMFAsyncCallback
object which is asynchronous way and the event will be notified
from Media Foundation's internal worker queue thread.
To handle such asynchronous operation, previous working flow was
as follows (IMFMediaEventGenerator::GetEvent() was used for now)
- Check if there is pending output data and push the data toward downstream.
- Pulling events (from streaming thread) until there's at least
one pending "METransformNeedInput" event
- Then, push one data into MFT from streaming thread
- Check if there is pending "METransformHaveOutput" again.
If there is, push new output data to downstream
(unlikely there is pending output data at this moment)
Above flow was processed from upstream streaming thread. That means
even if there's available output data, it could be outputted later
when the next buffer is pushed from upstream streaming thread.
It would introduce at least one frame latency in case of live stream.
To reduce such latency, this commit modifies the flow to be fully
asynchronous like hardware MFT was designed and to be able to
output encoded data whenever it's available. More specifically,
IMFAsyncCallback object will be used for handling
"METransformNeedInput" and "METransformHaveOutput" events from
Media Foundation's internal thread, and new output data will be
also outputted from the Media Foundation's thread.
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1520>
2020-08-18 18:19:26 +00:00
|
|
|
gboolean async_mft;
|
|
|
|
GstFlowReturn last_ret;
|
2020-01-07 08:45:22 +00:00
|
|
|
|
|
|
|
GstVideoCodecState *input_state;
|
2020-12-20 20:11:03 +00:00
|
|
|
|
2021-06-22 16:43:08 +00:00
|
|
|
/* Set by subclass */
|
|
|
|
gboolean has_reorder_frame;
|
|
|
|
|
|
|
|
/* Calculated timestamp offset in MF timescale (100ns scale)
|
|
|
|
* when B-frame is enabled. */
|
|
|
|
LONGLONG mf_pts_offset;
|
|
|
|
|
2022-06-25 21:39:54 +00:00
|
|
|
gboolean need_align;
|
|
|
|
|
2020-12-20 20:11:03 +00:00
|
|
|
#if GST_MF_HAVE_D3D11
|
|
|
|
/* For D3D11 interop. */
|
|
|
|
GstD3D11Device *other_d3d11_device;
|
|
|
|
GstD3D11Device *d3d11_device;
|
|
|
|
IMFDXGIDeviceManager *device_manager;
|
|
|
|
UINT reset_token;
|
|
|
|
IMFVideoSampleAllocatorEx *mf_allocator;
|
2022-07-22 15:41:49 +00:00
|
|
|
GstD3D11Fence *fence;
|
2020-12-20 20:11:03 +00:00
|
|
|
#endif
|
2020-01-07 08:45:22 +00:00
|
|
|
};
|
|
|
|
|
2022-01-27 16:45:30 +00:00
|
|
|
struct _GstMFVideoEncoderClass
|
2020-01-07 08:45:22 +00:00
|
|
|
{
|
|
|
|
GstVideoEncoderClass parent_class;
|
|
|
|
|
2020-12-26 11:39:07 +00:00
|
|
|
/* Set by subclass */
|
|
|
|
GUID codec_id; /* Output subtype of MFT */
|
|
|
|
guint32 enum_flags; /* MFT_ENUM_FLAG */
|
|
|
|
guint device_index; /* Index of enumerated IMFActivate via MFTEnum */
|
2022-01-27 16:45:30 +00:00
|
|
|
GstMFVideoEncoderDeviceCaps device_caps;
|
2020-01-07 08:45:22 +00:00
|
|
|
|
2022-01-27 16:45:30 +00:00
|
|
|
gboolean (*set_option) (GstMFVideoEncoder * encoder,
|
2020-12-26 18:16:28 +00:00
|
|
|
GstVideoCodecState * state,
|
2020-01-07 08:45:22 +00:00
|
|
|
IMFMediaType * output_type);
|
|
|
|
|
2022-01-27 16:45:30 +00:00
|
|
|
gboolean (*set_src_caps) (GstMFVideoEncoder * encoder,
|
2020-01-07 08:45:22 +00:00
|
|
|
GstVideoCodecState * state,
|
|
|
|
IMFMediaType * output_type);
|
2022-03-26 14:48:46 +00:00
|
|
|
|
|
|
|
gboolean (*check_reconfigure) (GstMFVideoEncoder * encoder);
|
2020-01-07 08:45:22 +00:00
|
|
|
};
|
|
|
|
|
2022-01-27 16:45:30 +00:00
|
|
|
GType gst_mf_video_encoder_get_type (void);
|
2020-01-07 08:45:22 +00:00
|
|
|
|
2022-01-27 16:45:30 +00:00
|
|
|
void gst_mf_video_encoder_register (GstPlugin * plugin,
|
|
|
|
guint rank,
|
|
|
|
GUID * subtype,
|
|
|
|
GTypeInfo * type_info,
|
|
|
|
GList * d3d11_device);
|
2020-12-26 11:39:07 +00:00
|
|
|
|
2020-01-07 08:45:22 +00:00
|
|
|
G_END_DECLS
|