mfvideoenc: Remove duplicated class registration code

Each codec subclass has the same code for class/element registration,
so we can move the code into one helper method, and that will make
future enhancements simpler.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1909>
This commit is contained in:
Seungha Yang 2020-12-26 20:39:07 +09:00 committed by GStreamer Merge Bot
parent 86fdd39147
commit 4b522dd355
6 changed files with 536 additions and 1170 deletions

View file

@ -179,39 +179,6 @@ enum
#define DEFAULT_QP_B 26
#define DEFAULT_REF 2
#define GST_MF_H264_ENC_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj), G_TYPE_FROM_INSTANCE (obj), GstMFH264EncClass))
/* Per-device capability flags for an H.264 encoder MFT, probed at plugin
 * init through ICodecAPI::IsSupported(). Each boolean records whether the
 * named CODECAPI property is controllable on this particular device. */
typedef struct _GstMFH264EncDeviceCaps
{
  /* if CodecAPI is available */
  gboolean rc_mode;             /* AVEncCommonRateControlMode */
  gboolean quality;             /* AVEncCommonQuality */
  gboolean adaptive_mode;       /* AVEncAdaptiveMode */
  gboolean buffer_size;         /* AVEncCommonBufferSize */
  gboolean max_bitrate;         /* AVEncCommonMaxBitRate */
  gboolean quality_vs_speed;    /* AVEncCommonQualityVsSpeed */
  gboolean cabac;               /* AVEncH264CABACEnable */
  gboolean sps_id;              /* AVEncH264SPSID */
  gboolean pps_id;              /* AVEncH264PPSID */
  gboolean bframes;             /* AVEncMPVDefaultBPictureCount */
  gboolean gop_size;            /* AVEncMPVGOPSize */
  gboolean threads;             /* AVEncNumWorkerThreads */
  gboolean content_type;        /* AVEncVideoContentType */
  gboolean qp;                  /* AVEncVideoEncodeQP */
  gboolean force_keyframe;      /* AVEncVideoForceKeyFrame */
  gboolean low_latency;         /* AVLowLatencyMode */

  /* since Windows 8.1 */
  gboolean min_qp;              /* AVEncVideoMinQP */
  gboolean max_qp;              /* AVEncVideoMaxQP */
  gboolean frame_type_qp;       /* AVEncVideoEncodeFrameTypeQP */
  gboolean max_num_ref;         /* AVEncVideoMaxNumRefFrame */
  /* Valid range of AVEncVideoMaxNumRefFrame reported by the device
   * (queried via ICodecAPI::GetParameterRange) */
  guint max_num_ref_high;
  guint max_num_ref_low;
} GstMFH264EncDeviceCaps;
typedef struct _GstMFH264Enc
{
GstMFVideoEnc parent;
@ -246,21 +213,8 @@ typedef struct _GstMFH264Enc
/* Class structure for a registered per-device H.264 encoder subtype.
 * device_caps holds the capability flags probed for the device this
 * subtype was registered for (copied from the class data in class_init). */
typedef struct _GstMFH264EncClass
{
  GstMFVideoEncClass parent_class;

  GstMFH264EncDeviceCaps device_caps;
} GstMFH264EncClass;
/* Class data handed to class_init when registering a per-device subtype.
 * The caps and device_name are consumed by class_init (it unrefs the caps
 * and frees device_name once the pad templates are installed). */
typedef struct
{
  GstCaps *sink_caps;
  GstCaps *src_caps;
  gchar *device_name;
  guint32 enum_flags;           /* MFT_ENUM_FLAG_* used during enumeration */
  guint device_index;
  GstMFH264EncDeviceCaps device_caps;
  gboolean is_default;          /* TRUE for the first registered ("mfh264enc")
                                 * element; later devices get a suffixed name */
} GstMFH264EncClassData;
static GstElementClass *parent_class = NULL;
static void gst_mf_h264_enc_get_property (GObject * object, guint prop_id,
@ -278,13 +232,12 @@ gst_mf_h264_enc_class_init (GstMFH264EncClass * klass, gpointer data)
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstMFVideoEncClass *mfenc_class = GST_MF_VIDEO_ENC_CLASS (klass);
GstMFH264EncClassData *cdata = (GstMFH264EncClassData *) data;
GstMFH264EncDeviceCaps *device_caps = &cdata->device_caps;
GstMFVideoEncClassData *cdata = (GstMFVideoEncClassData *) data;
GstMFVideoEncDeviceCaps *device_caps = &cdata->device_caps;
gchar *long_name;
gchar *classification;
parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
klass->device_caps = *device_caps;
gobject_class->get_property = gst_mf_h264_enc_get_property;
gobject_class->set_property = gst_mf_h264_enc_set_property;
@ -525,7 +478,7 @@ gst_mf_h264_enc_class_init (GstMFH264EncClass * klass, gpointer data)
mfenc_class->codec_id = MFVideoFormat_H264;
mfenc_class->enum_flags = cdata->enum_flags;
mfenc_class->device_index = cdata->device_index;
mfenc_class->can_force_keyframe = device_caps->force_keyframe;
mfenc_class->device_caps = *device_caps;
g_free (cdata->device_name);
gst_caps_unref (cdata->sink_caps);
@ -771,8 +724,8 @@ static gboolean
gst_mf_h264_enc_set_option (GstMFVideoEnc * mfenc, IMFMediaType * output_type)
{
GstMFH264Enc *self = (GstMFH264Enc *) mfenc;
GstMFH264EncClass *klass = GST_MF_H264_ENC_GET_CLASS (self);
GstMFH264EncDeviceCaps *device_caps = &klass->device_caps;
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (mfenc);
GstMFVideoEncDeviceCaps *device_caps = &klass->device_caps;
HRESULT hr;
GstCaps *allowed_caps, *template_caps;
guint selected_profile = eAVEncH264VProfile_Main;
@ -1010,18 +963,9 @@ gst_mf_h264_enc_set_src_caps (GstMFVideoEnc * mfenc,
return TRUE;
}
static void
gst_mf_h264_enc_register (GstPlugin * plugin, guint rank,
const gchar * device_name, const GstMFH264EncDeviceCaps * device_caps,
guint32 enum_flags, guint device_index,
GstCaps * sink_caps, GstCaps * src_caps)
void
gst_mf_h264_enc_plugin_init (GstPlugin * plugin, guint rank)
{
GType type;
gchar *type_name;
gchar *feature_name;
gint i;
GstMFH264EncClassData *cdata;
gboolean is_default = TRUE;
GTypeInfo type_info = {
sizeof (GstMFH264EncClass),
NULL,
@ -1033,386 +977,9 @@ gst_mf_h264_enc_register (GstPlugin * plugin, guint rank,
0,
(GInstanceInitFunc) gst_mf_h264_enc_init,
};
cdata = g_new0 (GstMFH264EncClassData, 1);
cdata->sink_caps = sink_caps;
cdata->src_caps = src_caps;
cdata->device_name = g_strdup (device_name);
cdata->device_caps = *device_caps;
cdata->enum_flags = enum_flags;
cdata->device_index = device_index;
type_info.class_data = cdata;
type_name = g_strdup ("GstMFH264Enc");
feature_name = g_strdup ("mfh264enc");
i = 1;
while (g_type_from_name (type_name) != 0) {
g_free (type_name);
g_free (feature_name);
type_name = g_strdup_printf ("GstMFH264Device%dEnc", i);
feature_name = g_strdup_printf ("mfh264device%denc", i);
is_default = FALSE;
i++;
}
cdata->is_default = is_default;
type =
g_type_register_static (GST_TYPE_MF_VIDEO_ENC, type_name, &type_info,
(GTypeFlags) 0);
/* make lower rank than default device */
if (rank > 0 && !is_default)
rank--;
if (!gst_element_register (plugin, feature_name, rank, type))
GST_WARNING ("Failed to register plugin '%s'", type_name);
g_free (type_name);
g_free (feature_name);
}
/* A candidate output resolution used when probing encoder size limits. */
typedef struct
{
  guint width;
  guint height;
} GstMFH264EncResolution;
/* Maps an MF eAVEncH264VProfile value to its GStreamer caps profile string.
 * NOTE(review): the tag is spelled "GStMF..." (capital S) — looks like a
 * typo for "GstMF...", but it is referenced below under this exact name,
 * so it cannot be renamed in isolation. */
typedef struct
{
  eAVEncH264VProfile profile;
  const gchar *profile_str;
} GStMFH264EncProfileMap;
/* Probe a single enumerated H.264 encoder MFT and, if usable, register a
 * GstMFH264Enc element for it: queries the supported input video formats,
 * the supported H.264 profiles, collects ICodecAPI capability flags into
 * a GstMFH264EncDeviceCaps, builds sink/src caps, and finally calls
 * gst_mf_h264_enc_register().
 *
 * @plugin:       plugin to register the element with
 * @rank:         base rank for the element (lowered for non-default devices
 *                inside gst_mf_h264_enc_register())
 * @transform:    the enumerated MFT wrapper to probe (not consumed)
 * @device_index: enumeration index, stored in the class data
 * @enum_flags:   MFT_ENUM_FLAG_* flags used for the enumeration
 */
static void
gst_mf_h264_enc_plugin_init_internal (GstPlugin * plugin, guint rank,
    GstMFTransform * transform, guint device_index, guint32 enum_flags)
{
  HRESULT hr;
  MFT_REGISTER_TYPE_INFO *infos;
  UINT32 info_size;
  gint i;
  GstCaps *src_caps = NULL;
  GstCaps *sink_caps = NULL;
  GValue *supported_formats = NULL;
  gboolean have_I420 = FALSE;
  gchar *device_name = NULL;
  GstMFH264EncDeviceCaps device_caps = { 0, };
  IMFActivate *activate;
  IMFTransform *encoder;
  ICodecAPI *codec_api;
  ComPtr<IMFMediaType> out_type;
  GstMFH264EncResolution resolutions_to_check[] = {
    {1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160}, {8192, 4320}
  };
  guint max_width = 0;
  guint max_height = 0;
  guint resolution;
  GStMFH264EncProfileMap profiles_to_check[] = {
    { eAVEncH264VProfile_High, "high" },
    { eAVEncH264VProfile_Main, "main" },
    { eAVEncH264VProfile_Base, "baseline" },
  };
  guint num_profiles = 0;
  GValue profiles = G_VALUE_INIT;

  /* NOTE: depending on environment,
   * some enumerated h/w MFT might not be usable (e.g., multiple GPU case) */
  if (!gst_mf_transform_open (transform))
    return;

  /* Borrowed interface handles; the transform object owns them. */
  activate = gst_mf_transform_get_activate_handle (transform);
  if (!activate) {
    GST_WARNING_OBJECT (transform, "No IMFActivate interface available");
    return;
  }

  encoder = gst_mf_transform_get_transform_handle (transform);
  if (!encoder) {
    GST_WARNING_OBJECT (transform, "No IMFTransform interface available");
    return;
  }

  codec_api = gst_mf_transform_get_codec_api_handle (transform);
  if (!codec_api) {
    GST_WARNING_OBJECT (transform, "No ICodecAPI interface available");
    return;
  }

  g_object_get (transform, "device-name", &device_name, NULL);
  if (!device_name) {
    GST_WARNING_OBJECT (transform, "Unknown device name");
    return;
  }

  g_value_init (&profiles, GST_TYPE_LIST);

  /* Enumerate the raw video formats the MFT accepts on input. */
  hr = activate->GetAllocatedBlob (MFT_INPUT_TYPES_Attributes,
      (UINT8 **) & infos, &info_size);
  if (!gst_mf_result (hr))
    goto done;

  for (i = 0; i < info_size / sizeof (MFT_REGISTER_TYPE_INFO); i++) {
    GstVideoFormat vformat;
    GValue val = G_VALUE_INIT;

    vformat = gst_mf_video_subtype_to_video_format (&infos[i].guidSubtype);
    if (vformat == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    if (!supported_formats) {
      supported_formats = g_new0 (GValue, 1);
      g_value_init (supported_formats, GST_TYPE_LIST);
    }

    /* media foundation has duplicated formats IYUV and I420 */
    if (vformat == GST_VIDEO_FORMAT_I420) {
      if (have_I420)
        continue;

      have_I420 = TRUE;
    }

    g_value_init (&val, G_TYPE_STRING);
    g_value_set_static_string (&val, gst_video_format_to_string (vformat));
    gst_value_list_append_and_take_value (supported_formats, &val);
  }

  /* Blob was allocated by GetAllocatedBlob; free with CoTaskMemFree. */
  CoTaskMemFree (infos);

  if (!supported_formats)
    goto done;
  /* NOTE(review): supported_formats (GValue list) is leaked on the error
   * paths below that jump to done: — only the success path unsets/frees it. */

  /* check supported profiles and resolutions */
  hr = MFCreateMediaType (out_type.GetAddressOf ());
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetGUID (MF_MT_MAJOR_TYPE, MFMediaType_Video);
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetGUID (MF_MT_SUBTYPE, MFVideoFormat_H264);
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetUINT32 (MF_MT_AVG_BITRATE, 2048000);
  if (!gst_mf_result (hr))
    goto done;

  hr = MFSetAttributeRatio (out_type.Get (), MF_MT_FRAME_RATE, 30, 1);
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetUINT32 (MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
  if (!gst_mf_result (hr))
    goto done;

  GST_DEBUG_OBJECT (transform, "Check supported profiles of %s",
      device_name);

  /* Try setting each candidate profile as the output type; accepted ones
   * are collected into the "profiles" GstValueList for the src caps. */
  for (i = 0; i < G_N_ELEMENTS (profiles_to_check); i++) {
    GValue profile_val = G_VALUE_INIT;

    hr = out_type->SetUINT32 (MF_MT_MPEG2_PROFILE,
        profiles_to_check[i].profile);
    if (!gst_mf_result (hr))
      goto done;

    hr = MFSetAttributeSize (out_type.Get (), MF_MT_FRAME_SIZE,
        resolutions_to_check[0].width, resolutions_to_check[0].height);
    if (!gst_mf_result (hr))
      break;

    if (!gst_mf_transform_set_output_type (transform, out_type.Get ()))
      break;

    GST_DEBUG_OBJECT (transform, "MFT supports h264 %s profile",
        profiles_to_check[i].profile_str);

    g_value_init (&profile_val, G_TYPE_STRING);
    g_value_set_static_string (&profile_val, profiles_to_check[i].profile_str);
    gst_value_list_append_and_take_value (&profiles, &profile_val);
    num_profiles++;

    /* clear media type */
    gst_mf_transform_set_output_type (transform, NULL);
  }

  if (num_profiles == 0) {
    GST_WARNING_OBJECT (transform, "Couldn't query supported profile");
    goto done;
  }

  /* baseline is default profile */
  hr = out_type->SetUINT32 (MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Base);
  if (!gst_mf_result (hr))
    goto done;

  GST_DEBUG_OBJECT (transform, "Check supported resolutions of %s",
      device_name);

  /* FIXME: This would take so long time.
   * Need to find smart way to find supported resolution*/
#if 0
  for (i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) {
    guint width, height;

    width = resolutions_to_check[i].width;
    height = resolutions_to_check[i].height;

    hr = MFSetAttributeSize (out_type.Get (), MF_MT_FRAME_SIZE, width, height);
    if (!gst_mf_result (hr))
      break;

    if (!gst_mf_transform_set_output_type (transform, out_type.Get ()))
      break;

    max_width = width;
    max_height = height;

    GST_DEBUG_OBJECT (transform,
        "MFT supports resolution %dx%d", max_width, max_height);

    /* clear media type */
    gst_mf_transform_set_output_type (transform, NULL);
  }

  if (max_width == 0 || max_height == 0) {
    GST_WARNING_OBJECT (transform, "Couldn't query supported resolution");
    goto done;
  }
#else
  /* FIXME: don't hardcode supported resolution */
  max_width = max_height = 8192;
#endif

  /* high profile supported since windows8 */
  src_caps = gst_caps_from_string ("video/x-h264, "
      "stream-format=(string) byte-stream, "
      "alignment=(string) au");
  gst_caps_set_value (src_caps, "profile", &profiles);

  sink_caps = gst_caps_new_empty_simple ("video/x-raw");
  gst_caps_set_value (sink_caps, "format", supported_formats);
  g_value_unset (supported_formats);
  g_free (supported_formats);

  /* To cover both landscape and portrait, select max value */
  resolution = MAX (max_width, max_height);
  gst_caps_set_simple (sink_caps,
      "width", GST_TYPE_INT_RANGE, 64, resolution,
      "height", GST_TYPE_INT_RANGE, 64, resolution, NULL);
  gst_caps_set_simple (src_caps,
      "width", GST_TYPE_INT_RANGE, 64, resolution,
      "height", GST_TYPE_INT_RANGE, 64, resolution, NULL);

  /* Caps live for the life of the registered type; silence leak tracer. */
  GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
  GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);

/* Set device_caps.val to TRUE when the MFT reports support for the given
 * CODECAPI property. */
#define CHECK_DEVICE_CAPS(codec_obj,api,val) \
  if (SUCCEEDED((codec_obj)->IsSupported(&(api)))) {\
    device_caps.val = TRUE; \
  }

  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonRateControlMode, rc_mode);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonQuality, quality);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncAdaptiveMode, adaptive_mode);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonBufferSize, buffer_size);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonMaxBitRate, max_bitrate);
  CHECK_DEVICE_CAPS (codec_api,
      CODECAPI_AVEncCommonQualityVsSpeed, quality_vs_speed);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncH264CABACEnable, cabac);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncH264SPSID, sps_id);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncH264PPSID, pps_id);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncMPVDefaultBPictureCount, bframes);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncMPVGOPSize, gop_size);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncNumWorkerThreads, threads);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoContentType, content_type);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoEncodeQP, qp);
  CHECK_DEVICE_CAPS (codec_api,
      CODECAPI_AVEncVideoForceKeyFrame, force_keyframe);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVLowLatencyMode, low_latency);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMinQP, min_qp);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMaxQP, max_qp);
  CHECK_DEVICE_CAPS (codec_api,
      CODECAPI_AVEncVideoEncodeFrameTypeQP, frame_type_qp);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMaxNumRefFrame, max_num_ref);

  if (device_caps.max_num_ref) {
    VARIANT min;
    VARIANT max;
    VARIANT step;

    /* Query the device's valid range for the ref-frame count. */
    hr = codec_api->GetParameterRange (&CODECAPI_AVEncVideoMaxNumRefFrame,
        &min, &max, &step);
    if (SUCCEEDED (hr)) {
      device_caps.max_num_ref = TRUE;   /* already TRUE; kept for clarity */
      device_caps.max_num_ref_high = max.uiVal;
      device_caps.max_num_ref_low = min.uiVal;
      VariantClear (&min);
      VariantClear (&max);
      VariantClear (&step);
    }
  }

  /* Takes ownership of sink_caps/src_caps; device_caps is copied. */
  gst_mf_h264_enc_register (plugin, rank, device_name,
      &device_caps, enum_flags, device_index, sink_caps, src_caps);

done:
  g_value_unset (&profiles);
  g_free (device_name);
}
void
gst_mf_h264_enc_plugin_init (GstPlugin * plugin, guint rank)
{
GstMFTransformEnumParams enum_params = { 0, };
MFT_REGISTER_TYPE_INFO output_type;
GstMFTransform *transform;
gint i;
gboolean do_next;
GUID subtype = MFVideoFormat_H264;
GST_DEBUG_CATEGORY_INIT (gst_mf_h264_enc_debug, "mfh264enc", 0, "mfh264enc");
output_type.guidMajorType = MFMediaType_Video;
output_type.guidSubtype = MFVideoFormat_H264;
enum_params.category = MFT_CATEGORY_VIDEO_ENCODER;
enum_params.enum_flags = (MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_ASYNCMFT |
MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SORTANDFILTER_APPROVED_ONLY);
enum_params.output_typeinfo = &output_type;
/* register hardware encoders first */
i = 0;
do {
enum_params.device_index = i++;
transform = gst_mf_transform_new (&enum_params);
do_next = TRUE;
if (!transform) {
do_next = FALSE;
} else {
gst_mf_h264_enc_plugin_init_internal (plugin, rank, transform,
enum_params.device_index, enum_params.enum_flags);
gst_clear_object (&transform);
}
} while (do_next);
/* register software encoders */
enum_params.enum_flags = (MFT_ENUM_FLAG_SYNCMFT |
MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SORTANDFILTER_APPROVED_ONLY);
i = 0;
do {
enum_params.device_index = i++;
transform = gst_mf_transform_new (&enum_params);
do_next = TRUE;
if (!transform) {
do_next = FALSE;
} else {
gst_mf_h264_enc_plugin_init_internal (plugin, rank, transform,
enum_params.device_index, enum_params.enum_flags);
gst_clear_object (&transform);
}
} while (do_next);
gst_mf_video_enc_register (plugin, rank, &subtype, &type_info);
}

View file

@ -136,30 +136,6 @@ enum
#define DEFAULT_QP_B 26
#define DEFAULT_REF 2
#define GST_MF_H265_ENC_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj), G_TYPE_FROM_INSTANCE (obj), GstMFH265EncClass))
/* Per-device capability flags for an H.265 encoder MFT, probed at plugin
 * init through ICodecAPI::IsSupported(). Each boolean records whether the
 * named CODECAPI property is controllable on this particular device. */
typedef struct _GstMFH265EncDeviceCaps
{
  gboolean rc_mode;             /* AVEncCommonRateControlMode */
  gboolean buffer_size;         /* AVEncCommonBufferSize */
  gboolean max_bitrate;         /* AVEncCommonMaxBitRate */
  gboolean quality_vs_speed;    /* AVEncCommonQualityVsSpeed */
  gboolean bframes;             /* AVEncMPVDefaultBPictureCount */
  gboolean gop_size;            /* AVEncMPVGOPSize */
  gboolean threads;             /* AVEncNumWorkerThreads */
  gboolean content_type;        /* AVEncVideoContentType */
  gboolean qp;                  /* AVEncVideoEncodeQP */
  gboolean force_keyframe;      /* AVEncVideoForceKeyFrame */
  gboolean low_latency;         /* AVLowLatencyMode */
  gboolean min_qp;              /* AVEncVideoMinQP */
  gboolean max_qp;              /* AVEncVideoMaxQP */
  gboolean frame_type_qp;       /* AVEncVideoEncodeFrameTypeQP */
  gboolean max_num_ref;         /* AVEncVideoMaxNumRefFrame */
  /* Valid range of AVEncVideoMaxNumRefFrame reported by the device
   * (queried via ICodecAPI::GetParameterRange) */
  guint max_num_ref_high;
  guint max_num_ref_low;
} GstMFH265EncDeviceCaps;
typedef struct _GstMFH265Enc
{
GstMFVideoEnc parent;
@ -189,21 +165,8 @@ typedef struct _GstMFH265Enc
/* Class structure for a registered per-device H.265 encoder subtype.
 * device_caps holds the capability flags probed for the device this
 * subtype was registered for (copied from the class data in class_init). */
typedef struct _GstMFH265EncClass
{
  GstMFVideoEncClass parent_class;

  GstMFH265EncDeviceCaps device_caps;
} GstMFH265EncClass;
/* Class data handed to class_init when registering a per-device subtype.
 * The caps and device_name are consumed by class_init (it unrefs the caps
 * and frees device_name once the pad templates are installed). */
typedef struct
{
  GstCaps *sink_caps;
  GstCaps *src_caps;
  gchar *device_name;
  guint32 enum_flags;           /* MFT_ENUM_FLAG_* used during enumeration */
  guint device_index;
  GstMFH265EncDeviceCaps device_caps;
  gboolean is_default;          /* TRUE for the first registered ("mfh265enc")
                                 * element; later devices get a suffixed name */
} GstMFH265EncClassData;
static GstElementClass *parent_class = NULL;
static void gst_mf_h265_enc_get_property (GObject * object, guint prop_id,
@ -221,13 +184,12 @@ gst_mf_h265_enc_class_init (GstMFH265EncClass * klass, gpointer data)
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstMFVideoEncClass *mfenc_class = GST_MF_VIDEO_ENC_CLASS (klass);
GstMFH265EncClassData *cdata = (GstMFH265EncClassData *) data;
GstMFH265EncDeviceCaps *device_caps = &cdata->device_caps;
GstMFVideoEncClassData *cdata = (GstMFVideoEncClassData *) data;
GstMFVideoEncDeviceCaps *device_caps = &cdata->device_caps;
gchar *long_name;
gchar *classification;
parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
klass->device_caps = *device_caps;
gobject_class->get_property = gst_mf_h265_enc_get_property;
gobject_class->set_property = gst_mf_h265_enc_set_property;
@ -413,7 +375,7 @@ gst_mf_h265_enc_class_init (GstMFH265EncClass * klass, gpointer data)
mfenc_class->codec_id = MFVideoFormat_HEVC;
mfenc_class->enum_flags = cdata->enum_flags;
mfenc_class->device_index = cdata->device_index;
mfenc_class->can_force_keyframe = device_caps->force_keyframe;
mfenc_class->device_caps = *device_caps;
g_free (cdata->device_name);
gst_caps_unref (cdata->sink_caps);
@ -607,8 +569,8 @@ static gboolean
gst_mf_h265_enc_set_option (GstMFVideoEnc * mfenc, IMFMediaType * output_type)
{
GstMFH265Enc *self = (GstMFH265Enc *) mfenc;
GstMFH265EncClass *klass = GST_MF_H265_ENC_GET_CLASS (self);
GstMFH265EncDeviceCaps *device_caps = &klass->device_caps;
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (mfenc);
GstMFVideoEncDeviceCaps *device_caps = &klass->device_caps;
HRESULT hr;
GstMFTransform *transform = mfenc->transform;
@ -768,21 +730,11 @@ gst_mf_h265_enc_set_src_caps (GstMFVideoEnc * mfenc,
gst_tag_list_unref (tags);
return TRUE;
}
static void
gst_mf_h265_enc_register (GstPlugin * plugin, guint rank,
const gchar * device_name, const GstMFH265EncDeviceCaps * device_caps,
guint32 enum_flags, guint device_index,
GstCaps * sink_caps, GstCaps * src_caps)
void
gst_mf_h265_enc_plugin_init (GstPlugin * plugin, guint rank)
{
GType type;
gchar *type_name;
gchar *feature_name;
gint i;
GstMFH265EncClassData *cdata;
gboolean is_default = TRUE;
GTypeInfo type_info = {
sizeof (GstMFH265EncClass),
NULL,
@ -794,380 +746,9 @@ gst_mf_h265_enc_register (GstPlugin * plugin, guint rank,
0,
(GInstanceInitFunc) gst_mf_h265_enc_init,
};
cdata = g_new0 (GstMFH265EncClassData, 1);
cdata->sink_caps = sink_caps;
cdata->src_caps = src_caps;
cdata->device_name = g_strdup (device_name);
cdata->device_caps = *device_caps;
cdata->enum_flags = enum_flags;
cdata->device_index = device_index;
type_info.class_data = cdata;
type_name = g_strdup ("GstMFH265Enc");
feature_name = g_strdup ("mfh265enc");
i = 1;
while (g_type_from_name (type_name) != 0) {
g_free (type_name);
g_free (feature_name);
type_name = g_strdup_printf ("GstMFH265Device%dEnc", i);
feature_name = g_strdup_printf ("mfh265device%denc", i);
is_default = FALSE;
i++;
}
cdata->is_default = is_default;
type =
g_type_register_static (GST_TYPE_MF_VIDEO_ENC, type_name, &type_info,
(GTypeFlags) 0);
/* make lower rank than default device */
if (rank > 0 && !is_default)
rank--;
if (!gst_element_register (plugin, feature_name, rank, type))
GST_WARNING ("Failed to register plugin '%s'", type_name);
g_free (type_name);
g_free (feature_name);
}
/* A candidate output resolution used when probing encoder size limits. */
typedef struct
{
  guint width;
  guint height;
} GstMFH265EncResolution;
/* Maps an MF eAVEncH265VProfile value to its GStreamer caps profile string.
 * NOTE(review): the tag is spelled "GStMF..." (capital S) — looks like a
 * typo for "GstMF...", but it is referenced below under this exact name,
 * so it cannot be renamed in isolation. */
typedef struct
{
  eAVEncH265VProfile profile;
  const gchar *profile_str;
} GStMFH265EncProfileMap;
/* Probe a single enumerated H.265 (HEVC) encoder MFT and, if usable,
 * register a GstMFH265Enc element for it: queries the supported input
 * video formats, the supported HEVC profiles, collects ICodecAPI
 * capability flags into a GstMFH265EncDeviceCaps, builds sink/src caps,
 * and finally calls gst_mf_h265_enc_register().
 *
 * @plugin:       plugin to register the element with
 * @rank:         base rank for the element (lowered for non-default devices
 *                inside gst_mf_h265_enc_register())
 * @transform:    the enumerated MFT wrapper to probe (not consumed)
 * @device_index: enumeration index, stored in the class data
 * @enum_flags:   MFT_ENUM_FLAG_* flags used for the enumeration
 */
static void
gst_mf_h265_enc_plugin_init_internal (GstPlugin * plugin, guint rank,
    GstMFTransform * transform, guint device_index, guint32 enum_flags)
{
  HRESULT hr;
  MFT_REGISTER_TYPE_INFO *infos;
  UINT32 info_size;
  gint i;
  GstCaps *src_caps = NULL;
  GstCaps *sink_caps = NULL;
  GValue *supported_formats = NULL;
  gboolean have_I420 = FALSE;
  gchar *device_name = NULL;
  GstMFH265EncDeviceCaps device_caps = { 0, };
  IMFActivate *activate;
  IMFTransform *encoder;
  ICodecAPI *codec_api;
  ComPtr<IMFMediaType> out_type;
  GstMFH265EncResolution resolutions_to_check[] = {
    {1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160}, {8192, 4320}
  };
  guint max_width = 0;
  guint max_height = 0;
  guint resolution;
  GStMFH265EncProfileMap profiles_to_check[] = {
    { eAVEncH265VProfile_Main_420_8, "main" },
    { eAVEncH265VProfile_Main_420_10, "main-10" },
  };
  guint num_profiles = 0;
  GValue profiles = G_VALUE_INIT;

  /* NOTE: depending on environment,
   * some enumerated h/w MFT might not be usable (e.g., multiple GPU case) */
  if (!gst_mf_transform_open (transform))
    return;

  /* Borrowed interface handles; the transform object owns them. */
  activate = gst_mf_transform_get_activate_handle (transform);
  if (!activate) {
    GST_WARNING_OBJECT (transform, "No IMFActivate interface available");
    return;
  }

  encoder = gst_mf_transform_get_transform_handle (transform);
  if (!encoder) {
    GST_WARNING_OBJECT (transform, "No IMFTransform interface available");
    return;
  }

  codec_api = gst_mf_transform_get_codec_api_handle (transform);
  if (!codec_api) {
    GST_WARNING_OBJECT (transform, "No ICodecAPI interface available");
    return;
  }

  g_object_get (transform, "device-name", &device_name, NULL);
  if (!device_name) {
    GST_WARNING_OBJECT (transform, "Unknown device name");
    return;
  }

  g_value_init (&profiles, GST_TYPE_LIST);

  /* Enumerate the raw video formats the MFT accepts on input. */
  hr = activate->GetAllocatedBlob (MFT_INPUT_TYPES_Attributes,
      (UINT8 **) & infos, &info_size);
  if (!gst_mf_result (hr))
    goto done;

  for (i = 0; i < info_size / sizeof (MFT_REGISTER_TYPE_INFO); i++) {
    GstVideoFormat vformat;
    GValue val = G_VALUE_INIT;

    vformat = gst_mf_video_subtype_to_video_format (&infos[i].guidSubtype);
    if (vformat == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    if (!supported_formats) {
      supported_formats = g_new0 (GValue, 1);
      g_value_init (supported_formats, GST_TYPE_LIST);
    }

    /* media foundation has duplicated formats IYUV and I420 */
    if (vformat == GST_VIDEO_FORMAT_I420) {
      if (have_I420)
        continue;

      have_I420 = TRUE;
    }

    g_value_init (&val, G_TYPE_STRING);
    g_value_set_static_string (&val, gst_video_format_to_string (vformat));
    gst_value_list_append_and_take_value (supported_formats, &val);
  }

  /* Blob was allocated by GetAllocatedBlob; free with CoTaskMemFree. */
  CoTaskMemFree (infos);

  if (!supported_formats)
    goto done;
  /* NOTE(review): supported_formats (GValue list) is leaked on the error
   * paths below that jump to done: — only the success path unsets/frees it. */

  /* check supported resolutions */
  hr = MFCreateMediaType (out_type.GetAddressOf ());
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetGUID (MF_MT_MAJOR_TYPE, MFMediaType_Video);
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetGUID (MF_MT_SUBTYPE, MFVideoFormat_HEVC);
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetUINT32 (MF_MT_AVG_BITRATE, 2048000);
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetUINT32 (MF_MT_MPEG2_PROFILE, eAVEncH265VProfile_Main_420_8);
  if (!gst_mf_result (hr))
    goto done;

  hr = MFSetAttributeRatio (out_type.Get (), MF_MT_FRAME_RATE, 30, 1);
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetUINT32 (MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
  if (!gst_mf_result (hr))
    goto done;

  GST_DEBUG_OBJECT (transform, "Check supported profiles of %s",
      device_name);

  /* Try setting each candidate profile as the output type; accepted ones
   * are collected into the "profiles" GstValueList for the src caps. */
  for (i = 0; i < G_N_ELEMENTS (profiles_to_check); i++) {
    GValue profile_val = G_VALUE_INIT;

    hr = out_type->SetUINT32 (MF_MT_MPEG2_PROFILE,
        profiles_to_check[i].profile);
    if (!gst_mf_result (hr))
      goto done;

    hr = MFSetAttributeSize (out_type.Get (), MF_MT_FRAME_SIZE,
        resolutions_to_check[0].width, resolutions_to_check[0].height);
    if (!gst_mf_result (hr))
      break;

    if (!gst_mf_transform_set_output_type (transform, out_type.Get ()))
      break;

    GST_DEBUG_OBJECT (transform, "MFT supports h265 %s profile",
        profiles_to_check[i].profile_str);

    g_value_init (&profile_val, G_TYPE_STRING);
    g_value_set_static_string (&profile_val, profiles_to_check[i].profile_str);
    gst_value_list_append_and_take_value (&profiles, &profile_val);
    num_profiles++;

    /* clear media type */
    gst_mf_transform_set_output_type (transform, NULL);
  }

  if (num_profiles == 0) {
    GST_WARNING_OBJECT (transform, "Couldn't query supported profile");
    goto done;
  }

  /* Reset to main (4:2:0 8-bit) as the default profile before the
   * resolution probing below. */
  hr = out_type->SetUINT32 (MF_MT_MPEG2_PROFILE, eAVEncH265VProfile_Main_420_8);
  if (!gst_mf_result (hr))
    goto done;

  /* FIXME: This would take so long time.
   * Need to find smart way to find supported resolution*/
#if 0
  for (i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) {
    guint width, height;

    width = resolutions_to_check[i].width;
    height = resolutions_to_check[i].height;

    hr = MFSetAttributeSize (out_type.Get (), MF_MT_FRAME_SIZE, width, height);
    if (!gst_mf_result (hr))
      break;

    if (!gst_mf_transform_set_output_type (transform, out_type.Get ()))
      break;

    max_width = width;
    max_height = height;

    GST_DEBUG_OBJECT (transform,
        "MFT supports resolution %dx%d", max_width, max_height);

    /* clear media type */
    gst_mf_transform_set_output_type (transform, NULL);
  }

  if (max_width == 0 || max_height == 0) {
    GST_WARNING_OBJECT (transform, "Couldn't query supported resolution");
    goto done;
  }
#else
  /* FIXME: don't hardcode supported resolution */
  max_width = max_height = 8192;
#endif

  src_caps = gst_caps_from_string ("video/x-h265, "
      "stream-format=(string) byte-stream, "
      "alignment=(string) au");
  gst_caps_set_value (src_caps, "profile", &profiles);

  sink_caps = gst_caps_new_empty_simple ("video/x-raw");
  gst_caps_set_value (sink_caps, "format", supported_formats);
  g_value_unset (supported_formats);
  g_free (supported_formats);

  /* To cover both landscape and portrait, select max value */
  resolution = MAX (max_width, max_height);
  gst_caps_set_simple (sink_caps,
      "width", GST_TYPE_INT_RANGE, 64, resolution,
      "height", GST_TYPE_INT_RANGE, 64, resolution, NULL);
  gst_caps_set_simple (src_caps,
      "width", GST_TYPE_INT_RANGE, 64, resolution,
      "height", GST_TYPE_INT_RANGE, 64, resolution, NULL);

  /* Caps live for the life of the registered type; silence leak tracer. */
  GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
  GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);

/* Set device_caps.val to TRUE when the MFT reports support for the given
 * CODECAPI property. */
#define CHECK_DEVICE_CAPS(codec_obj,api,val) \
  if (SUCCEEDED((codec_obj)->IsSupported(&(api)))) {\
    device_caps.val = TRUE; \
  }

  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonRateControlMode, rc_mode);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonBufferSize, buffer_size);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonMaxBitRate, max_bitrate);
  CHECK_DEVICE_CAPS (codec_api,
      CODECAPI_AVEncCommonQualityVsSpeed, quality_vs_speed);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncMPVDefaultBPictureCount, bframes);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncMPVGOPSize, gop_size);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncNumWorkerThreads, threads);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoContentType, content_type);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoEncodeQP, qp);
  CHECK_DEVICE_CAPS (codec_api,
      CODECAPI_AVEncVideoForceKeyFrame, force_keyframe);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVLowLatencyMode, low_latency);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMinQP, min_qp);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMaxQP, max_qp);
  CHECK_DEVICE_CAPS (codec_api,
      CODECAPI_AVEncVideoEncodeFrameTypeQP, frame_type_qp);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMaxNumRefFrame, max_num_ref);

  if (device_caps.max_num_ref) {
    VARIANT min;
    VARIANT max;
    VARIANT step;

    /* Query the device's valid range for the ref-frame count. */
    hr = codec_api->GetParameterRange (&CODECAPI_AVEncVideoMaxNumRefFrame,
        &min, &max, &step);
    if (SUCCEEDED (hr)) {
      device_caps.max_num_ref = TRUE;   /* already TRUE; kept for clarity */
      device_caps.max_num_ref_high = max.uiVal;
      device_caps.max_num_ref_low = min.uiVal;
      VariantClear (&min);
      VariantClear (&max);
      VariantClear (&step);
    }
  }

  /* Takes ownership of sink_caps/src_caps; device_caps is copied. */
  gst_mf_h265_enc_register (plugin, rank, device_name,
      &device_caps, enum_flags, device_index, sink_caps, src_caps);

done:
  g_value_unset (&profiles);
  g_free (device_name);
}
void
gst_mf_h265_enc_plugin_init (GstPlugin * plugin, guint rank)
{
GstMFTransformEnumParams enum_params = { 0, };
MFT_REGISTER_TYPE_INFO output_type;
GstMFTransform *transform;
gint i;
gboolean do_next;
GUID subtype = MFVideoFormat_HEVC;
GST_DEBUG_CATEGORY_INIT (gst_mf_h265_enc_debug, "mfh265enc", 0, "mfh265enc");
output_type.guidMajorType = MFMediaType_Video;
output_type.guidSubtype = MFVideoFormat_HEVC;
enum_params.category = MFT_CATEGORY_VIDEO_ENCODER;
enum_params.enum_flags = (MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_ASYNCMFT |
MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SORTANDFILTER_APPROVED_ONLY);
enum_params.output_typeinfo = &output_type;
/* register hardware encoders first */
i = 0;
do {
enum_params.device_index = i++;
transform = gst_mf_transform_new (&enum_params);
do_next = TRUE;
if (!transform) {
do_next = FALSE;
} else {
gst_mf_h265_enc_plugin_init_internal (plugin, rank, transform,
enum_params.device_index, enum_params.enum_flags);
gst_clear_object (&transform);
}
} while (do_next);
/* register software encoders */
enum_params.enum_flags = (MFT_ENUM_FLAG_SYNCMFT |
MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SORTANDFILTER_APPROVED_ONLY);
i = 0;
do {
enum_params.device_index = i++;
transform = gst_mf_transform_new (&enum_params);
do_next = TRUE;
if (!transform) {
do_next = FALSE;
} else {
gst_mf_h265_enc_plugin_init_internal (plugin, rank, transform,
enum_params.device_index, enum_params.enum_flags);
gst_clear_object (&transform);
}
} while (do_next);
}
gst_mf_video_enc_register (plugin, rank, &subtype, &type_info);
}

View file

@ -26,17 +26,20 @@
#include "gstmfvideoenc.h"
#include <wrl.h>
#include "gstmfvideobuffer.h"
#include <string.h>
using namespace Microsoft::WRL;
GST_DEBUG_CATEGORY (gst_mf_video_enc_debug);
G_BEGIN_DECLS
GST_DEBUG_CATEGORY_EXTERN (gst_mf_video_enc_debug);
#define GST_CAT_DEFAULT gst_mf_video_enc_debug
G_END_DECLS
#define gst_mf_video_enc_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstMFVideoEnc, gst_mf_video_enc,
GST_TYPE_VIDEO_ENCODER,
GST_DEBUG_CATEGORY_INIT (gst_mf_video_enc_debug, "mfvideoenc", 0,
"mfvideoenc"));
G_DEFINE_ABSTRACT_TYPE (GstMFVideoEnc, gst_mf_video_enc,
GST_TYPE_VIDEO_ENCODER);
static gboolean gst_mf_video_enc_open (GstVideoEncoder * enc);
static gboolean gst_mf_video_enc_close (GstVideoEncoder * enc);
@ -494,7 +497,7 @@ gst_mf_video_enc_process_input (GstMFVideoEnc * self,
goto error;
if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame)) {
if (klass->can_force_keyframe) {
if (klass->device_caps.force_keyframe) {
unset_force_keyframe =
gst_mf_transform_set_codec_api_uint32 (self->transform,
&CODECAPI_AVEncVideoForceKeyFrame, TRUE);
@ -789,3 +792,447 @@ gst_mf_video_on_new_sample (GstMFTransform * object,
return S_OK;
}
/* Codec-agnostic map from an MF profile enum value to its GStreamer caps
 * profile string; arrays of these are terminated by a { 0, NULL } entry. */
typedef struct
{
  guint profile;
  const gchar *profile_str;
} GstMFVideoEncProfileMap;
/* Probes a single opened encoder MFT:
 *  - collects the raw video formats it accepts as input and builds the
 *    sink caps template from them,
 *  - for H.264/HEVC, probes which profiles the output type accepts and
 *    adds them to the src caps template,
 *  - fills *device_caps by querying each known ICodecAPI property.
 * On failure, *sink_template / *src_template are left untouched (the
 * caller initializes them to NULL). */
static void
gst_mf_video_enc_enum_internal (GstMFTransform * transform, GUID &subtype,
    GstMFVideoEncDeviceCaps * device_caps, GstCaps ** sink_template,
    GstCaps ** src_template)
{
  HRESULT hr;
  MFT_REGISTER_TYPE_INFO *infos;
  UINT32 info_size;
  gint i;
  GstCaps *src_caps = NULL;
  GstCaps *sink_caps = NULL;
  GValue *supported_formats = NULL;
  GValue *profiles = NULL;
  gboolean have_I420 = FALSE;
  gchar *device_name = NULL;
  IMFActivate *activate;
  IMFTransform *encoder;
  ICodecAPI *codec_api;
  ComPtr<IMFMediaType> out_type;
  /* Candidate profiles to probe, best first; { 0, NULL } terminated */
  GstMFVideoEncProfileMap h264_profile_map[] = {
    { eAVEncH264VProfile_High, "high" },
    { eAVEncH264VProfile_Main, "main" },
    { eAVEncH264VProfile_Base, "baseline" },
    { 0, NULL },
  };
  GstMFVideoEncProfileMap hevc_profile_map[] = {
    { eAVEncH265VProfile_Main_420_8, "main" },
    { eAVEncH265VProfile_Main_420_10, "main-10" },
    { 0, NULL },
  };
  GstMFVideoEncProfileMap *profile_to_check = NULL;
  static gchar *h264_caps_str =
      "video/x-h264, stream-format=(string) byte-stream, alignment=(string) au";
  static gchar *hevc_caps_str =
      "video/x-h265, stream-format=(string) byte-stream, alignment=(string) au";
  static gchar *vp9_caps_str = "video/x-vp9";
  static gchar *codec_caps_str = NULL;

  /* NOTE: depending on environment,
   * some enumerated h/w MFT might not be usable (e.g., multiple GPU case) */
  if (!gst_mf_transform_open (transform))
    return;

  activate = gst_mf_transform_get_activate_handle (transform);
  if (!activate) {
    GST_WARNING_OBJECT (transform, "No IMFActivate interface available");
    return;
  }

  encoder = gst_mf_transform_get_transform_handle (transform);
  if (!encoder) {
    GST_WARNING_OBJECT (transform, "No IMFTransform interface available");
    return;
  }

  codec_api = gst_mf_transform_get_codec_api_handle (transform);
  if (!codec_api) {
    GST_WARNING_OBJECT (transform, "No ICodecAPI interface available");
    return;
  }

  /* Device name is only checked for availability here; the string itself
   * is freed immediately (registration fetches it again) */
  g_object_get (transform, "device-name", &device_name, NULL);
  if (!device_name) {
    GST_WARNING_OBJECT (transform, "Unknown device name");
    return;
  }
  g_free (device_name);

  hr = activate->GetAllocatedBlob (MFT_INPUT_TYPES_Attributes,
      (UINT8 **) & infos, &info_size);
  if (!gst_mf_result (hr))
    return;

  /* Translate each advertised input subtype into a GstVideoFormat name
   * and accumulate them in a GST_TYPE_LIST value */
  for (i = 0; i < info_size / sizeof (MFT_REGISTER_TYPE_INFO); i++) {
    GstVideoFormat format;
    GValue val = G_VALUE_INIT;

    format = gst_mf_video_subtype_to_video_format (&infos[i].guidSubtype);
    if (format == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    if (!supported_formats) {
      supported_formats = g_new0 (GValue, 1);
      g_value_init (supported_formats, GST_TYPE_LIST);
    }

    /* media foundation has duplicated formats IYUV and I420 */
    if (format == GST_VIDEO_FORMAT_I420) {
      if (have_I420)
        continue;

      have_I420 = TRUE;
    }

    g_value_init (&val, G_TYPE_STRING);
    g_value_set_static_string (&val, gst_video_format_to_string (format));
    gst_value_list_append_and_take_value (supported_formats, &val);
  }
  CoTaskMemFree (infos);

  if (!supported_formats) {
    GST_WARNING_OBJECT (transform, "Couldn't figure out supported format");
    return;
  }

  if (IsEqualGUID (MFVideoFormat_H264, subtype)) {
    profile_to_check = h264_profile_map;
    codec_caps_str = h264_caps_str;
  } else if (IsEqualGUID (MFVideoFormat_HEVC, subtype)) {
    profile_to_check = hevc_profile_map;
    codec_caps_str = hevc_caps_str;
  } else if (IsEqualGUID (MFVideoFormat_VP90, subtype)) {
    codec_caps_str = vp9_caps_str;
  } else {
    g_assert_not_reached ();
    return;
  }

  /* Probe supported profiles by trying to set an output type with each
   * candidate profile on a fixed 1920x1080 @ 30fps configuration.
   * NOTE(review): the early returns below leak the heap-allocated
   * supported_formats GValue (and, at the SetUINT32 failure path, the
   * profiles GValue) — a goto-cleanup path would fix this; confirm */
  if (profile_to_check) {
    hr = MFCreateMediaType (&out_type);
    if (!gst_mf_result (hr))
      return;

    hr = out_type->SetGUID (MF_MT_MAJOR_TYPE, MFMediaType_Video);
    if (!gst_mf_result (hr))
      return;

    hr = out_type->SetGUID (MF_MT_SUBTYPE, subtype);
    if (!gst_mf_result (hr))
      return;

    hr = out_type->SetUINT32 (MF_MT_AVG_BITRATE, 2048000);
    if (!gst_mf_result (hr))
      return;

    hr = MFSetAttributeRatio (out_type.Get (), MF_MT_FRAME_RATE, 30, 1);
    if (!gst_mf_result (hr))
      return;

    hr = out_type->SetUINT32 (MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    if (!gst_mf_result (hr))
      return;

    hr = MFSetAttributeSize (out_type.Get (), MF_MT_FRAME_SIZE, 1920, 1080);
    if (!gst_mf_result (hr))
      return;

    i = 0;
    do {
      GValue profile_val = G_VALUE_INIT;
      guint mf_profile = profile_to_check[i].profile;
      const gchar *profile_str = profile_to_check[i].profile_str;

      i++;

      /* { 0, NULL } terminator reached */
      if (mf_profile == 0)
        break;

      g_assert (profile_str != NULL);

      hr = out_type->SetUINT32 (MF_MT_MPEG2_PROFILE, mf_profile);
      if (!gst_mf_result (hr))
        return;

      /* Profile is supported iff the MFT accepts the output type */
      if (!gst_mf_transform_set_output_type (transform, out_type.Get ()))
        continue;

      if (!profiles) {
        profiles = g_new0 (GValue, 1);
        g_value_init (profiles, GST_TYPE_LIST);
      }

      g_value_init (&profile_val, G_TYPE_STRING);
      g_value_set_static_string (&profile_val, profile_str);
      gst_value_list_append_and_take_value (profiles, &profile_val);
    } while (1);

    if (!profiles) {
      GST_WARNING_OBJECT (transform, "Couldn't query supported profile");
      return;
    }
  }

  src_caps = gst_caps_from_string (codec_caps_str);
  if (profiles) {
    gst_caps_set_value (src_caps, "profile", profiles);
    g_value_unset (profiles);
    g_free (profiles);
  }

  sink_caps = gst_caps_new_empty_simple ("video/x-raw");
  gst_caps_set_value (sink_caps, "format", supported_formats);
  g_value_unset (supported_formats);
  g_free (supported_formats);

  /* FIXME: don't hardcode max resolution, but MF doesn't provide
   * API for querying supported max resolution... */
  gst_caps_set_simple (sink_caps,
      "width", GST_TYPE_INT_RANGE, 64, 8192,
      "height", GST_TYPE_INT_RANGE, 64, 8192, NULL);
  gst_caps_set_simple (src_caps,
      "width", GST_TYPE_INT_RANGE, 64, 8192,
      "height", GST_TYPE_INT_RANGE, 64, 8192, NULL);

  *sink_template = sink_caps;
  *src_template = src_caps;

/* Sets device_caps->val when the ICodecAPI reports the property as
 * supported by this MFT */
#define CHECK_DEVICE_CAPS(codec_obj,api,val) \
  if (SUCCEEDED((codec_obj)->IsSupported(&(api)))) {\
    (device_caps)->val = TRUE; \
  }

  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonRateControlMode, rc_mode);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonQuality, quality);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncAdaptiveMode, adaptive_mode);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonBufferSize, buffer_size);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonMaxBitRate, max_bitrate);
  CHECK_DEVICE_CAPS (codec_api,
      CODECAPI_AVEncCommonQualityVsSpeed, quality_vs_speed);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncH264CABACEnable, cabac);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncH264SPSID, sps_id);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncH264PPSID, pps_id);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncMPVDefaultBPictureCount, bframes);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncMPVGOPSize, gop_size);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncNumWorkerThreads, threads);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoContentType, content_type);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoEncodeQP, qp);
  CHECK_DEVICE_CAPS (codec_api,
      CODECAPI_AVEncVideoForceKeyFrame, force_keyframe);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVLowLatencyMode, low_latency);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMinQP, min_qp);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMaxQP, max_qp);
  CHECK_DEVICE_CAPS (codec_api,
      CODECAPI_AVEncVideoEncodeFrameTypeQP, frame_type_qp);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMaxNumRefFrame, max_num_ref);
  /* When ref-frame count is controllable, also query its valid range;
   * demote the flag if the range query fails */
  if (device_caps->max_num_ref) {
    VARIANT min;
    VARIANT max;
    VARIANT step;

    hr = codec_api->GetParameterRange (&CODECAPI_AVEncVideoMaxNumRefFrame,
        &min, &max, &step);
    if (SUCCEEDED (hr)) {
      device_caps->max_num_ref_high = max.uiVal;
      device_caps->max_num_ref_low = min.uiVal;
      VariantClear (&min);
      VariantClear (&max);
      VariantClear (&step);
    } else {
      device_caps->max_num_ref = FALSE;
    }
  }
#undef CHECK_DEVICE_CAPS

  return;
}
/* Creates a GstMFTransform for the given codec subtype and enumeration
 * flags at device_index, then probes it for caps templates and device
 * capabilities.  Returns NULL when no matching MFT exists; if probing
 * fails the returned transform is non-NULL but *sink_template stays NULL. */
static GstMFTransform *
gst_mf_video_enc_enum (guint enum_flags, GUID * subtype, guint device_index,
    GstMFVideoEncDeviceCaps * device_caps, GstCaps ** sink_template,
    GstCaps ** src_template)
{
  GstMFTransformEnumParams params = { 0, };
  MFT_REGISTER_TYPE_INFO reg_type;
  GstMFTransform *mft;

  memset (device_caps, 0, sizeof (GstMFVideoEncDeviceCaps));
  *sink_template = NULL;
  *src_template = NULL;

  /* Only H.264, HEVC and VP9 subtypes are handled by this helper */
  if (!IsEqualGUID (MFVideoFormat_H264, *subtype) &&
      !IsEqualGUID (MFVideoFormat_HEVC, *subtype) &&
      !IsEqualGUID (MFVideoFormat_VP90, *subtype)) {
    GST_ERROR ("Unknown subtype GUID");
    return NULL;
  }

  reg_type.guidMajorType = MFMediaType_Video;
  reg_type.guidSubtype = *subtype;

  params.category = MFT_CATEGORY_VIDEO_ENCODER;
  params.output_typeinfo = &reg_type;
  params.device_index = device_index;
  params.enum_flags = enum_flags;

  mft = gst_mf_transform_new (&params);
  if (!mft)
    return NULL;

  gst_mf_video_enc_enum_internal (mft, reg_type.guidSubtype,
      device_caps, sink_template, src_template);

  return mft;
}
/* Registers one GType + element feature for a single enumerated MFT.
 * The class data (caps, device name, device caps, enumeration parameters)
 * is attached via GTypeInfo::class_data; ownership of device_name and the
 * caps copies transfers to cdata, which the subclass class_init consumes.
 * The first device per codec keeps the plain element name ("mfh264enc");
 * subsequent devices get a "deviceN" suffix and one rank lower. */
static void
gst_mf_video_enc_register_internal (GstPlugin * plugin, guint rank,
    GUID * subtype, GTypeInfo * type_info,
    const GstMFVideoEncDeviceCaps * device_caps,
    guint32 enum_flags, guint device_index, GstMFTransform * transform,
    GstCaps * sink_caps, GstCaps * src_caps)
{
  GType type;
  GTypeInfo local_type_info;
  gchar *type_name;
  gchar *feature_name;
  gint i;
  GstMFVideoEncClassData *cdata;
  gboolean is_default = TRUE;
  gchar *device_name = NULL;
  /* String literals must be referenced through const pointers; these are
   * plain per-call locals, so no static storage is needed either */
  const gchar *type_name_prefix = NULL;
  const gchar *feature_name_prefix = NULL;

  if (IsEqualGUID (MFVideoFormat_H264, *subtype)) {
    type_name_prefix = "H264";
    feature_name_prefix = "h264";
  } else if (IsEqualGUID (MFVideoFormat_HEVC, *subtype)) {
    type_name_prefix = "H265";
    feature_name_prefix = "h265";
  } else if (IsEqualGUID (MFVideoFormat_VP90, *subtype)) {
    type_name_prefix = "VP9";
    feature_name_prefix = "vp9";
  } else {
    g_assert_not_reached ();
    return;
  }

  /* Must be checked already */
  g_object_get (transform, "device-name", &device_name, NULL);
  g_assert (device_name != NULL);

  cdata = g_new0 (GstMFVideoEncClassData, 1);
  cdata->sink_caps = gst_caps_copy (sink_caps);
  cdata->src_caps = gst_caps_copy (src_caps);
  cdata->device_name = device_name;
  cdata->device_caps = *device_caps;
  cdata->enum_flags = enum_flags;
  cdata->device_index = device_index;

  local_type_info = *type_info;
  local_type_info.class_data = cdata;

  /* cdata's caps live as long as the registered GType, i.e. forever */
  GST_MINI_OBJECT_FLAG_SET (cdata->sink_caps,
      GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
  GST_MINI_OBJECT_FLAG_SET (cdata->src_caps,
      GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);

  type_name = g_strdup_printf ("GstMF%sEnc", type_name_prefix);
  feature_name = g_strdup_printf ("mf%senc", feature_name_prefix);

  /* Find a free type/feature name for additional devices */
  i = 1;
  while (g_type_from_name (type_name) != 0) {
    g_free (type_name);
    g_free (feature_name);
    type_name = g_strdup_printf ("GstMF%sDevice%dEnc", type_name_prefix, i);
    feature_name = g_strdup_printf ("mf%sdevice%denc", feature_name_prefix, i);
    is_default = FALSE;
    i++;
  }

  cdata->is_default = is_default;

  type =
      g_type_register_static (GST_TYPE_MF_VIDEO_ENC, type_name,
      &local_type_info, (GTypeFlags) 0);

  /* make lower rank than default device */
  if (rank > 0 && !is_default)
    rank--;

  if (!gst_element_register (plugin, feature_name, rank, type))
    GST_WARNING ("Failed to register plugin '%s'", type_name);

  g_free (type_name);
  g_free (feature_name);
}
/* Shared registration entry point for all codec subclasses: enumerates
 * hardware MFTs (per device) first, then the software MFT, and registers
 * an element for each usable one.  subtype selects the codec
 * (H264/HEVC/VP90); type_info is the subclass GTypeInfo template. */
void
gst_mf_video_enc_register (GstPlugin * plugin, guint rank, GUID * subtype,
    GTypeInfo * type_info)
{
  GstMFTransform *transform = NULL;
  GstCaps *sink_template = NULL;
  GstCaps *src_template = NULL;
  guint enum_flags;
  GstMFVideoEncDeviceCaps device_caps;
  guint i;

  /* register hardware encoders first */
  enum_flags = (MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_ASYNCMFT |
      MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SORTANDFILTER_APPROVED_ONLY);

  /* AMD seems to be able to support up to 12 GPUs */
  for (i = 0; i < 12; i++) {
    transform = gst_mf_video_enc_enum (enum_flags, subtype, i, &device_caps,
        &sink_template, &src_template);

    /* No more MFT to enumerate */
    if (!transform)
      break;

    /* Failed to open MFT */
    if (!sink_template) {
      gst_clear_object (&transform);
      continue;
    }

    gst_mf_video_enc_register_internal (plugin, rank, subtype,
        type_info, &device_caps, enum_flags, i, transform,
        sink_template, src_template);
    gst_clear_object (&transform);
    gst_clear_caps (&sink_template);
    gst_clear_caps (&src_template);
  }

  /* register software encoders */
  enum_flags = (MFT_ENUM_FLAG_SYNCMFT |
      MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SORTANDFILTER_APPROVED_ONLY);

  transform = gst_mf_video_enc_enum (enum_flags, subtype, 0, &device_caps,
      &sink_template, &src_template);

  if (!transform)
    goto done;

  if (!sink_template)
    goto done;

  /* The software MFT was enumerated with device index 0 above, so register
   * it with index 0 as well (not the stale hardware-loop counter) */
  gst_mf_video_enc_register_internal (plugin, rank, subtype, type_info,
      &device_caps, enum_flags, 0, transform, sink_template, src_template);

done:
  gst_clear_object (&transform);
  gst_clear_caps (&sink_template);
  gst_clear_caps (&src_template);
}

View file

@ -37,6 +37,47 @@ G_BEGIN_DECLS
typedef struct _GstMFVideoEnc GstMFVideoEnc;
typedef struct _GstMFVideoEncClass GstMFVideoEncClass;
typedef struct _GstMFVideoEncDeviceCaps GstMFVideoEncDeviceCaps;
typedef struct _GstMFVideoEncClassData GstMFVideoEncClassData;

/* Per-device encoder capability flags; each boolean records whether the
 * ICodecAPI property named in the trailing comment is supported by the
 * enumerated MFT */
struct _GstMFVideoEncDeviceCaps
{
  gboolean rc_mode;             /* AVEncCommonRateControlMode */
  gboolean quality;             /* AVEncCommonQuality */
  gboolean adaptive_mode;       /* AVEncAdaptiveMode */
  gboolean buffer_size;         /* AVEncCommonBufferSize */
  gboolean max_bitrate;         /* AVEncCommonMaxBitRate */
  gboolean quality_vs_speed;    /* AVEncCommonQualityVsSpeed */
  gboolean cabac;               /* AVEncH264CABACEnable */
  gboolean sps_id;              /* AVEncH264SPSID */
  gboolean pps_id;              /* AVEncH264PPSID */
  gboolean bframes;             /* AVEncMPVDefaultBPictureCount */
  gboolean gop_size;            /* AVEncMPVGOPSize */
  gboolean threads;             /* AVEncNumWorkerThreads */
  gboolean content_type;        /* AVEncVideoContentType */
  gboolean qp;                  /* AVEncVideoEncodeQP */
  gboolean force_keyframe;      /* AVEncVideoForceKeyFrame */
  gboolean low_latency;         /* AVLowLatencyMode */
  gboolean min_qp;              /* AVEncVideoMinQP */
  gboolean max_qp;              /* AVEncVideoMaxQP */
  gboolean frame_type_qp;       /* AVEncVideoEncodeFrameTypeQP */
  gboolean max_num_ref;         /* AVEncVideoMaxNumRefFrame */
  /* Valid range of AVEncVideoMaxNumRefFrame, filled via GetParameterRange
   * when max_num_ref is TRUE */
  guint max_num_ref_high;
  guint max_num_ref_low;
};
/* Per-registered-type data passed to subclass class_init via
 * GTypeInfo::class_data; owns device_name and the caps (presumably freed
 * by the subclass class_init once copied into the class — verify against
 * the subclass implementations) */
struct _GstMFVideoEncClassData
{
  GstCaps *sink_caps;
  GstCaps *src_caps;
  gchar *device_name;
  guint32 enum_flags;           /* MFT_ENUM_FLAG used for enumeration */
  guint device_index;           /* index of the enumerated IMFActivate */
  GstMFVideoEncDeviceCaps device_caps;
  gboolean is_default;          /* TRUE for the first (unsuffixed) device */
};
struct _GstMFVideoEnc
{
@ -53,10 +94,11 @@ struct _GstMFVideoEncClass
{
GstVideoEncoderClass parent_class;
GUID codec_id;
guint32 enum_flags;
guint device_index;
gboolean can_force_keyframe;
/* Set by subclass */
GUID codec_id; /* Output subtype of MFT */
guint32 enum_flags; /* MFT_ENUM_FLAG */
guint device_index; /* Index of enumerated IMFActivate via MFTEnum */
GstMFVideoEncDeviceCaps device_caps;
gboolean (*set_option) (GstMFVideoEnc * mfenc,
IMFMediaType * output_type);
@ -68,6 +110,11 @@ struct _GstMFVideoEncClass
GType gst_mf_video_enc_get_type (void);
void gst_mf_video_enc_register (GstPlugin * plugin,
guint rank,
GUID * subtype,
GTypeInfo * type_info);
G_END_DECLS
#endif /* __GST_MF_VIDEO_ENC_H__ */

View file

@ -116,21 +116,6 @@ enum
#define DEFAULT_CONTENT_TYPE GST_MF_VP9_ENC_CONTENT_TYPE_UNKNOWN
#define DEFAULT_LOW_LATENCY FALSE
#define GST_MF_VP9_ENC_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj), G_TYPE_FROM_INSTANCE (obj), GstMFVP9EncClass))
typedef struct _GstMFVP9EncDeviceCaps
{
gboolean rc_mode; /* AVEncCommonRateControlMode */
gboolean max_bitrate; /* AVEncCommonMaxBitRate */
gboolean quality_vs_speed; /* AVEncCommonQualityVsSpeed */
gboolean gop_size; /* AVEncMPVGOPSize */
gboolean threads; /* AVEncNumWorkerThreads */
gboolean content_type; /* AVEncVideoContentType */
gboolean force_keyframe; /* AVEncVideoForceKeyFrame */
gboolean low_latency; /* AVLowLatencyMode */
} GstMFVP9EncDeviceCaps;
typedef struct _GstMFVP9Enc
{
GstMFVideoEnc parent;
@ -151,21 +136,8 @@ typedef struct _GstMFVP9Enc
typedef struct _GstMFVP9EncClass
{
GstMFVideoEncClass parent_class;
GstMFVP9EncDeviceCaps device_caps;
} GstMFVP9EncClass;
typedef struct
{
GstCaps *sink_caps;
GstCaps *src_caps;
gchar *device_name;
guint32 enum_flags;
guint device_index;
GstMFVP9EncDeviceCaps device_caps;
gboolean is_default;
} GstMFVP9EncClassData;
static GstElementClass *parent_class = NULL;
static void gst_mf_vp9_enc_get_property (GObject * object, guint prop_id,
@ -183,13 +155,12 @@ gst_mf_vp9_enc_class_init (GstMFVP9EncClass * klass, gpointer data)
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstMFVideoEncClass *mfenc_class = GST_MF_VIDEO_ENC_CLASS (klass);
GstMFVP9EncClassData *cdata = (GstMFVP9EncClassData *) data;
GstMFVP9EncDeviceCaps *device_caps = &cdata->device_caps;
GstMFVideoEncClassData *cdata = (GstMFVideoEncClassData *) data;
GstMFVideoEncDeviceCaps *device_caps = &cdata->device_caps;
gchar *long_name;
gchar *classification;
parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
klass->device_caps = *device_caps;
gobject_class->get_property = gst_mf_vp9_enc_get_property;
gobject_class->set_property = gst_mf_vp9_enc_set_property;
@ -302,7 +273,7 @@ gst_mf_vp9_enc_class_init (GstMFVP9EncClass * klass, gpointer data)
mfenc_class->codec_id = MFVideoFormat_VP90;
mfenc_class->enum_flags = cdata->enum_flags;
mfenc_class->device_index = cdata->device_index;
mfenc_class->can_force_keyframe = device_caps->force_keyframe;
mfenc_class->device_caps = *device_caps;
g_free (cdata->device_name);
gst_caps_unref (cdata->sink_caps);
@ -434,8 +405,8 @@ static gboolean
gst_mf_vp9_enc_set_option (GstMFVideoEnc * mfenc, IMFMediaType * output_type)
{
GstMFVP9Enc *self = (GstMFVP9Enc *) mfenc;
GstMFVP9EncClass *klass = GST_MF_VP9_ENC_GET_CLASS (self);
GstMFVP9EncDeviceCaps *device_caps = &klass->device_caps;
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (mfenc);
GstMFVideoEncDeviceCaps *device_caps = &klass->device_caps;
HRESULT hr;
GstMFTransform *transform = mfenc->transform;
@ -536,21 +507,11 @@ gst_mf_vp9_enc_set_src_caps (GstMFVideoEnc * mfenc,
gst_tag_list_unref (tags);
return TRUE;
}
static void
gst_mf_vp9_enc_register (GstPlugin * plugin, guint rank,
const gchar * device_name, const GstMFVP9EncDeviceCaps * device_caps,
guint32 enum_flags, guint device_index,
GstCaps * sink_caps, GstCaps * src_caps)
void
gst_mf_vp9_enc_plugin_init (GstPlugin * plugin, guint rank)
{
GType type;
gchar *type_name;
gchar *feature_name;
gint i;
GstMFVP9EncClassData *cdata;
gboolean is_default = TRUE;
GTypeInfo type_info = {
sizeof (GstMFVP9EncClass),
NULL,
@ -562,249 +523,9 @@ gst_mf_vp9_enc_register (GstPlugin * plugin, guint rank,
0,
(GInstanceInitFunc) gst_mf_vp9_enc_init,
};
cdata = g_new0 (GstMFVP9EncClassData, 1);
cdata->sink_caps = sink_caps;
cdata->src_caps = src_caps;
cdata->device_name = g_strdup (device_name);
cdata->device_caps = *device_caps;
cdata->enum_flags = enum_flags;
cdata->device_index = device_index;
type_info.class_data = cdata;
type_name = g_strdup ("GstMFVP9Enc");
feature_name = g_strdup ("mfvp9enc");
i = 1;
while (g_type_from_name (type_name) != 0) {
g_free (type_name);
g_free (feature_name);
type_name = g_strdup_printf ("GstMFVP9Device%dEnc", i);
feature_name = g_strdup_printf ("mfvp9device%denc", i);
is_default = FALSE;
i++;
}
cdata->is_default = is_default;
type =
g_type_register_static (GST_TYPE_MF_VIDEO_ENC, type_name, &type_info,
(GTypeFlags) 0);
/* make lower rank than default device */
if (rank > 0 && !is_default)
rank--;
if (!gst_element_register (plugin, feature_name, rank, type))
GST_WARNING ("Failed to register plugin '%s'", type_name);
g_free (type_name);
g_free (feature_name);
}
/* Probes one enumerated VP9 MFT: collects its supported raw input formats,
 * verifies an output media type can be constructed, queries ICodecAPI
 * capabilities, and registers the element via gst_mf_vp9_enc_register() */
static void
gst_mf_vp9_enc_plugin_init_internal (GstPlugin * plugin, guint rank,
    GstMFTransform * transform, guint device_index, guint32 enum_flags)
{
  HRESULT hr;
  MFT_REGISTER_TYPE_INFO *infos;
  UINT32 info_size;
  gint i;
  GstCaps *src_caps = NULL;
  GstCaps *sink_caps = NULL;
  GValue *supported_formats = NULL;
  gboolean have_I420 = FALSE;
  gchar *device_name = NULL;
  GstMFVP9EncDeviceCaps device_caps = { 0, };
  IMFActivate *activate;
  IMFTransform *encoder;
  ICodecAPI *codec_api;
  ComPtr<IMFMediaType> out_type;

  /* NOTE: depending on environment,
   * some enumerated h/w MFT might not be usable (e.g., multiple GPU case) */
  if (!gst_mf_transform_open (transform))
    return;

  activate = gst_mf_transform_get_activate_handle (transform);
  if (!activate) {
    GST_WARNING_OBJECT (transform, "No IMFActivate interface available");
    return;
  }

  encoder = gst_mf_transform_get_transform_handle (transform);
  if (!encoder) {
    GST_WARNING_OBJECT (transform, "No IMFTransform interface available");
    return;
  }

  codec_api = gst_mf_transform_get_codec_api_handle (transform);
  if (!codec_api) {
    GST_WARNING_OBJECT (transform, "No ICodecAPI interface available");
    return;
  }

  g_object_get (transform, "device-name", &device_name, NULL);
  if (!device_name) {
    GST_WARNING_OBJECT (transform, "Unknown device name");
    return;
  }

  hr = activate->GetAllocatedBlob (MFT_INPUT_TYPES_Attributes,
      (UINT8 **) & infos, &info_size);
  if (!gst_mf_result (hr))
    goto done;

  /* Collect supported raw input formats into a GST_TYPE_LIST value */
  for (i = 0; i < info_size / sizeof (MFT_REGISTER_TYPE_INFO); i++) {
    GstVideoFormat vformat;
    GValue val = G_VALUE_INIT;

    vformat = gst_mf_video_subtype_to_video_format (&infos[i].guidSubtype);
    if (vformat == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    if (!supported_formats) {
      supported_formats = g_new0 (GValue, 1);
      g_value_init (supported_formats, GST_TYPE_LIST);
    }

    /* media foundation has duplicated formats IYUV and I420 */
    if (vformat == GST_VIDEO_FORMAT_I420) {
      if (have_I420)
        continue;

      have_I420 = TRUE;
    }

    g_value_init (&val, G_TYPE_STRING);
    g_value_set_static_string (&val, gst_video_format_to_string (vformat));
    gst_value_list_append_and_take_value (supported_formats, &val);
  }

  CoTaskMemFree (infos);

  if (!supported_formats)
    goto done;

  /* check supported resolutions
   * NOTE(review): the goto done paths below free device_name but leak the
   * heap-allocated supported_formats GValue — confirm */
  hr = MFCreateMediaType (out_type.GetAddressOf ());
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetGUID (MF_MT_MAJOR_TYPE, MFMediaType_Video);
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetGUID (MF_MT_SUBTYPE, MFVideoFormat_VP90);
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetUINT32 (MF_MT_AVG_BITRATE, 2048000);
  if (!gst_mf_result (hr))
    goto done;

  hr = MFSetAttributeRatio (out_type.Get (), MF_MT_FRAME_RATE, 30, 1);
  if (!gst_mf_result (hr))
    goto done;

  hr = out_type->SetUINT32 (MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
  if (!gst_mf_result (hr))
    goto done;

  GST_DEBUG_OBJECT (transform, "Check supported profiles of %s",
      device_name);

  src_caps = gst_caps_new_empty_simple ("video/x-vp9");
  sink_caps = gst_caps_new_empty_simple ("video/x-raw");
  gst_caps_set_value (sink_caps, "format", supported_formats);
  g_value_unset (supported_formats);
  g_free (supported_formats);

  /* FIXME: don't hardcode resolution */
  gst_caps_set_simple (sink_caps,
      "width", GST_TYPE_INT_RANGE, 64, 8192,
      "height", GST_TYPE_INT_RANGE, 64, 8192, NULL);
  gst_caps_set_simple (src_caps,
      "width", GST_TYPE_INT_RANGE, 64, 8192,
      "height", GST_TYPE_INT_RANGE, 64, 8192, NULL);

  /* caps are stored in class data for the lifetime of the GType */
  GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
  GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);

/* Sets device_caps.val when the ICodecAPI reports the property supported */
#define CHECK_DEVICE_CAPS(codec_obj,api,val) \
  if (SUCCEEDED((codec_obj)->IsSupported(&(api)))) {\
    device_caps.val = TRUE; \
  }

  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonRateControlMode, rc_mode);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonMaxBitRate, max_bitrate);
  CHECK_DEVICE_CAPS (codec_api,
      CODECAPI_AVEncCommonQualityVsSpeed, quality_vs_speed);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncMPVGOPSize, gop_size);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncNumWorkerThreads, threads);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoContentType, content_type);
  CHECK_DEVICE_CAPS (codec_api,
      CODECAPI_AVEncVideoForceKeyFrame, force_keyframe);
  CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVLowLatencyMode, low_latency);

  gst_mf_vp9_enc_register (plugin, rank, device_name,
      &device_caps, enum_flags, device_index, sink_caps, src_caps);

done:
  g_free (device_name);
}
void
gst_mf_vp9_enc_plugin_init (GstPlugin * plugin, guint rank)
{
GstMFTransformEnumParams enum_params = { 0, };
MFT_REGISTER_TYPE_INFO output_type;
GstMFTransform *transform;
gint i;
gboolean do_next;
GUID subtype = MFVideoFormat_VP90;
GST_DEBUG_CATEGORY_INIT (gst_mf_vp9_enc_debug, "mfvp9enc", 0, "mfvp9enc");
output_type.guidMajorType = MFMediaType_Video;
output_type.guidSubtype = MFVideoFormat_VP90;
enum_params.category = MFT_CATEGORY_VIDEO_ENCODER;
enum_params.enum_flags = (MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_ASYNCMFT |
MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SORTANDFILTER_APPROVED_ONLY);
enum_params.output_typeinfo = &output_type;
/* register hardware encoders first */
i = 0;
do {
enum_params.device_index = i++;
transform = gst_mf_transform_new (&enum_params);
do_next = TRUE;
if (!transform) {
do_next = FALSE;
} else {
gst_mf_vp9_enc_plugin_init_internal (plugin, rank, transform,
enum_params.device_index, enum_params.enum_flags);
gst_clear_object (&transform);
}
} while (do_next);
/* register software encoders */
enum_params.enum_flags = (MFT_ENUM_FLAG_SYNCMFT |
MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SORTANDFILTER_APPROVED_ONLY);
i = 0;
do {
enum_params.device_index = i++;
transform = gst_mf_transform_new (&enum_params);
do_next = TRUE;
if (!transform) {
do_next = FALSE;
} else {
gst_mf_vp9_enc_plugin_init_internal (plugin, rank, transform,
enum_params.device_index, enum_params.enum_flags);
gst_clear_object (&transform);
}
} while (do_next);
}
gst_mf_video_enc_register (plugin, rank, &subtype, &type_info);
}

View file

@ -41,6 +41,7 @@ GST_DEBUG_CATEGORY (gst_mf_utils_debug);
GST_DEBUG_CATEGORY (gst_mf_source_object_debug);
GST_DEBUG_CATEGORY (gst_mf_transform_debug);
GST_DEBUG_CATEGORY (gst_mf_video_buffer_debug);
GST_DEBUG_CATEGORY (gst_mf_video_enc_debug);
#define GST_CAT_DEFAULT gst_mf_debug
@ -71,6 +72,8 @@ plugin_init (GstPlugin * plugin)
"mftransform", 0, "mftransform");
GST_DEBUG_CATEGORY_INIT (gst_mf_video_buffer_debug,
"mfvideobuffer", 0, "mfvideobuffer");
GST_DEBUG_CATEGORY_INIT (gst_mf_video_enc_debug,
"mfvideoenc", 0, "mfvideoenc");
hr = MFStartup (MF_VERSION, MFSTARTUP_NOSOCKET);
if (!gst_mf_result (hr)) {