From 9625d19279c65da1c38a2cd71254eba1ee32d6bf Mon Sep 17 00:00:00 2001 From: Seungha Yang Date: Tue, 7 Jan 2020 17:45:22 +0900 Subject: [PATCH] mediafoundation: Add h264 encoder Add Media Foundation h264 encoder. If hardware encoders are available on system, they will have higher rank than software encoder. Part-of: --- sys/mediafoundation/gstmfh264enc.cpp | 1395 ++++++++++++++++++++++++ sys/mediafoundation/gstmfh264enc.h | 33 + sys/mediafoundation/gstmftransform.cpp | 929 ++++++++++++++++ sys/mediafoundation/gstmftransform.h | 95 ++ sys/mediafoundation/gstmfvideoenc.cpp | 574 ++++++++++ sys/mediafoundation/gstmfvideoenc.h | 71 ++ sys/mediafoundation/gststrmif.h | 305 ++++++ sys/mediafoundation/meson.build | 24 +- sys/mediafoundation/plugin.c | 22 +- 9 files changed, 3437 insertions(+), 11 deletions(-) create mode 100644 sys/mediafoundation/gstmfh264enc.cpp create mode 100644 sys/mediafoundation/gstmfh264enc.h create mode 100644 sys/mediafoundation/gstmftransform.cpp create mode 100644 sys/mediafoundation/gstmftransform.h create mode 100644 sys/mediafoundation/gstmfvideoenc.cpp create mode 100644 sys/mediafoundation/gstmfvideoenc.h create mode 100644 sys/mediafoundation/gststrmif.h diff --git a/sys/mediafoundation/gstmfh264enc.cpp b/sys/mediafoundation/gstmfh264enc.cpp new file mode 100644 index 0000000000..0eb3586622 --- /dev/null +++ b/sys/mediafoundation/gstmfh264enc.cpp @@ -0,0 +1,1395 @@ +/* GStreamer + * Copyright (C) 2019 Seungha Yang + * Copyright (C) 2020 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +/** + * SECTION:element-mfh264enc + * @title: mfh264enc + * + * This element encodes raw video into H264 compressed data. + * + * ## Example pipelines + * |[ + * gst-launch-1.0 -v videotestsrc ! mfh264enc ! h264parse ! qtmux ! filesink location=videotestsrc.mp4 + * ]| This example pipeline will encode a test video source to H264 using + * Media Foundation encoder, and muxes it in a mp4 container. 
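
For reference, the same pipeline can also be driven from application code. The sketch below is an illustration only (not part of this patch) and uses nothing beyond core GStreamer API; the num-buffers value is arbitrary and just makes videotestsrc post EOS so the MP4 file gets finalized.

  #include <gst/gst.h>

  int
  main (int argc, char ** argv)
  {
    GstElement *pipeline;
    GstBus *bus;
    GstMessage *msg;

    gst_init (&argc, &argv);

    /* Same elements as the gst-launch-1.0 example; num-buffers makes
     * videotestsrc send EOS after 300 frames so qtmux can finalize the file */
    pipeline = gst_parse_launch ("videotestsrc num-buffers=300 ! mfh264enc ! "
        "h264parse ! qtmux ! filesink location=videotestsrc.mp4", NULL);

    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Block until EOS or an error is posted on the bus */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        (GstMessageType) (GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
    if (msg)
      gst_message_unref (msg);

    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (bus);
    gst_object_unref (pipeline);

    return 0;
  }
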
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include "gstmfvideoenc.h" +#include "gstmfh264enc.h" +#include + +using namespace Microsoft::WRL; + +GST_DEBUG_CATEGORY (gst_mf_h264_enc_debug); +#define GST_CAT_DEFAULT gst_mf_h264_enc_debug + +enum +{ + GST_MF_H264_ENC_RC_MODE_CBR = 0, + GST_MF_H264_ENC_RC_MODE_PEAK_CONSTRAINED_VBR, + GST_MF_H264_ENC_RC_MODE_UNCONSTRAINED_VBR, + GST_MF_H264_ENC_RC_MODE_QUALITY, +}; + +#define GST_TYPE_MF_H264_ENC_RC_MODE (gst_mf_h264_enc_rc_mode_get_type()) +static GType +gst_mf_h264_enc_rc_mode_get_type (void) +{ + static GType rc_mode_type = 0; + + static const GEnumValue rc_mode_types[] = { + {GST_MF_H264_ENC_RC_MODE_CBR, "Constant bitrate", "cbr"}, + {GST_MF_H264_ENC_RC_MODE_PEAK_CONSTRAINED_VBR, + "Peak Constrained variable bitrate", "pcvbr"}, + {GST_MF_H264_ENC_RC_MODE_UNCONSTRAINED_VBR, + "Unconstrained variable bitrate", "uvbr"}, + {GST_MF_H264_ENC_RC_MODE_QUALITY, "Quality-based variable bitrate", "qvbr"}, + {0, NULL, NULL} + }; + + if (!rc_mode_type) { + rc_mode_type = g_enum_register_static ("GstMFH264EncRCMode", rc_mode_types); + } + return rc_mode_type; +} + +enum +{ + GST_MF_H264_ENC_ADAPTIVE_MODE_NONE, + GST_MF_H264_ENC_ADAPTIVE_MODE_FRAMERATE, +}; + +#define GST_TYPE_MF_H264_ENC_ADAPTIVE_MODE (gst_mf_h264_enc_adaptive_mode_get_type()) +static GType +gst_mf_h264_enc_adaptive_mode_get_type (void) +{ + static GType adaptive_mode_type = 0; + + static const GEnumValue adaptive_mode_types[] = { + {GST_MF_H264_ENC_ADAPTIVE_MODE_NONE, "None", "none"}, + {GST_MF_H264_ENC_ADAPTIVE_MODE_FRAMERATE, + "Adaptively change the frame rate", "framerate"}, + {0, NULL, NULL} + }; + + if (!adaptive_mode_type) { + adaptive_mode_type = + g_enum_register_static ("GstMFH264EncAdaptiveMode", + adaptive_mode_types); + } + return adaptive_mode_type; +} + +enum +{ + GST_MF_H264_ENC_CONTENT_TYPE_UNKNOWN, + GST_MF_H264_ENC_CONTENT_TYPE_FIXED_CAMERA_ANGLE, +}; + +#define GST_TYPE_MF_H264_ENC_CONTENT_TYPE (gst_mf_h264_enc_content_type_get_type()) +static GType +gst_mf_h264_enc_content_type_get_type (void) +{ + static GType content_type = 0; + + static const GEnumValue content_types[] = { + {GST_MF_H264_ENC_CONTENT_TYPE_UNKNOWN, "Unknown", "unknown"}, + {GST_MF_H264_ENC_CONTENT_TYPE_FIXED_CAMERA_ANGLE, + "Fixed Camera Angle, such as a webcam", "fixed"}, + {0, NULL, NULL} + }; + + if (!content_type) { + content_type = + g_enum_register_static ("GstMFH264EncContentType", content_types); + } + return content_type; +} + +enum +{ + PROP_0, + PROP_BITRATE, + PROP_RC_MODE, + PROP_QUALITY, + PROP_ADAPTIVE_MODE, + PROP_BUFFER_SIZE, + PROP_MAX_BITRATE, + PROP_QUALITY_VS_SPEED, + PROP_CABAC, + PROP_SPS_ID, + PROP_PPS_ID, + PROP_BFRAMES, + PROP_GOP_SIZE, + PROP_THREADS, + PROP_CONTENT_TYPE, + PROP_QP, + PROP_LOW_LATENCY, + PROP_MIN_QP, + PROP_MAX_QP, + PROP_QP_I, + PROP_QP_P, + PROP_QP_B, + PROP_REF, +}; + +#define DEFAULT_BITRATE (2 * 1024) +#define DEFAULT_RC_MODE GST_MF_H264_ENC_RC_MODE_UNCONSTRAINED_VBR +#define DEFAULT_QUALITY_LEVEL 70 +#define DEFAULT_ADAPTIVE_MODE GST_MF_H264_ENC_ADAPTIVE_MODE_NONE +#define DEFAULT_BUFFER_SIZE 0 +#define DEFAULT_MAX_BITRATE 0 +#define DEFAULT_QUALITY_VS_SPEED 50 +#define DEFAULT_CABAC TRUE +#define DEFAULT_SPS_ID 0 +#define DEFAULT_PPS_ID 0 +#define DEFAULT_BFRAMES 0 +#define DEFAULT_GOP_SIZE 0 +#define DEFAULT_THREADS 0 +#define DEFAULT_CONTENT_TYPE GST_MF_H264_ENC_CONTENT_TYPE_UNKNOWN +#define DEFAULT_QP 24 +#define DEFAULT_LOW_LATENCY FALSE +#define DEFAULT_MIN_QP 0 +#define DEFAULT_MAX_QP 51 +#define 
DEFAULT_QP_I 26 +#define DEFAULT_QP_P 26 +#define DEFAULT_QP_B 26 +#define DEFAULT_REF 2 + +#define GST_MF_H264_ENC_GET_CLASS(obj) \ + (G_TYPE_INSTANCE_GET_CLASS((obj), G_TYPE_FROM_INSTANCE (obj), GstMFH264EncClass)) + +typedef struct _GstMFH264EncDeviceCaps +{ + /* if CodecAPI is available */ + gboolean rc_mode; /* AVEncCommonRateControlMode */ + gboolean quality; /* AVEncCommonQuality */ + + gboolean adaptive_mode; /* AVEncAdaptiveMode */ + gboolean buffer_size; /* AVEncCommonBufferSize */ + gboolean max_bitrate; /* AVEncCommonMaxBitRate */ + gboolean quality_vs_speed; /* AVEncCommonQualityVsSpeed */ + gboolean cabac; /* AVEncH264CABACEnable */ + gboolean sps_id; /* AVEncH264SPSID */ + gboolean pps_id; /* AVEncH264PPSID */ + gboolean bframes; /* AVEncMPVDefaultBPictureCount */ + gboolean gop_size; /* AVEncMPVGOPSize */ + gboolean threads; /* AVEncNumWorkerThreads */ + gboolean content_type; /* AVEncVideoContentType */ + gboolean qp; /* AVEncVideoEncodeQP */ + gboolean force_keyframe; /* AVEncVideoForceKeyFrame */ + gboolean low_latency; /* AVLowLatencyMode */ + + /* since Windows 8.1 */ + gboolean min_qp; /* AVEncVideoMinQP */ + gboolean max_qp; /* AVEncVideoMaxQP */ + gboolean frame_type_qp; /* AVEncVideoEncodeFrameTypeQP */ + gboolean max_num_ref; /* AVEncVideoMaxNumRefFrame */ + guint max_num_ref_high; + guint max_num_ref_low; +} GstMFH264EncDeviceCaps; + +typedef struct _GstMFH264Enc +{ + GstMFVideoEnc parent; + + /* properteies */ + guint bitrate; + + /* device dependent properties */ + guint rc_mode; + guint quality; + guint adaptive_mode; + guint buffer_size; + guint max_bitrate; + guint quality_vs_speed; + gboolean cabac; + guint sps_id; + guint pps_id; + guint bframes; + guint gop_size; + guint threads; + guint content_type; + guint qp; + gboolean low_latency; + guint min_qp; + guint max_qp; + guint qp_i; + guint qp_p; + guint qp_b; + guint max_num_ref; +} GstMFH264Enc; + +typedef struct _GstMFH264EncClass +{ + GstMFVideoEncClass parent_class; + + GstMFH264EncDeviceCaps device_caps; +} GstMFH264EncClass; + +typedef struct +{ + GstCaps *sink_caps; + GstCaps *src_caps; + gchar *device_name; + guint32 enum_flags; + guint device_index; + GstMFH264EncDeviceCaps device_caps; +} GstMFH264EncClassData; + +static GstElementClass *parent_class = NULL; + +static void gst_mf_h264_enc_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); +static void gst_mf_h264_enc_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +static gboolean gst_mf_h264_enc_set_option (GstMFVideoEnc * mfenc, + IMFMediaType * output_type); +static gboolean gst_mf_h264_enc_set_src_caps (GstMFVideoEnc * mfenc, + GstVideoCodecState * state, IMFMediaType * output_type); + +static void +gst_mf_h264_enc_class_init (GstMFH264EncClass * klass, gpointer data) +{ + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); + GstMFVideoEncClass *mfenc_class = GST_MF_VIDEO_ENC_CLASS (klass); + GstMFH264EncClassData *cdata = (GstMFH264EncClassData *) data; + GstMFH264EncDeviceCaps *device_caps = &cdata->device_caps; + gchar *long_name; + gchar *classification; + + parent_class = (GstElementClass *) g_type_class_peek_parent (klass); + klass->device_caps = *device_caps; + + gobject_class->get_property = gst_mf_h264_enc_get_property; + gobject_class->set_property = gst_mf_h264_enc_set_property; + + g_object_class_install_property (gobject_class, PROP_BITRATE, + g_param_spec_uint ("bitrate", "Bitrate", 
"Bitrate in kbit/sec", 1, + (G_MAXUINT >> 10), DEFAULT_BITRATE, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + if (device_caps->rc_mode) { + g_object_class_install_property (gobject_class, PROP_RC_MODE, + g_param_spec_enum ("rc-mode", "Rate Control Mode", + "Rate Control Mode " + "(Exposed only if supported by device)", + GST_TYPE_MF_H264_ENC_RC_MODE, DEFAULT_RC_MODE, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + /* quality and qp has the identical meaning but scale is different + * use qp if available */ + if (device_caps->quality && !device_caps->qp) { + g_object_class_install_property (gobject_class, PROP_QUALITY, + g_param_spec_uint ("quality", "Quality", + "Quality applied when rc-mode is qvbr " + "(Exposed only if supported by device)", + 1, 100, DEFAULT_QUALITY_LEVEL, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->adaptive_mode) { + g_object_class_install_property (gobject_class, PROP_ADAPTIVE_MODE, + g_param_spec_enum ("adaptive-mode", "Adaptive Mode", + "Adaptive Mode (Exposed only if supported by device)", + GST_TYPE_MF_H264_ENC_ADAPTIVE_MODE, DEFAULT_ADAPTIVE_MODE, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->buffer_size) { + g_object_class_install_property (gobject_class, PROP_BUFFER_SIZE, + g_param_spec_uint ("vbv-buffer-size", "VBV Buffer Size", + "VBV(HRD) Buffer Size in bytes (0 = MFT default) " + "(Exposed only if supported by device)", 0, G_MAXUINT - 1, + DEFAULT_BUFFER_SIZE, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->max_bitrate) { + g_object_class_install_property (gobject_class, PROP_MAX_BITRATE, + g_param_spec_uint ("max-bitrate", "Max Bitrate", + "The maximum bitrate applied when rc-mode is \"pcvbr\" in kbit/sec " + "(0 = MFT default) (Exposed only if supported by device)", 0, + (G_MAXUINT >> 10), + DEFAULT_MAX_BITRATE, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->quality_vs_speed) { + g_object_class_install_property (gobject_class, PROP_QUALITY_VS_SPEED, + g_param_spec_uint ("quality-vs-speed", "Quality Vs Speed", + "Quality and speed tradeoff, [0, 33]: Low complexity, " + "[34, 66]: Medium complexity, [67, 100]: High complexity " + "(Exposed only if supported by device)", 0, 100, + DEFAULT_QUALITY_VS_SPEED, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->cabac) { + g_object_class_install_property (gobject_class, PROP_CABAC, + g_param_spec_boolean ("cabac", "Use CABAC", + "Enable CABAC entropy coding " + "(Exposed only if supported by device)", + DEFAULT_CABAC, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->sps_id) { + g_object_class_install_property (gobject_class, PROP_SPS_ID, + g_param_spec_uint ("sps-id", "SPS Id", + "The SPS id to use " + "(Exposed only if supported by device)", 0, 31, + DEFAULT_SPS_ID, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->pps_id) { + g_object_class_install_property (gobject_class, PROP_PPS_ID, + g_param_spec_uint ("pps-id", "PPS Id", + "The PPS id to use " + "(Exposed only if supported by device)", 0, 255, + DEFAULT_PPS_ID, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->bframes) { + g_object_class_install_property (gobject_class, PROP_BFRAMES, + g_param_spec_uint ("bframes", "bframes", + "The maximum number of consecutive B frames, " + "(Exposed only 
if supported by device)", 0, 2, + DEFAULT_BFRAMES, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->gop_size) { + g_object_class_install_property (gobject_class, PROP_GOP_SIZE, + g_param_spec_uint ("gop-size", "GOP size", + "The number of pictures from one GOP header to the next, " + "(0 = MFT default) " + "(Exposed only if supported by device)", 0, G_MAXUINT - 1, + DEFAULT_GOP_SIZE, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->threads) { + g_object_class_install_property (gobject_class, PROP_THREADS, + g_param_spec_uint ("threads", "Threads", + "The number of worker threads used by a encoder, " + "(0 = MFT default) " + "(Exposed only if supported by device)", 0, 16, + DEFAULT_THREADS, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->content_type) { + g_object_class_install_property (gobject_class, PROP_CONTENT_TYPE, + g_param_spec_enum ("content-type", "Content Type", + "Indicates the type of video content " + "(Exposed only if supported by device)", + GST_TYPE_MF_H264_ENC_CONTENT_TYPE, DEFAULT_CONTENT_TYPE, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->qp) { + g_object_class_install_property (gobject_class, PROP_QP, + g_param_spec_uint ("qp", "qp", + "QP applied when rc-mode is \"qvbr\" " + "(Exposed only if supported by device)", 16, 51, + DEFAULT_QP, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->low_latency) { + g_object_class_install_property (gobject_class, PROP_LOW_LATENCY, + g_param_spec_boolean ("low-latency", "Low Latency", + "Enable low latency encoding " + "(Exposed only if supported by device)", + DEFAULT_LOW_LATENCY, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->min_qp) { + g_object_class_install_property (gobject_class, PROP_MIN_QP, + g_param_spec_uint ("min-qp", "Min QP", + "The minimum allowed QP applied to all rc-mode " + "(Exposed only if supported by device)", 0, 51, + DEFAULT_MIN_QP, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->max_qp) { + g_object_class_install_property (gobject_class, PROP_MAX_QP, + g_param_spec_uint ("max-qp", "Max QP", + "The maximum allowed QP applied to all rc-mode " + "(Exposed only if supported by device)", 0, 51, + DEFAULT_MAX_QP, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->frame_type_qp) { + g_object_class_install_property (gobject_class, PROP_QP_I, + g_param_spec_uint ("qp-i", "QP I", + "QP applied to I frames " + "(Exposed only if supported by device)", 0, 51, + DEFAULT_QP_I, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property (gobject_class, PROP_QP_P, + g_param_spec_uint ("qp-p", "QP P", + "QP applied to P frames " + "(Exposed only if supported by device)", 0, 51, + DEFAULT_QP_P, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property (gobject_class, PROP_QP_B, + g_param_spec_uint ("qp-b", "QP B", + "QP applied to B frames " + "(Exposed only if supported by device)", 0, 51, + DEFAULT_QP_B, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + if (device_caps->max_num_ref) { + g_object_class_install_property (gobject_class, PROP_REF, + g_param_spec_uint ("ref", "Reference Frames", + "The number of reference frames " + "(Exposed only if supported by device)", + device_caps->max_num_ref_low, 
device_caps->max_num_ref_high, + DEFAULT_REF, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + } + + long_name = g_strdup_printf ("Media Foundation %s", cdata->device_name); + classification = g_strdup_printf ("Codec/Encoder/Video%s", + (cdata->enum_flags & MFT_ENUM_FLAG_HARDWARE) == MFT_ENUM_FLAG_HARDWARE ? + "/Hardware" : ""); + gst_element_class_set_metadata (element_class, long_name, + classification, + "Microsoft Media Foundation H.264 Encoder", + "Seungha Yang "); + g_free (long_name); + g_free (classification); + + gst_element_class_add_pad_template (element_class, + gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, + cdata->sink_caps)); + gst_element_class_add_pad_template (element_class, + gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, + cdata->src_caps)); + + mfenc_class->set_option = GST_DEBUG_FUNCPTR (gst_mf_h264_enc_set_option); + mfenc_class->set_src_caps = GST_DEBUG_FUNCPTR (gst_mf_h264_enc_set_src_caps); + + mfenc_class->codec_id = MFVideoFormat_H264; + mfenc_class->enum_flags = cdata->enum_flags; + mfenc_class->device_index = cdata->device_index; + mfenc_class->can_force_keyframe = device_caps->force_keyframe; + + g_free (cdata->device_name); + gst_caps_unref (cdata->sink_caps); + gst_caps_unref (cdata->src_caps); + g_free (cdata); +} + +static void +gst_mf_h264_enc_init (GstMFH264Enc * self) +{ + self->bitrate = DEFAULT_BITRATE; + self->rc_mode = DEFAULT_RC_MODE; + self->quality = DEFAULT_QUALITY_LEVEL; + self->adaptive_mode = DEFAULT_ADAPTIVE_MODE; + self->max_bitrate = DEFAULT_MAX_BITRATE; + self->quality_vs_speed = DEFAULT_QUALITY_VS_SPEED; + self->cabac = DEFAULT_CABAC; + self->sps_id = DEFAULT_SPS_ID; + self->pps_id = DEFAULT_PPS_ID; + self->bframes = DEFAULT_BFRAMES; + self->gop_size = DEFAULT_GOP_SIZE; + self->threads = DEFAULT_THREADS; + self->content_type = DEFAULT_CONTENT_TYPE; + self->qp = DEFAULT_QP; + self->low_latency = DEFAULT_LOW_LATENCY; + self->min_qp = DEFAULT_MIN_QP; + self->max_qp = DEFAULT_MAX_QP; + self->qp_i = DEFAULT_QP_I; + self->qp_p = DEFAULT_QP_P; + self->qp_b = DEFAULT_QP_B; + self->max_num_ref = DEFAULT_REF; +} + +static void +gst_mf_h264_enc_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +{ + GstMFH264Enc *self = (GstMFH264Enc *) (object); + + switch (prop_id) { + case PROP_BITRATE: + g_value_set_uint (value, self->bitrate); + break; + case PROP_RC_MODE: + g_value_set_enum (value, self->rc_mode); + break; + case PROP_QUALITY: + g_value_set_uint (value, self->quality); + break; + case PROP_ADAPTIVE_MODE: + g_value_set_enum (value, self->adaptive_mode); + break; + case PROP_BUFFER_SIZE: + g_value_set_uint (value, self->buffer_size); + break; + case PROP_MAX_BITRATE: + g_value_set_uint (value, self->max_bitrate); + break; + case PROP_QUALITY_VS_SPEED: + g_value_set_uint (value, self->quality_vs_speed); + break; + case PROP_CABAC: + g_value_set_boolean (value, self->cabac); + break; + case PROP_SPS_ID: + g_value_set_uint (value, self->sps_id); + break; + case PROP_PPS_ID: + g_value_set_uint (value, self->pps_id); + break; + case PROP_BFRAMES: + g_value_set_uint (value, self->bframes); + break; + case PROP_GOP_SIZE: + g_value_set_uint (value, self->gop_size); + break; + case PROP_THREADS: + g_value_set_uint (value, self->threads); + break; + case PROP_CONTENT_TYPE: + g_value_set_enum (value, self->content_type); + break; + case PROP_QP: + g_value_set_uint (value, self->qp); + break; + case PROP_LOW_LATENCY: + g_value_set_boolean (value, self->low_latency); + break; + 
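
Because most of the properties above are installed only when the underlying MFT advertises the corresponding CodecAPI attribute, applications should probe for a property before setting it rather than assume it exists. A minimal sketch of that pattern follows; the helper and function names are illustrative, not part of the patch.

  #include <gst/gst.h>

  /* Set a device-dependent property only when the selected encoder class
   * actually exposes it (the properties are installed conditionally) */
  static void
  set_uint_property_if_present (GstElement * encoder, const gchar * name,
      guint value)
  {
    if (g_object_class_find_property (G_OBJECT_GET_CLASS (encoder), name))
      g_object_set (encoder, name, value, NULL);
  }

  static GstElement *
  create_configured_encoder (void)
  {
    GstElement *enc = gst_element_factory_make ("mfh264enc", NULL);

    if (!enc)
      return NULL;

    /* "bitrate" is installed unconditionally (kbit/sec) */
    g_object_set (enc, "bitrate", 4000, NULL);

    /* These exist only when the MFT reports the matching CodecAPI support */
    set_uint_property_if_present (enc, "gop-size", 60);
    set_uint_property_if_present (enc, "bframes", 2);

    return enc;
  }
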
case PROP_MIN_QP: + g_value_set_uint (value, self->min_qp); + break; + case PROP_MAX_QP: + g_value_set_uint (value, self->max_qp); + break; + case PROP_QP_I: + g_value_set_uint (value, self->qp_i); + break; + case PROP_QP_P: + g_value_set_uint (value, self->qp_p); + break; + case PROP_QP_B: + g_value_set_uint (value, self->qp_b); + break; + case PROP_REF: + g_value_set_uint (value, self->max_num_ref); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_mf_h264_enc_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstMFH264Enc *self = (GstMFH264Enc *) (object); + + switch (prop_id) { + case PROP_BITRATE: + self->bitrate = g_value_get_uint (value); + break; + case PROP_RC_MODE: + self->rc_mode = g_value_get_enum (value); + break; + case PROP_QUALITY: + self->quality = g_value_get_uint (value); + break; + case PROP_ADAPTIVE_MODE: + self->adaptive_mode = g_value_get_enum (value); + break; + case PROP_BUFFER_SIZE: + self->buffer_size = g_value_get_uint (value); + break; + case PROP_MAX_BITRATE: + self->max_bitrate = g_value_get_uint (value); + break; + case PROP_QUALITY_VS_SPEED: + self->quality_vs_speed = g_value_get_uint (value); + break; + case PROP_CABAC: + self->cabac = g_value_get_boolean (value); + break; + case PROP_SPS_ID: + self->sps_id = g_value_get_uint (value); + break; + case PROP_PPS_ID: + self->pps_id = g_value_get_uint (value); + break; + case PROP_BFRAMES: + self->bframes = g_value_get_uint (value); + break; + case PROP_GOP_SIZE: + self->gop_size = g_value_get_uint (value); + break; + case PROP_THREADS: + self->threads = g_value_get_uint (value); + break; + case PROP_CONTENT_TYPE: + self->content_type = g_value_get_enum (value); + break; + case PROP_QP: + self->qp = g_value_get_uint (value); + break; + case PROP_LOW_LATENCY: + self->low_latency = g_value_get_boolean (value); + break; + case PROP_MIN_QP: + self->min_qp = g_value_get_uint (value); + break; + case PROP_MAX_QP: + self->max_qp = g_value_get_uint (value); + break; + case PROP_QP_I: + self->qp_i = g_value_get_uint (value); + break; + case PROP_QP_P: + self->qp_p = g_value_get_uint (value); + break; + case PROP_QP_B: + self->qp_b = g_value_get_uint (value); + break; + case PROP_REF: + self->max_num_ref = g_value_get_uint (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static guint +gst_mf_h264_enc_rc_mode_to_enum (guint rc_mode) +{ + switch (rc_mode) { + case GST_MF_H264_ENC_RC_MODE_CBR: + return eAVEncCommonRateControlMode_CBR; + case GST_MF_H264_ENC_RC_MODE_PEAK_CONSTRAINED_VBR: + return eAVEncCommonRateControlMode_PeakConstrainedVBR; + case GST_MF_H264_ENC_RC_MODE_UNCONSTRAINED_VBR: + return eAVEncCommonRateControlMode_UnconstrainedVBR; + case GST_MF_H264_ENC_RC_MODE_QUALITY: + return eAVEncCommonRateControlMode_Quality; + default: + return G_MAXUINT; + } +} + +static guint +gst_mf_h264_enc_adaptive_mode_to_enum (guint rc_mode) +{ + switch (rc_mode) { + case GST_MF_H264_ENC_ADAPTIVE_MODE_NONE: + return eAVEncAdaptiveMode_None; + case GST_MF_H264_ENC_ADAPTIVE_MODE_FRAMERATE: + return eAVEncAdaptiveMode_FrameRate; + default: + return G_MAXUINT; + } +} + +static guint +gst_mf_h264_enc_content_type_to_enum (guint rc_mode) +{ + switch (rc_mode) { + case GST_MF_H264_ENC_CONTENT_TYPE_UNKNOWN: + return eAVEncVideoContentType_Unknown; + case GST_MF_H264_ENC_CONTENT_TYPE_FIXED_CAMERA_ANGLE: + return eAVEncVideoContentType_FixedCameraAngle; + default: + 
return G_MAXUINT; + } +} + +#define WARNING_HR(hr,func) \ + G_STMT_START { \ + if (!gst_mf_result (hr)) { \ + GST_WARNING_OBJECT (self, G_STRINGIFY(func) " failed, hr: 0x%x", (guint) hr); \ + } \ + } G_STMT_END + +static gboolean +gst_mf_h264_enc_set_option (GstMFVideoEnc * mfenc, IMFMediaType * output_type) +{ + GstMFH264Enc *self = (GstMFH264Enc *) mfenc; + GstMFH264EncClass *klass = GST_MF_H264_ENC_GET_CLASS (self); + GstMFH264EncDeviceCaps *device_caps = &klass->device_caps; + HRESULT hr; + GstCaps *allowed_caps, *template_caps; + guint selected_profile = eAVEncH264VProfile_Main; + gint level_idc = -1; + GstMFTransform *transform = mfenc->transform; + + template_caps = + gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (self)); + allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (self)); + + if (template_caps == allowed_caps) { + GST_INFO_OBJECT (self, "downstream has ANY caps"); + } else if (allowed_caps) { + GstStructure *s; + const gchar *profile; + const gchar *level; + + if (gst_caps_is_empty (allowed_caps)) { + gst_caps_unref (allowed_caps); + gst_caps_unref (template_caps); + return FALSE; + } + + allowed_caps = gst_caps_make_writable (allowed_caps); + allowed_caps = gst_caps_fixate (allowed_caps); + s = gst_caps_get_structure (allowed_caps, 0); + + profile = gst_structure_get_string (s, "profile"); + if (profile) { + if (!strcmp (profile, "baseline")) { + selected_profile = eAVEncH264VProfile_Base; + } else if (g_str_has_prefix (profile, "high")) { + selected_profile = eAVEncH264VProfile_High; + } else if (g_str_has_prefix (profile, "main")) { + selected_profile = eAVEncH264VProfile_Main; + } + } + + level = gst_structure_get_string (s, "level"); + if (level) + level_idc = gst_codec_utils_h264_get_level_idc (level); + + gst_caps_unref (allowed_caps); + } + gst_caps_unref (template_caps); + + hr = output_type->SetGUID (MF_MT_SUBTYPE, MFVideoFormat_H264); + if (!gst_mf_result (hr)) + return FALSE; + + hr = output_type->SetUINT32 (MF_MT_MPEG2_PROFILE, selected_profile); + if (!gst_mf_result (hr)) + return FALSE; + + if (level_idc >= eAVEncH264VLevel1 && level_idc <= eAVEncH264VLevel5_2) { + hr = output_type->SetUINT32 (MF_MT_MPEG2_LEVEL, level_idc); + if (!gst_mf_result (hr)) + return FALSE; + } + + hr = output_type->SetUINT32 (MF_MT_AVG_BITRATE, + MIN (self->bitrate * 1024, G_MAXUINT - 1)); + if (!gst_mf_result (hr)) + return FALSE; + + if (device_caps->rc_mode) { + guint rc_mode; + rc_mode = gst_mf_h264_enc_rc_mode_to_enum (self->rc_mode); + if (rc_mode != G_MAXUINT) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncCommonRateControlMode, rc_mode); + WARNING_HR (hr, CODECAPI_AVEncCommonRateControlMode); + } + } + + if (device_caps->quality && !device_caps->qp) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncCommonQuality, self->quality); + WARNING_HR (hr, CODECAPI_AVEncCommonQuality); + } + + if (device_caps->adaptive_mode) { + guint adaptive_mode; + adaptive_mode = + gst_mf_h264_enc_adaptive_mode_to_enum (self->adaptive_mode); + if (adaptive_mode != G_MAXUINT) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncAdaptiveMode, adaptive_mode); + WARNING_HR (hr, CODECAPI_AVEncAdaptiveMode); + } + } + + if (device_caps->buffer_size && self->buffer_size > 0) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncCommonBufferSize, self->buffer_size); + WARNING_HR (hr, CODECAPI_AVEncCommonBufferSize); + } + + if (device_caps->max_bitrate && self->max_bitrate > 0) 
{ + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncCommonMaxBitRate, + MIN (self->max_bitrate * 1024, G_MAXUINT - 1)); + WARNING_HR (hr, CODECAPI_AVEncCommonMaxBitRate); + } + + if (device_caps->quality_vs_speed) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncCommonQualityVsSpeed, + self->quality_vs_speed); + WARNING_HR (hr, CODECAPI_AVEncCommonQualityVsSpeed); + } + + if (device_caps->cabac && selected_profile != eAVEncH264VProfile_Base) { + hr = gst_mf_transform_set_codec_api_boolean (transform, + &CODECAPI_AVEncH264CABACEnable, self->cabac); + WARNING_HR (hr, CODECAPI_AVEncH264CABACEnable); + } + + if (device_caps->sps_id) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncH264SPSID, self->sps_id); + WARNING_HR (hr, CODECAPI_AVEncH264SPSID); + } + + if (device_caps->pps_id) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncH264PPSID, self->pps_id); + WARNING_HR (hr, CODECAPI_AVEncH264PPSID); + } + + if (device_caps->bframes && selected_profile != eAVEncH264VProfile_Base) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncMPVDefaultBPictureCount, self->bframes); + WARNING_HR (hr, CODECAPI_AVEncMPVDefaultBPictureCount); + } + + if (device_caps->gop_size) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncMPVGOPSize, self->gop_size); + WARNING_HR (hr, CODECAPI_AVEncMPVGOPSize); + } + + if (device_caps->threads) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncNumWorkerThreads, self->threads); + WARNING_HR (hr, CODECAPI_AVEncNumWorkerThreads); + } + + if (device_caps->content_type) { + guint content_type; + content_type = gst_mf_h264_enc_content_type_to_enum (self->content_type); + if (content_type != G_MAXUINT) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncVideoContentType, content_type); + WARNING_HR (hr, CODECAPI_AVEncVideoContentType); + } + } + + if (device_caps->qp) { + hr = gst_mf_transform_set_codec_api_uint64 (transform, + &CODECAPI_AVEncVideoEncodeQP, self->qp); + WARNING_HR (hr, CODECAPI_AVEncVideoEncodeQP); + } + + if (device_caps->low_latency) { + hr = gst_mf_transform_set_codec_api_boolean (transform, + &CODECAPI_AVLowLatencyMode, self->low_latency); + WARNING_HR (hr, CODECAPI_AVLowLatencyMode); + } + + if (device_caps->min_qp) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncVideoMinQP, self->min_qp); + WARNING_HR (hr, CODECAPI_AVEncVideoMinQP); + } + + if (device_caps->max_qp) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncVideoMaxQP, self->max_qp); + WARNING_HR (hr, CODECAPI_AVEncVideoMaxQP); + } + + if (device_caps->frame_type_qp) { + guint64 type_qp = 0; + + type_qp = + (guint64) self->qp_i | (guint64) self->qp_p << 16 | + (guint64) self->qp_b << 32; + hr = gst_mf_transform_set_codec_api_uint64 (transform, + &CODECAPI_AVEncVideoEncodeFrameTypeQP, type_qp); + WARNING_HR (hr, CODECAPI_AVEncVideoEncodeFrameTypeQP); + } + + if (device_caps->max_num_ref) { + hr = gst_mf_transform_set_codec_api_uint32 (transform, + &CODECAPI_AVEncVideoMaxNumRefFrame, self->max_num_ref); + WARNING_HR (hr, CODECAPI_AVEncVideoMaxNumRefFrame); + } + + return TRUE; +} + +static gboolean +gst_mf_h264_enc_set_src_caps (GstMFVideoEnc * mfenc, + GstVideoCodecState * state, IMFMediaType * output_type) +{ + GstMFH264Enc *self = (GstMFH264Enc *) mfenc; + GstVideoCodecState *out_state; + GstStructure *s; + GstCaps *out_caps; + GstTagList *tags; 
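
The CODECAPI_AVEncVideoEncodeFrameTypeQP value set above packs the three per-frame-type QPs into a single 64-bit word: bits 0-15 carry the I-frame QP, bits 16-31 the P-frame QP and bits 32-47 the B-frame QP. A tiny worked example of that packing (illustration only):

  #include <glib.h>

  /* Same packing as the AVEncVideoEncodeFrameTypeQP code above */
  static guint64
  pack_frame_type_qp (guint qp_i, guint qp_p, guint qp_b)
  {
    return (guint64) qp_i | ((guint64) qp_p << 16) | ((guint64) qp_b << 32);
  }

  /* With the defaults (26 for each frame type):
   *   pack_frame_type_qp (26, 26, 26) == 0x001A001A001A */
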
+ + out_caps = gst_caps_new_empty_simple ("video/x-h264"); + s = gst_caps_get_structure (out_caps, 0); + + gst_structure_set (s, "stream-format", G_TYPE_STRING, "byte-stream", + "alignment", G_TYPE_STRING, "au", NULL); + + out_state = gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (self), + out_caps, state); + + GST_INFO_OBJECT (self, "output caps: %" GST_PTR_FORMAT, out_state->caps); + + /* encoder will keep it around for us */ + gst_video_codec_state_unref (out_state); + + tags = gst_tag_list_new_empty (); + gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER, + gst_element_get_metadata (GST_ELEMENT_CAST (self), + GST_ELEMENT_METADATA_LONGNAME), NULL); + gst_video_encoder_merge_tags (GST_VIDEO_ENCODER (self), tags, + GST_TAG_MERGE_REPLACE); + gst_tag_list_unref (tags); + + return TRUE; +} + +static void +gst_mf_h264_enc_register (GstPlugin * plugin, guint rank, + const gchar * device_name, const GstMFH264EncDeviceCaps * device_caps, + guint32 enum_flags, guint device_index, + GstCaps * sink_caps, GstCaps * src_caps) +{ + GType type; + gchar *type_name; + gchar *feature_name; + gint i; + GstMFH264EncClassData *cdata; + gboolean is_default = TRUE; + GTypeInfo type_info = { + sizeof (GstMFH264EncClass), + NULL, + NULL, + (GClassInitFunc) gst_mf_h264_enc_class_init, + NULL, + NULL, + sizeof (GstMFH264Enc), + 0, + (GInstanceInitFunc) gst_mf_h264_enc_init, + }; + + cdata = g_new0 (GstMFH264EncClassData, 1); + cdata->sink_caps = sink_caps; + cdata->src_caps = src_caps; + cdata->device_name = g_strdup (device_name); + cdata->device_caps = *device_caps; + cdata->enum_flags = enum_flags; + cdata->device_index = device_index; + type_info.class_data = cdata; + + type_name = g_strdup ("GstMFH264Enc"); + feature_name = g_strdup ("mfh264enc"); + + i = 1; + while (g_type_from_name (type_name) != 0) { + g_free (type_name); + g_free (feature_name); + type_name = g_strdup_printf ("GstMFH264Device%dEnc", i); + feature_name = g_strdup_printf ("mfh264device%denc", i); + is_default = FALSE; + i++; + } + + type = + g_type_register_static (GST_TYPE_MF_VIDEO_ENC, type_name, &type_info, + (GTypeFlags) 0); + + /* make lower rank than default device */ + if (rank > 0 && !is_default) + rank--; + + if (!gst_element_register (plugin, feature_name, rank, type)) + GST_WARNING ("Failed to register plugin '%s'", type_name); + + g_free (type_name); + g_free (feature_name); +} + +typedef struct +{ + guint width; + guint height; +} GstMFH264EncResolution; + +typedef struct +{ + eAVEncH264VProfile profile; + const gchar *profile_str; +} GStMFH264EncProfileMap; + +static void +gst_mf_h264_enc_plugin_init_internal (GstPlugin * plugin, guint rank, + GstMFTransform * transform, guint device_index, guint32 enum_flags) +{ + HRESULT hr; + MFT_REGISTER_TYPE_INFO *infos; + UINT32 info_size; + gint i; + GstCaps *src_caps = NULL; + GstCaps *sink_caps = NULL; + GValue *supported_formats = NULL; + gboolean have_I420 = FALSE; + gchar *device_name = NULL; + GstMFH264EncDeviceCaps device_caps = { 0, }; + IMFActivate *activate; + IMFTransform *encoder; + ICodecAPI *codec_api; + ComPtr out_type; + GstMFH264EncResolution resolutions_to_check[] = { + {1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160}, {8192, 4320} + }; + guint max_width = 0; + guint max_height = 0; + guint resolution; + GStMFH264EncProfileMap profiles_to_check[] = { + { eAVEncH264VProfile_High, "high" }, + { eAVEncH264VProfile_Main, "main" }, + { eAVEncH264VProfile_Base, "baseline" }, + }; + guint num_profiles = 0; + GValue profiles = G_VALUE_INIT; + + /* 
NOTE: depending on environment, + * some enumerated h/w MFT might not be usable (e.g., multiple GPU case) */ + if (!gst_mf_transform_open (transform)) + return; + + activate = gst_mf_transform_get_activate_handle (transform); + if (!activate) { + GST_WARNING_OBJECT (transform, "No IMFActivate interface available"); + return; + } + + encoder = gst_mf_transform_get_transform_handle (transform); + if (!encoder) { + GST_WARNING_OBJECT (transform, "No IMFTransform interface available"); + return; + } + + codec_api = gst_mf_transform_get_codec_api_handle (transform); + if (!codec_api) { + GST_WARNING_OBJECT (transform, "No ICodecAPI interface available"); + return; + } + + g_object_get (transform, "device-name", &device_name, NULL); + if (!device_name) { + GST_WARNING_OBJECT (transform, "Unknown device name"); + return; + } + + g_value_init (&profiles, GST_TYPE_LIST); + + hr = activate->GetAllocatedBlob (MFT_INPUT_TYPES_Attributes, + (UINT8 **) & infos, &info_size); + if (!gst_mf_result (hr)) + goto done; + + for (i = 0; i < info_size / sizeof (MFT_REGISTER_TYPE_INFO); i++) { + GstVideoFormat vformat; + GValue val = G_VALUE_INIT; + + vformat = gst_mf_video_subtype_to_video_format (&infos[i].guidSubtype); + if (vformat == GST_VIDEO_FORMAT_UNKNOWN) + continue; + + if (!supported_formats) { + supported_formats = g_new0 (GValue, 1); + g_value_init (supported_formats, GST_TYPE_LIST); + } + + /* media foundation has duplicated formats IYUV and I420 */ + if (vformat == GST_VIDEO_FORMAT_I420) { + if (have_I420) + continue; + + have_I420 = TRUE; + } + + g_value_init (&val, G_TYPE_STRING); + g_value_set_static_string (&val, gst_video_format_to_string (vformat)); + gst_value_list_append_and_take_value (supported_formats, &val); + } + + CoTaskMemFree (infos); + + if (!supported_formats) + goto done; + + /* check supported profiles and resolutions */ + hr = MFCreateMediaType (out_type.GetAddressOf ()); + if (!gst_mf_result (hr)) + goto done; + + hr = out_type->SetGUID (MF_MT_MAJOR_TYPE, MFMediaType_Video); + if (!gst_mf_result (hr)) + goto done; + + hr = out_type->SetGUID (MF_MT_SUBTYPE, MFVideoFormat_H264); + if (!gst_mf_result (hr)) + goto done; + + hr = out_type->SetUINT32 (MF_MT_AVG_BITRATE, 2048000); + if (!gst_mf_result (hr)) + goto done; + + hr = MFSetAttributeRatio (out_type.Get (), MF_MT_FRAME_RATE, 30, 1); + if (!gst_mf_result (hr)) + goto done; + + hr = out_type->SetUINT32 (MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); + if (!gst_mf_result (hr)) + goto done; + + GST_DEBUG_OBJECT (transform, "Check supported profiles of %s", + device_name); + for (i = 0; i < G_N_ELEMENTS (profiles_to_check); i++) { + GValue profile_val = G_VALUE_INIT; + + hr = out_type->SetUINT32 (MF_MT_MPEG2_PROFILE, + profiles_to_check[i].profile); + if (!gst_mf_result (hr)) + goto done; + + hr = MFSetAttributeSize (out_type.Get (), MF_MT_FRAME_SIZE, + resolutions_to_check[0].width, resolutions_to_check[0].height); + if (!gst_mf_result (hr)) + break; + + if (!gst_mf_transform_set_output_type (transform, out_type.Get ())) + break; + + GST_DEBUG_OBJECT (transform, "MFT supports h264 %s profile", + profiles_to_check[i].profile_str); + + g_value_init (&profile_val, G_TYPE_STRING); + g_value_set_static_string (&profile_val, profiles_to_check[i].profile_str); + gst_value_list_append_and_take_value (&profiles, &profile_val); + num_profiles++; + + /* clear media type */ + gst_mf_transform_set_output_type (transform, NULL); + } + + if (num_profiles == 0) { + GST_WARNING_OBJECT (transform, "Couldn't query supported profile"); + goto 
done; + } + + /* baseline is default profile */ + hr = out_type->SetUINT32 (MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Base); + if (!gst_mf_result (hr)) + goto done; + + GST_DEBUG_OBJECT (transform, "Check supported resolutions of %s", + device_name); + for (i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) { + guint width, height; + + width = resolutions_to_check[i].width; + height = resolutions_to_check[i].height; + + hr = MFSetAttributeSize (out_type.Get (), MF_MT_FRAME_SIZE, width, height); + if (!gst_mf_result (hr)) + break; + + if (!gst_mf_transform_set_output_type (transform, out_type.Get ())) + break; + + max_width = width; + max_height = height; + + GST_DEBUG_OBJECT (transform, + "MFT supports resolution %dx%d", max_width, max_height); + + /* clear media type */ + gst_mf_transform_set_output_type (transform, NULL); + } + + if (max_width == 0 || max_height == 0) { + GST_WARNING_OBJECT (transform, "Couldn't query supported resolution"); + goto done; + } + + /* high profile supported since windows8 */ + src_caps = gst_caps_from_string ("video/x-h264, " + "stream-format=(string) byte-stream, " + "alignment=(string) au"); + gst_caps_set_value (src_caps, "profile", &profiles); + + sink_caps = gst_caps_new_empty_simple ("video/x-raw"); + gst_caps_set_value (sink_caps, "format", supported_formats); + g_value_unset (supported_formats); + g_free (supported_formats); + + /* To cover both landscape and portrait, select max value */ + resolution = MAX (max_width, max_height); + gst_caps_set_simple (sink_caps, + "width", GST_TYPE_INT_RANGE, 64, resolution, + "height", GST_TYPE_INT_RANGE, 64, resolution, NULL); + gst_caps_set_simple (src_caps, + "width", GST_TYPE_INT_RANGE, 64, resolution, + "height", GST_TYPE_INT_RANGE, 64, resolution, NULL); + + GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED); + GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED); + +#define CHECK_DEVICE_CAPS(codec_obj,api,val) \ + if (SUCCEEDED((codec_obj)->IsSupported(&(api)))) {\ + device_caps.val = TRUE; \ + } + + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonRateControlMode, rc_mode); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonQuality, quality); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncAdaptiveMode, adaptive_mode); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonBufferSize, buffer_size); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncCommonMaxBitRate, max_bitrate); + CHECK_DEVICE_CAPS (codec_api, + CODECAPI_AVEncCommonQualityVsSpeed, quality_vs_speed); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncH264CABACEnable, cabac); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncH264SPSID, sps_id); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncH264PPSID, pps_id); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncMPVDefaultBPictureCount, bframes); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncMPVGOPSize, gop_size); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncNumWorkerThreads, threads); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoContentType, content_type); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoEncodeQP, qp); + CHECK_DEVICE_CAPS (codec_api, + CODECAPI_AVEncVideoForceKeyFrame, force_keyframe); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVLowLatencyMode, low_latency); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMinQP, min_qp); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMaxQP, max_qp); + CHECK_DEVICE_CAPS (codec_api, + CODECAPI_AVEncVideoEncodeFrameTypeQP, frame_type_qp); + CHECK_DEVICE_CAPS (codec_api, CODECAPI_AVEncVideoMaxNumRefFrame, 
max_num_ref); + if (device_caps.max_num_ref) { + VARIANT min; + VARIANT max; + VARIANT step; + + hr = codec_api->GetParameterRange (&CODECAPI_AVEncVideoMaxNumRefFrame, + &min, &max, &step); + if (SUCCEEDED (hr)) { + device_caps.max_num_ref = TRUE; + device_caps.max_num_ref_high = max.uiVal; + device_caps.max_num_ref_low = min.uiVal; + VariantClear (&min); + VariantClear (&max); + VariantClear (&step); + } + } + + gst_mf_h264_enc_register (plugin, rank, device_name, + &device_caps, enum_flags, device_index, sink_caps, src_caps); + +done: + g_value_unset (&profiles); + g_free (device_name); +} + +void +gst_mf_h264_enc_plugin_init (GstPlugin * plugin, guint rank) +{ + GstMFTransformEnumParams enum_params = { 0, }; + MFT_REGISTER_TYPE_INFO output_type; + GstMFTransform *transform; + gint i; + gboolean do_next; + + CoInitializeEx (NULL, COINIT_MULTITHREADED); + + GST_DEBUG_CATEGORY_INIT (gst_mf_h264_enc_debug, "mfh264enc", 0, "mfh264enc"); + + output_type.guidMajorType = MFMediaType_Video; + output_type.guidSubtype = MFVideoFormat_H264; + + enum_params.category = MFT_CATEGORY_VIDEO_ENCODER; + enum_params.enum_flags = (MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_ASYNCMFT | + MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SORTANDFILTER_APPROVED_ONLY); + enum_params.output_typeinfo = &output_type; + + /* register hardware encoders first */ + i = 0; + do { + enum_params.device_index = i++; + transform = gst_mf_transform_new (&enum_params); + do_next = TRUE; + + if (!transform) { + do_next = FALSE; + } else { + gst_mf_h264_enc_plugin_init_internal (plugin, rank, transform, + enum_params.device_index, enum_params.enum_flags); + gst_clear_object (&transform); + } + } while (do_next); + + /* register software encoders */ + enum_params.enum_flags = (MFT_ENUM_FLAG_SYNCMFT | + MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SORTANDFILTER_APPROVED_ONLY); + i = 0; + do { + enum_params.device_index = i++; + transform = gst_mf_transform_new (&enum_params); + do_next = TRUE; + + if (!transform) { + do_next = FALSE; + } else { + gst_mf_h264_enc_plugin_init_internal (plugin, rank, transform, + enum_params.device_index, enum_params.enum_flags); + gst_clear_object (&transform); + } + } while (do_next); + + CoUninitialize (); +} diff --git a/sys/mediafoundation/gstmfh264enc.h b/sys/mediafoundation/gstmfh264enc.h new file mode 100644 index 0000000000..00e6de0c01 --- /dev/null +++ b/sys/mediafoundation/gstmfh264enc.h @@ -0,0 +1,33 @@ +/* GStreamer + * Copyright (C) 2019 Seungha Yang + * Copyright (C) 2020 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
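
gst_mf_h264_enc_plugin_init above registers hardware (asynchronous) MFTs first and software (synchronous) MFTs second, each discovered through MFTEnumEx via the GstMFTransform wrapper. For context, the raw Media Foundation enumeration underneath looks roughly like the standalone sketch below; it is an illustration only and assumes COM and Media Foundation are already initialized (CoInitializeEx and MFStartup).

  #include <windows.h>
  #include <mfapi.h>
  #include <mfidl.h>
  #include <mftransform.h>
  #include <stdio.h>

  static void
  list_h264_encoders (UINT32 enum_flags)
  {
    MFT_REGISTER_TYPE_INFO output_type =
        { MFMediaType_Video, MFVideoFormat_H264 };
    IMFActivate **activates = NULL;
    UINT32 count = 0;
    UINT32 i;

    if (FAILED (MFTEnumEx (MFT_CATEGORY_VIDEO_ENCODER, enum_flags,
            NULL, &output_type, &activates, &count)))
      return;

    for (i = 0; i < count; i++) {
      LPWSTR name = NULL;
      UINT32 len = 0;

      if (SUCCEEDED (activates[i]->GetAllocatedString (
              MFT_FRIENDLY_NAME_Attribute, &name, &len))) {
        wprintf (L"encoder %u: %s\n", i, name);
        CoTaskMemFree (name);
      }
      activates[i]->Release ();
    }
    CoTaskMemFree (activates);
  }

  /* Hardware (async) encoders first, then software (sync), mirroring
   * gst_mf_h264_enc_plugin_init:
   *   list_h264_encoders (MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_ASYNCMFT |
   *       MFT_ENUM_FLAG_SORTANDFILTER);
   *   list_h264_encoders (MFT_ENUM_FLAG_SYNCMFT | MFT_ENUM_FLAG_SORTANDFILTER);
   */
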
+ */ + +#ifndef __GST_MF_H264_ENC_H__ +#define __GST_MF_H264_ENC_H__ + +#include + +G_BEGIN_DECLS + +void gst_mf_h264_enc_plugin_init (GstPlugin * plugin, + guint rank); + +G_END_DECLS + +#endif /* __GST_MF_H264_ENC_H__ */ \ No newline at end of file diff --git a/sys/mediafoundation/gstmftransform.cpp b/sys/mediafoundation/gstmftransform.cpp new file mode 100644 index 0000000000..eb6850feb5 --- /dev/null +++ b/sys/mediafoundation/gstmftransform.cpp @@ -0,0 +1,929 @@ +/* GStreamer + * Copyright (C) 2020 Seungha Yang + * Copyright (C) 2020 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include "gstmftransform.h" +#include "gstmfutils.h" +#include +#include + +using namespace Microsoft::WRL; + +extern "C" { +GST_DEBUG_CATEGORY_EXTERN (gst_mf_transform_debug); +#define GST_CAT_DEFAULT gst_mf_transform_debug +} + +enum +{ + PROP_0, + PROP_DEVICE_NAME, + PROP_HARDWARE, + PROP_ENUM_PARAMS, +}; + +struct _GstMFTransform +{ + GstObject object; + gboolean initialized; + + GstMFTransformEnumParams enum_params; + + gchar *device_name; + gboolean hardware; + + IMFActivate *activate; + IMFTransform *transform; + ICodecAPI * codec_api; + IMFMediaEventGenerator *event_gen; + + GQueue *output_queue; + + DWORD input_id; + DWORD output_id; + + gboolean need_start; + + gint pending_need_input; + gint pending_have_output; +}; + +#define gst_mf_transform_parent_class parent_class +G_DEFINE_TYPE (GstMFTransform, gst_mf_transform, GST_TYPE_OBJECT); + +static void gst_mf_transform_constructed (GObject * object); +static void gst_mf_transform_finalize (GObject * object); +static void gst_mf_transform_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec); +static void gst_mf_transform_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec); + +static void +gst_mf_transform_class_init (GstMFTransformClass * klass) +{ + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + + gobject_class->constructed = gst_mf_transform_constructed; + gobject_class->finalize = gst_mf_transform_finalize; + gobject_class->get_property = gst_mf_transform_get_property; + gobject_class->set_property = gst_mf_transform_set_property; + + g_object_class_install_property (gobject_class, PROP_DEVICE_NAME, + g_param_spec_string ("device-name", "device-name", + "Device name", NULL, + (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (gobject_class, PROP_HARDWARE, + g_param_spec_boolean ("hardware", "Hardware", + "Whether hardware device or not", FALSE, + (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property (gobject_class, PROP_ENUM_PARAMS, + g_param_spec_pointer ("enum-params", "Enum Params", + 
"GstMFTransformEnumParams for MFTEnumEx", + (GParamFlags) (G_PARAM_WRITABLE | G_PARAM_CONSTRUCT_ONLY | + G_PARAM_STATIC_STRINGS))); +} + +static void +gst_mf_transform_init (GstMFTransform * self) +{ + self->output_queue = g_queue_new (); + + CoInitializeEx (NULL, COINIT_MULTITHREADED); +} + +static void +gst_mf_transform_clear_enum_params (GstMFTransformEnumParams *params) +{ + g_free (params->input_typeinfo); + params->input_typeinfo = NULL; + + g_free (params->output_typeinfo); + params->output_typeinfo = NULL; +} + +static void +release_mf_sample (IMFSample * sample) +{ + if (sample) + sample->Release (); +} + +static void +gst_mf_transform_finalize (GObject * object) +{ + GstMFTransform *self = GST_MF_TRANSFORM (object); + + gst_mf_transform_close (self); + + if (self->activate) + self->activate->Release (); + + gst_mf_transform_clear_enum_params (&self->enum_params); + g_free (self->device_name); + + g_queue_free_full (self->output_queue, (GDestroyNotify) release_mf_sample); + + CoUninitialize (); + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static void +gst_mf_transform_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +{ + GstMFTransform *self = GST_MF_TRANSFORM (object); + + switch (prop_id) { + case PROP_DEVICE_NAME: + g_value_set_string (value, self->device_name); + break; + case PROP_HARDWARE: + g_value_set_boolean (value, self->hardware); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_mf_transform_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstMFTransform *self = GST_MF_TRANSFORM (object); + + switch (prop_id) { + case PROP_ENUM_PARAMS: + { + GstMFTransformEnumParams *params; + params = (GstMFTransformEnumParams *) g_value_get_pointer (value); + + gst_mf_transform_clear_enum_params (&self->enum_params); + self->enum_params.category = params->category; + self->enum_params.enum_flags = params->enum_flags; + self->enum_params.device_index = params->device_index; + if (params->input_typeinfo) { + self->enum_params.input_typeinfo = g_new0 (MFT_REGISTER_TYPE_INFO, 1); + memcpy (self->enum_params.input_typeinfo, params->input_typeinfo, + sizeof (MFT_REGISTER_TYPE_INFO)); + } + + if (params->output_typeinfo) { + self->enum_params.output_typeinfo = g_new0 (MFT_REGISTER_TYPE_INFO, 1); + memcpy (self->enum_params.output_typeinfo, params->output_typeinfo, + sizeof (MFT_REGISTER_TYPE_INFO)); + } + break; + } + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_mf_transform_constructed (GObject * object) +{ + GstMFTransform *self = GST_MF_TRANSFORM (object); + HRESULT hr; + IMFActivate **devices = NULL; + UINT32 num_devices, i; + LPWSTR name = NULL; + + hr = MFTEnumEx (self->enum_params.category, self->enum_params.enum_flags, + self->enum_params.input_typeinfo, self->enum_params.output_typeinfo, + &devices, &num_devices); + + if (!gst_mf_result (hr)) { + GST_WARNING_OBJECT (self, "MFTEnumEx failure"); + return; + } + + if (num_devices == 0 || self->enum_params.device_index >= num_devices) { + GST_WARNING_OBJECT (self, "No available device at index %d", + self->enum_params.device_index); + for (i = 0; i < num_devices; i++) { + devices[i]->Release (); + } + + CoTaskMemFree (devices); + return; + } + + self->activate = devices[self->enum_params.device_index]; + self->activate->AddRef (); + + for (i = 0; i < num_devices; i++) + devices[i]->Release (); + + hr = 
self->activate->GetAllocatedString (MFT_FRIENDLY_NAME_Attribute, + &name, NULL); + + if (gst_mf_result (hr)) { + self->device_name = g_utf16_to_utf8 ((const gunichar2 *) name, + -1, NULL, NULL, NULL); + + CoTaskMemFree (name); + + GST_INFO_OBJECT (self, "Open device %s", self->device_name); + } + +done: + CoTaskMemFree (devices); + + self->hardware = ! !(self->enum_params.enum_flags & MFT_ENUM_FLAG_HARDWARE); + self->initialized = TRUE; +} + +static HRESULT +gst_mf_transform_pop_event (GstMFTransform * self, + gboolean no_wait, MediaEventType * event_type) +{ + ComPtr event; + MediaEventType type; + HRESULT hr; + DWORD flags = 0; + + if (!self->hardware || !self->event_gen) + return MF_E_NO_EVENTS_AVAILABLE; + + if (no_wait) + flags = MF_EVENT_FLAG_NO_WAIT; + + hr = self->event_gen->GetEvent (flags, event.GetAddressOf ()); + + if (hr == MF_E_NO_EVENTS_AVAILABLE) + return hr; + else if (!gst_mf_result (hr)) + return hr; + + hr = event->GetType (&type); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (self, "Failed to get event, hr: 0x%x", (guint) hr); + + return hr; + } + + *event_type = type; + return S_OK; +} + +static void +gst_mf_transform_drain_all_events (GstMFTransform * self) +{ + HRESULT hr; + + if (!self->hardware) + return; + + do { + MediaEventType type; + + hr = gst_mf_transform_pop_event (self, TRUE, &type); + if (hr == MF_E_NO_EVENTS_AVAILABLE || !gst_mf_result (hr)) + return; + + switch (type) { + case METransformNeedInput: + self->pending_need_input++; + break; + case METransformHaveOutput: + self->pending_have_output++; + break; + default: + GST_DEBUG_OBJECT (self, "Unhandled event %d", type); + break; + } + } while (SUCCEEDED (hr)); +} + +static GstFlowReturn +gst_mf_transform_process_output (GstMFTransform * self) +{ + DWORD status; + HRESULT hr; + IMFTransform *transform = self->transform; + DWORD stream_id = self->output_id; + MFT_OUTPUT_STREAM_INFO out_stream_info = { 0 }; + MFT_OUTPUT_DATA_BUFFER out_data = { 0 }; + + hr = transform->GetOutputStreamInfo (stream_id, &out_stream_info); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (self, "Couldn't get output stream info"); + return GST_FLOW_ERROR; + } + + if ((out_stream_info.dwFlags & (MFT_OUTPUT_STREAM_PROVIDES_SAMPLES | + MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES)) == 0) { + ComPtr buffer; + ComPtr new_sample; + + hr = MFCreateMemoryBuffer (out_stream_info.cbSize, + buffer.GetAddressOf ()); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (self, "Couldn't create memory buffer"); + return GST_FLOW_ERROR; + } + + hr = MFCreateSample (new_sample.GetAddressOf ()); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (self, "Couldn't create sample"); + return GST_FLOW_ERROR; + } + + hr = new_sample->AddBuffer (buffer.Get ()); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (self, "Couldn't add buffer to sample"); + return GST_FLOW_ERROR; + } + + out_data.pSample = new_sample.Detach (); + } + + out_data.dwStreamID = stream_id; + + hr = transform->ProcessOutput (0, 1, &out_data, &status); + + if (self->hardware) + self->pending_have_output--; + + if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { + GST_LOG_OBJECT (self, "Need more input data"); + return GST_MF_TRANSFORM_FLOW_NEED_DATA; + } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) { + ComPtr output_type; + + GST_DEBUG_OBJECT (self, "Stream change, set output type again"); + + hr = transform->GetOutputAvailableType (stream_id, + 0, output_type.GetAddressOf ()); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (self, "Couldn't get available output type"); + return GST_FLOW_ERROR; + } + + 
hr = transform->SetOutputType (stream_id, output_type.Get (), 0); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (self, "Couldn't set output type"); + return GST_FLOW_ERROR; + } + + return GST_MF_TRANSFORM_FLOW_NEED_DATA; + } else if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (self, "ProcessOutput error"); + if (out_data.pSample) + out_data.pSample->Release (); + return GST_FLOW_ERROR; + } + + if (!out_data.pSample) { + GST_WARNING_OBJECT (self, "No output sample"); + return GST_FLOW_OK; + } + + g_queue_push_tail (self->output_queue, out_data.pSample); + + return GST_FLOW_OK; +} + +static gboolean +gst_mf_transform_process_input_sync (GstMFTransform * self, + IMFSample * sample) +{ + HRESULT hr; + + hr = self->transform->ProcessInput (self->output_id, sample, 0); + + if (self->hardware) + self->pending_need_input--; + + return gst_mf_result (hr); +} + +gboolean +gst_mf_transform_process_input (GstMFTransform * object, + IMFSample * sample) +{ + HRESULT hr; + GstFlowReturn ret; + + g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE); + g_return_val_if_fail (sample != NULL, FALSE); + + if (!object->transform) + return FALSE; + + if (object->need_start) { + hr = object->transform->ProcessMessage (MFT_MESSAGE_NOTIFY_START_OF_STREAM, + 0); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (object, "Cannot post start-of-stream message"); + return FALSE; + } + + hr = object->transform->ProcessMessage (MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, + 0); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (object, "Cannot post begin-stream message"); + return FALSE; + } + + object->need_start = FALSE; + } + + gst_mf_transform_drain_all_events (object); + + if (object->hardware) { + while (object->pending_have_output > 0) { + ret = gst_mf_transform_process_output (object); + if (ret != GST_FLOW_OK) { + if (ret == GST_VIDEO_ENCODER_FLOW_NEED_DATA) { + ret = GST_FLOW_OK; + break; + } else { + return FALSE; + } + } + } + + while (object->pending_need_input == 0) { + MediaEventType type; + HRESULT hr; + + hr = gst_mf_transform_pop_event (object, FALSE, &type); + if (hr != MF_E_NO_EVENTS_AVAILABLE && !gst_mf_result (hr)) { + GST_DEBUG_OBJECT (object, "failed to pop event, hr: 0x%x", (guint) hr); + return FALSE; + } + + switch (type) { + case METransformNeedInput: + object->pending_need_input++; + break; + case METransformHaveOutput: + object->pending_have_output++; + break; + default: + GST_DEBUG_OBJECT (object, "Unhandled event %d", type); + break; + } + } + } + + return gst_mf_transform_process_input_sync (object, sample); +} + +GstFlowReturn +gst_mf_transform_get_output (GstMFTransform * object, + IMFSample ** sample) +{ + g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), GST_FLOW_ERROR); + g_return_val_if_fail (sample != NULL, GST_FLOW_ERROR); + + if (!object->transform) + return GST_FLOW_ERROR; + + gst_mf_transform_drain_all_events (object); + + if (!object->hardware || object->pending_have_output) + gst_mf_transform_process_output (object); + + if (g_queue_is_empty (object->output_queue)) + return GST_MF_TRANSFORM_FLOW_NEED_DATA; + + *sample = (IMFSample *) g_queue_pop_head (object->output_queue); + + return GST_FLOW_OK; +} + +gboolean +gst_mf_transform_flush (GstMFTransform * object) +{ + g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE); + + if (object->transform) { + if (!object->need_start) + object->transform->ProcessMessage (MFT_MESSAGE_COMMAND_FLUSH, 0); + + object->pending_have_output = 0; + object->pending_need_input = 0; + } + + object->need_start = TRUE; + + while (!g_queue_is_empty 
(object->output_queue)) { + IMFSample *sample = (IMFSample *) g_queue_pop_head (object->output_queue); + sample->Release (); + } + + return TRUE; +} + +gboolean +gst_mf_transform_drain (GstMFTransform * object) +{ + GstFlowReturn ret; + + g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE); + + if (!object->transform) + return TRUE; + + object->need_start = TRUE; + object->transform->ProcessMessage (MFT_MESSAGE_COMMAND_DRAIN, 0); + + if (object->hardware) { + MediaEventType type; + HRESULT hr; + + do { + hr = gst_mf_transform_pop_event (object, FALSE, &type); + if (hr != MF_E_NO_EVENTS_AVAILABLE && FAILED (hr)) { + GST_DEBUG_OBJECT (object, "failed to pop event, hr: 0x%x", (guint) hr); + break; + } + + switch (type) { + case METransformNeedInput: + GST_DEBUG_OBJECT (object, "Ignore need input during finish"); + break; + case METransformHaveOutput: + object->pending_have_output++; + gst_mf_transform_process_output (object); + break; + case METransformDrainComplete: + GST_DEBUG_OBJECT (object, "Drain complete"); + return TRUE; + default: + GST_DEBUG_OBJECT (object, "Unhandled event %d", type); + break; + } + } while (SUCCEEDED (hr)); + + /* and drain all the other events if any */ + gst_mf_transform_drain_all_events (object); + + object->pending_have_output = 0; + object->pending_need_input = 0; + } else { + do { + ret = gst_mf_transform_process_output (object); + } while (ret == GST_FLOW_OK); + } + + return TRUE; +} + +gboolean +gst_mf_transform_open (GstMFTransform * object) +{ + HRESULT hr; + + g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE); + + gst_mf_transform_close (object); + + hr = object->activate->ActivateObject (IID_IMFTransform, + (void **) &object->transform); + + if (!gst_mf_result (hr)) { + GST_WARNING_OBJECT (object, "Couldn't open MFT"); + return FALSE; + } + + if (object->hardware) { + ComPtr attr; + + hr = object->transform->GetAttributes (attr.GetAddressOf ()); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (object, "Couldn't get attribute object"); + goto error; + } + + hr = attr->SetUINT32 (MF_TRANSFORM_ASYNC_UNLOCK, TRUE); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (object, "MF_TRANSFORM_ASYNC_UNLOCK error"); + goto error; + } + + hr = object->transform->QueryInterface (IID_IMFMediaEventGenerator, + (void **) &object->event_gen); + if (!gst_mf_result (hr)) { + GST_ERROR_OBJECT (object, "IMFMediaEventGenerator unavailable"); + goto error; + } + } + + hr = object->transform->GetStreamIDs (1, &object->input_id, 1, + &object->output_id); + if (hr == E_NOTIMPL) { + object->input_id = 0; + object->output_id = 0; + } + + hr = object->transform->QueryInterface (IID_ICodecAPI, + (void **) &object->codec_api); + if (!gst_mf_result (hr)) { + GST_WARNING_OBJECT (object, "ICodecAPI is unavailable"); + } + + return TRUE; + +error: + gst_mf_transform_close (object); + return FALSE; +} + +gboolean +gst_mf_transform_close (GstMFTransform * object) +{ + g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE); + + gst_mf_transform_flush (object); + + if (object->event_gen) { + object->event_gen->Release (); + object->event_gen = NULL; + } + + if (object->codec_api) { + object->codec_api->Release (); + object->codec_api = NULL; + } + + if (object->transform) { + object->transform->Release (); + object->transform = NULL; + } + + return TRUE; +} + +IMFActivate * +gst_mf_transform_get_activate_handle (GstMFTransform * object) +{ + g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), NULL); + + return object->activate; +} + +IMFTransform * 
+gst_mf_transform_get_transform_handle (GstMFTransform * object)
+{
+  g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), NULL);
+
+  if (!object->transform) {
+    GST_WARNING_OBJECT (object,
+        "IMFTransform is not configured, open MFT first");
+    return NULL;
+  }
+
+  return object->transform;
+}
+
+ICodecAPI *
+gst_mf_transform_get_codec_api_handle (GstMFTransform * object)
+{
+  g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), NULL);
+
+  if (!object->codec_api) {
+    GST_WARNING_OBJECT (object,
+        "ICodecAPI is not configured, open MFT first");
+    return NULL;
+  }
+
+  return object->codec_api;
+}
+
+gboolean
+gst_mf_transform_get_input_available_types (GstMFTransform * object,
+    GList ** input_types)
+{
+  IMFTransform *transform;
+  HRESULT hr;
+  DWORD index = 0;
+  GList *list = NULL;
+
+  g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE);
+  g_return_val_if_fail (input_types != NULL, FALSE);
+
+  transform = object->transform;
+
+  if (!transform) {
+    GST_ERROR_OBJECT (object, "Should open first");
+    return FALSE;
+  }
+
+  do {
+    IMFMediaType *type = NULL;
+
+    hr = transform->GetInputAvailableType (object->input_id, index, &type);
+    if (SUCCEEDED (hr))
+      list = g_list_append (list, type);
+
+    index++;
+  } while (SUCCEEDED (hr));
+
+  *input_types = list;
+
+  return !!list;
+}
+
+gboolean
+gst_mf_transform_get_output_available_types (GstMFTransform * object,
+    GList ** output_types)
+{
+  IMFTransform *transform;
+  HRESULT hr;
+  DWORD index = 0;
+  GList *list = NULL;
+
+  g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE);
+  g_return_val_if_fail (output_types != NULL, FALSE);
+
+  transform = object->transform;
+
+  if (!transform) {
+    GST_ERROR_OBJECT (object, "Should open first");
+    return FALSE;
+  }
+
+  do {
+    IMFMediaType *type;
+
+    /* enumerate against the output stream id, not the input one */
+    hr = transform->GetOutputAvailableType (object->output_id, index, &type);
+    if (SUCCEEDED (hr))
+      list = g_list_append (list, type);
+
+    index++;
+  } while (SUCCEEDED (hr));
+
+  *output_types = list;
+
+  return !!list;
+}
+
+gboolean
+gst_mf_transform_set_input_type (GstMFTransform * object,
+    IMFMediaType * input_type)
+{
+  IMFTransform *transform;
+  HRESULT hr;
+
+  g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE);
+
+  transform = object->transform;
+
+  if (!transform) {
+    GST_ERROR_OBJECT (object, "Should open first");
+    return FALSE;
+  }
+
+  hr = transform->SetInputType (object->input_id, input_type, 0);
+  if (!gst_mf_result (hr))
+    return FALSE;
+
+  return TRUE;
+}
+
+gboolean
+gst_mf_transform_set_output_type (GstMFTransform * object,
+    IMFMediaType * output_type)
+{
+  IMFTransform *transform;
+  HRESULT hr;
+
+  g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE);
+
+  transform = object->transform;
+
+  if (!transform) {
+    GST_ERROR_OBJECT (object, "Should open first");
+    return FALSE;
+  }
+
+  hr = transform->SetOutputType (object->output_id, output_type, 0);
+  if (!gst_mf_result (hr)) {
+    return FALSE;
+  }
+
+  return TRUE;
+}
+
+GstMFTransform *
+gst_mf_transform_new (GstMFTransformEnumParams * params)
+{
+  GstMFTransform *self;
+
+  g_return_val_if_fail (params != NULL, NULL);
+
+  self = (GstMFTransform *) g_object_new (GST_TYPE_MF_TRANSFORM_OBJECT,
+      "enum-params", params, NULL);
+
+  if (!self->initialized) {
+    gst_object_unref (self);
+    return NULL;
+  }
+
+  gst_object_ref_sink (self);
+
+  return self;
+}
+
+gboolean
+gst_mf_transform_set_codec_api_uint32 (GstMFTransform * object,
+    const GUID * api, guint32 value)
+{
+  HRESULT hr;
+  VARIANT var;
+
+  g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE);
+  g_return_val_if_fail
(api != NULL, FALSE); + + if (!object->codec_api) { + GST_WARNING_OBJECT (object, "codec api unavailable"); + return FALSE; + } + + VariantInit (&var); + var.vt = VT_UI4; + var.ulVal = value; + + hr = object->codec_api->SetValue (api, &var); + VariantClear (&var); + + return gst_mf_result (hr); +} + +gboolean +gst_mf_transform_set_codec_api_uint64 (GstMFTransform * object, + const GUID * api, guint64 value) +{ + HRESULT hr; + VARIANT var; + + g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE); + g_return_val_if_fail (api != NULL, FALSE); + + if (!object->codec_api) { + GST_WARNING_OBJECT (object, "codec api unavailable"); + return FALSE; + } + + VariantInit (&var); + var.vt = VT_UI8; + var.ullVal = value; + + hr = object->codec_api->SetValue (api, &var); + VariantClear (&var); + + return gst_mf_result (hr); +} + +gboolean +gst_mf_transform_set_codec_api_boolean (GstMFTransform * object, + const GUID * api, gboolean value) +{ + HRESULT hr; + VARIANT var; + + g_return_val_if_fail (GST_IS_MF_TRANSFORM (object), FALSE); + g_return_val_if_fail (api != NULL, FALSE); + + if (!object->codec_api) { + GST_WARNING_OBJECT (object, "codec api unavailable"); + return FALSE; + } + + VariantInit (&var); + var.vt = VT_BOOL; + var.boolVal = value ? VARIANT_TRUE : VARIANT_FALSE; + + hr = object->codec_api->SetValue (api, &var); + VariantClear (&var); + + return gst_mf_result (hr); +} + diff --git a/sys/mediafoundation/gstmftransform.h b/sys/mediafoundation/gstmftransform.h new file mode 100644 index 0000000000..670d758628 --- /dev/null +++ b/sys/mediafoundation/gstmftransform.h @@ -0,0 +1,95 @@ +/* GStreamer + * Copyright (C) 2020 Seungha Yang + * Copyright (C) 2020 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#ifndef __GST_MF_TRANSFORM_OBJECT_H__ +#define __GST_MF_TRANSFORM_OBJECT_H__ + +#include +#include "gstmfutils.h" +#include +#include "gststrmif.h" + +G_BEGIN_DECLS + +#define GST_TYPE_MF_TRANSFORM_OBJECT (gst_mf_transform_get_type()) +G_DECLARE_FINAL_TYPE (GstMFTransform, gst_mf_transform, + GST, MF_TRANSFORM, GstObject); + +#define GST_MF_TRANSFORM_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS + +typedef struct _GstMFTransformEnumParams +{ + GUID category; + guint32 enum_flags; + MFT_REGISTER_TYPE_INFO *input_typeinfo; + MFT_REGISTER_TYPE_INFO *output_typeinfo; + + guint device_index; +} GstMFTransformEnumParams; + +GstMFTransform * gst_mf_transform_new (GstMFTransformEnumParams * params); + +gboolean gst_mf_transform_open (GstMFTransform * object); + +gboolean gst_mf_transform_close (GstMFTransform * object); + +IMFActivate * gst_mf_transform_get_activate_handle (GstMFTransform * object); + +IMFTransform * gst_mf_transform_get_transform_handle (GstMFTransform * object); + +ICodecAPI * gst_mf_transform_get_codec_api_handle (GstMFTransform * object); + +gboolean gst_mf_transform_process_input (GstMFTransform * object, + IMFSample * sample); + +GstFlowReturn gst_mf_transform_get_output (GstMFTransform * object, + IMFSample ** sample); + +gboolean gst_mf_transform_flush (GstMFTransform * object); + +gboolean gst_mf_transform_drain (GstMFTransform * object); + +gboolean gst_mf_transform_get_input_available_types (GstMFTransform * object, + GList ** input_types); + +gboolean gst_mf_transform_get_output_available_types (GstMFTransform * object, + GList ** output_types); + +gboolean gst_mf_transform_set_input_type (GstMFTransform * object, + IMFMediaType * input_type); + +gboolean gst_mf_transform_set_output_type (GstMFTransform * object, + IMFMediaType * output_type); + +gboolean gst_mf_transform_set_codec_api_uint32 (GstMFTransform * object, + const GUID * api, + guint32 value); + +gboolean gst_mf_transform_set_codec_api_uint64 (GstMFTransform * object, + const GUID * api, + guint64 value); + +gboolean gst_mf_transform_set_codec_api_boolean (GstMFTransform * object, + const GUID * api, + gboolean value); + +G_END_DECLS + +#endif /* __GST_MF_TRANSFORM_OBJECT_H__ */ \ No newline at end of file diff --git a/sys/mediafoundation/gstmfvideoenc.cpp b/sys/mediafoundation/gstmfvideoenc.cpp new file mode 100644 index 0000000000..7cbe061353 --- /dev/null +++ b/sys/mediafoundation/gstmfvideoenc.cpp @@ -0,0 +1,574 @@ +/* GStreamer + * Copyright (C) 2020 Seungha Yang + * Copyright (C) 2020 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include "gstmfvideoenc.h" +#include + +using namespace Microsoft::WRL; + +GST_DEBUG_CATEGORY (gst_mf_video_enc_debug); +#define GST_CAT_DEFAULT gst_mf_video_enc_debug + +#define gst_mf_video_enc_parent_class parent_class +G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstMFVideoEnc, gst_mf_video_enc, + GST_TYPE_VIDEO_ENCODER, + GST_DEBUG_CATEGORY_INIT (gst_mf_video_enc_debug, "mfvideoenc", 0, + "mfvideoenc")); + +static gboolean gst_mf_video_enc_open (GstVideoEncoder * enc); +static gboolean gst_mf_video_enc_close (GstVideoEncoder * enc); +static gboolean gst_mf_video_enc_set_format (GstVideoEncoder * enc, + GstVideoCodecState * state); +static GstFlowReturn gst_mf_video_enc_handle_frame (GstVideoEncoder * enc, + GstVideoCodecFrame * frame); +static GstFlowReturn gst_mf_video_enc_finish (GstVideoEncoder * enc); +static gboolean gst_mf_video_enc_flush (GstVideoEncoder * enc); + +static void +gst_mf_video_enc_class_init (GstMFVideoEncClass * klass) +{ + GstVideoEncoderClass *videoenc_class = GST_VIDEO_ENCODER_CLASS (klass); + + videoenc_class->open = GST_DEBUG_FUNCPTR (gst_mf_video_enc_open); + videoenc_class->close = GST_DEBUG_FUNCPTR (gst_mf_video_enc_close); + videoenc_class->set_format = GST_DEBUG_FUNCPTR (gst_mf_video_enc_set_format); + videoenc_class->handle_frame = + GST_DEBUG_FUNCPTR (gst_mf_video_enc_handle_frame); + videoenc_class->finish = GST_DEBUG_FUNCPTR (gst_mf_video_enc_finish); + videoenc_class->flush = GST_DEBUG_FUNCPTR (gst_mf_video_enc_flush); +} + +static void +gst_mf_video_enc_init (GstMFVideoEnc * self) +{ +} + +static gboolean +gst_mf_video_enc_open (GstVideoEncoder * enc) +{ + GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc); + GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (enc); + GstMFTransformEnumParams enum_params = { 0, }; + MFT_REGISTER_TYPE_INFO output_type; + gboolean ret; + + output_type.guidMajorType = MFMediaType_Video; + output_type.guidSubtype = klass->codec_id; + + enum_params.category = MFT_CATEGORY_VIDEO_ENCODER; + enum_params.enum_flags = klass->enum_flags; + enum_params.output_typeinfo = &output_type; + enum_params.device_index = klass->device_index; + + GST_DEBUG_OBJECT (self, "Create MFT with enum flags 0x%x, device index %d", + klass->enum_flags, klass->device_index); + + self->transform = gst_mf_transform_new (&enum_params); + ret = !!self->transform; + + if (!ret) + GST_ERROR_OBJECT (self, "Cannot create MFT object"); + + return ret; +} + +static gboolean +gst_mf_video_enc_close (GstVideoEncoder * enc) +{ + GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc); + + gst_clear_object (&self->transform); + + if (self->input_state) { + gst_video_codec_state_unref (self->input_state); + self->input_state = NULL; + } + + return TRUE; +} + +static void +gst_mf_media_type_release (IMFMediaType * type) +{ + if (type) + type->Release (); +} + +static gboolean +gst_mf_video_enc_set_format (GstVideoEncoder * enc, GstVideoCodecState * state) +{ + GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc); + GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (enc); + GstVideoInfo *info = &state->info; + ComPtr in_type; + ComPtr out_type; + GList *input_types = NULL; + GList *iter; + HRESULT hr; + gint fps_n, fps_d; + + GST_DEBUG_OBJECT (self, "Set format"); + + gst_mf_video_enc_finish (enc); + + if (self->input_state) + gst_video_codec_state_unref (self->input_state); + self->input_state = gst_video_codec_state_ref (state); + + if (!gst_mf_transform_open (self->transform)) { + GST_ERROR_OBJECT (self, "Failed to 
open MFT");
+    return FALSE;
+  }
+
+  hr = MFCreateMediaType (out_type.GetAddressOf ());
+  if (!gst_mf_result (hr))
+    return FALSE;
+
+  hr = out_type->SetGUID (MF_MT_MAJOR_TYPE, MFMediaType_Video);
+  if (!gst_mf_result (hr))
+    return FALSE;
+
+  if (klass->set_option) {
+    if (!klass->set_option (self, out_type.Get ())) {
+      GST_ERROR_OBJECT (self, "subclass failed to set option");
+      return FALSE;
+    }
+  }
+
+  fps_n = GST_VIDEO_INFO_FPS_N (info);
+  fps_d = GST_VIDEO_INFO_FPS_D (info);
+  if (fps_n == 0 || fps_d == 0) {
+    fps_n = 0;
+    fps_d = 1;
+  }
+
+  hr = MFSetAttributeRatio (out_type.Get (), MF_MT_FRAME_RATE, fps_n, fps_d);
+  if (!gst_mf_result (hr)) {
+    GST_ERROR_OBJECT (self,
+        "Couldn't set framerate %d/%d, hr: 0x%x", fps_n, fps_d, (guint) hr);
+    return FALSE;
+  }
+
+  hr = MFSetAttributeSize (out_type.Get (), MF_MT_FRAME_SIZE,
+      GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info));
+  if (!gst_mf_result (hr)) {
+    GST_ERROR_OBJECT (self,
+        "Couldn't set resolution %dx%d, hr: 0x%x", GST_VIDEO_INFO_WIDTH (info),
+        GST_VIDEO_INFO_HEIGHT (info), (guint) hr);
+    return FALSE;
+  }
+
+  hr = out_type->SetUINT32 (MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
+  if (!gst_mf_result (hr)) {
+    GST_ERROR_OBJECT (self,
+        "Couldn't set interlace mode, hr: 0x%x", (guint) hr);
+    return FALSE;
+  }
+
+  if (!gst_mf_transform_set_output_type (self->transform, out_type.Get ())) {
+    GST_ERROR_OBJECT (self, "Couldn't set output type");
+    return FALSE;
+  }
+
+  if (!gst_mf_transform_get_input_available_types (self->transform,
+          &input_types)) {
+    GST_ERROR_OBJECT (self, "Couldn't get available input types");
+    return FALSE;
+  }
+
+  for (iter = input_types; iter; iter = g_list_next (iter)) {
+    GstVideoFormat format;
+    GUID subtype;
+    IMFMediaType *type = (IMFMediaType *) iter->data;
+
+    hr = type->GetGUID (MF_MT_SUBTYPE, &subtype);
+    if (!gst_mf_result (hr))
+      continue;
+
+    format = gst_mf_video_subtype_to_video_format (&subtype);
+    if (format != GST_VIDEO_INFO_FORMAT (info))
+      continue;
+
+    in_type = type;
+  }
+
+  g_list_free_full (input_types, (GDestroyNotify) gst_mf_media_type_release);
+
+  if (!in_type) {
+    GST_ERROR_OBJECT (self,
+        "Couldn't convert input caps %" GST_PTR_FORMAT " to media type",
+        state->caps);
+    return FALSE;
+  }
+
+  hr = MFSetAttributeSize (in_type.Get (), MF_MT_FRAME_SIZE,
+      GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info));
+  if (!gst_mf_result (hr)) {
+    GST_ERROR_OBJECT (self, "Couldn't set frame size %dx%d",
+        GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info));
+    return FALSE;
+  }
+
+  hr = in_type->SetUINT32 (MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
+  if (!gst_mf_result (hr)) {
+    GST_ERROR_OBJECT (self,
+        "Couldn't set interlace mode, hr: 0x%x", (guint) hr);
+    return FALSE;
+  }
+
+  hr = MFSetAttributeRatio (in_type.Get (), MF_MT_PIXEL_ASPECT_RATIO,
+      GST_VIDEO_INFO_PAR_N (info), GST_VIDEO_INFO_PAR_D (info));
+  if (!gst_mf_result (hr)) {
+    GST_ERROR_OBJECT (self, "Couldn't set par %d/%d",
+        GST_VIDEO_INFO_PAR_N (info), GST_VIDEO_INFO_PAR_D (info));
+    return FALSE;
+  }
+
+  hr = MFSetAttributeRatio (in_type.Get (), MF_MT_FRAME_RATE, fps_n, fps_d);
+  if (!gst_mf_result (hr)) {
+    GST_ERROR_OBJECT (self, "Couldn't set framerate ratio %d/%d", fps_n, fps_d);
+    return FALSE;
+  }
+
+  hr = in_type->SetUINT32 (MF_MT_DEFAULT_STRIDE,
+      GST_VIDEO_INFO_PLANE_STRIDE (info, 0));
+  if (!gst_mf_result (hr)) {
+    GST_ERROR_OBJECT (self, "Couldn't set default stride");
+    return FALSE;
+  }
+
+  if (!gst_mf_transform_set_input_type (self->transform, in_type.Get ())) {
+    GST_ERROR_OBJECT
(self, "Couldn't set input media type"); + return FALSE; + } + + g_assert (klass->set_src_caps != NULL); + if (!klass->set_src_caps (self, self->input_state, out_type.Get ())) { + GST_ERROR_OBJECT (self, "subclass couldn't set src caps"); + return FALSE; + } + + return TRUE; +} + +typedef struct +{ + GstClockTime mf_pts; +} GstMFVideoEncFrameData; + +static gboolean +gst_mf_video_enc_process_input (GstMFVideoEnc * self, + GstVideoCodecFrame * frame) +{ + GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (self); + HRESULT hr; + ComPtr sample; + ComPtr media_buffer; + GstVideoInfo *info = &self->input_state->info; + gint i, j; + BYTE *data; + GstVideoFrame vframe; + gboolean res = FALSE; + gboolean unset_force_keyframe = FALSE; + GstMFVideoEncFrameData *frame_data = NULL; + + if (!gst_video_frame_map (&vframe, info, frame->input_buffer, GST_MAP_READ)) { + GST_ERROR_OBJECT (self, "Couldn't map input frame"); + gst_video_codec_frame_unref (frame); + return FALSE; + } + + hr = MFCreateSample (sample.GetAddressOf ()); + if (!gst_mf_result (hr)) + goto done; + + hr = MFCreateMemoryBuffer (GST_VIDEO_INFO_SIZE (info), + media_buffer.GetAddressOf ()); + if (!gst_mf_result (hr)) + goto done; + + hr = media_buffer->Lock (&data, NULL, NULL); + if (!gst_mf_result (hr)) + goto done; + + for (i = 0; i < GST_VIDEO_INFO_N_PLANES (info); i++) { + guint8 *src, *dst; + gint src_stride, dst_stride; + gint width; + + src = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, i); + dst = data + GST_VIDEO_INFO_PLANE_OFFSET (info, i); + + src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, i); + dst_stride = GST_VIDEO_INFO_PLANE_STRIDE (info, i); + + width = GST_VIDEO_INFO_COMP_WIDTH (info, i) + * GST_VIDEO_INFO_COMP_PSTRIDE (info, i); + + for (j = 0; j < GST_VIDEO_INFO_COMP_HEIGHT (info, i); j++) { + memcpy (dst, src, width); + src += src_stride; + dst += dst_stride; + } + } + + media_buffer->Unlock (); + + hr = media_buffer->SetCurrentLength (GST_VIDEO_INFO_SIZE (info)); + if (!gst_mf_result (hr)) + goto done; + + hr = sample->AddBuffer (media_buffer.Get ()); + if (!gst_mf_result (hr)) + goto done; + + frame_data = g_new0 (GstMFVideoEncFrameData, 1); + frame_data->mf_pts = frame->pts / 100; + + gst_video_codec_frame_set_user_data (frame, + frame_data, (GDestroyNotify) g_free); + + hr = sample->SetSampleTime (frame_data->mf_pts); + if (!gst_mf_result (hr)) + goto done; + + hr = sample->SetSampleDuration ( + GST_CLOCK_TIME_IS_VALID (frame->duration) ? 
frame->duration / 100 : 0); + if (!gst_mf_result (hr)) + goto done; + + if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame)) { + if (klass->can_force_keyframe) { + unset_force_keyframe = + gst_mf_transform_set_codec_api_uint32 (self->transform, + &CODECAPI_AVEncVideoForceKeyFrame, TRUE); + } else { + GST_WARNING_OBJECT (self, "encoder does not support force keyframe"); + } + } + + if (!gst_mf_transform_process_input (self->transform, sample.Get ())) { + GST_ERROR_OBJECT (self, "Failed to process input"); + goto done; + } + + if (unset_force_keyframe) { + gst_mf_transform_set_codec_api_uint32 (self->transform, + &CODECAPI_AVEncVideoForceKeyFrame, FALSE); + } + + res = TRUE; + +done: + gst_video_frame_unmap (&vframe); + + return res; +} + +static GstVideoCodecFrame * +gst_mf_video_enc_find_output_frame (GstMFVideoEnc * self, UINT64 mf_dts, + UINT64 mf_pts) +{ + GList *l, *walk = gst_video_encoder_get_frames (GST_VIDEO_ENCODER (self)); + GstVideoCodecFrame *ret = NULL; + + for (l = walk; l; l = l->next) { + GstVideoCodecFrame *frame = (GstVideoCodecFrame *) l->data; + GstMFVideoEncFrameData *data = (GstMFVideoEncFrameData *) + gst_video_codec_frame_get_user_data (frame); + + if (!data) + continue; + + if (mf_dts == data->mf_pts) { + ret = frame; + break; + } + } + + /* find target with pts */ + if (!ret) { + for (l = walk; l; l = l->next) { + GstVideoCodecFrame *frame = (GstVideoCodecFrame *) l->data; + GstMFVideoEncFrameData *data = (GstMFVideoEncFrameData *) + gst_video_codec_frame_get_user_data (frame); + + if (!data) + continue; + + if (mf_pts == data->mf_pts) { + ret = frame; + break; + } + } + } + + if (ret) { + gst_video_codec_frame_ref (ret); + } else { + /* just return the oldest one */ + ret = gst_video_encoder_get_oldest_frame (GST_VIDEO_ENCODER (self)); + } + + if (walk) + g_list_free_full (walk, (GDestroyNotify) gst_video_codec_frame_unref); + + return ret; +} + +static GstFlowReturn +gst_mf_video_enc_process_output (GstMFVideoEnc * self) +{ + HRESULT hr; + BYTE *data; + ComPtr media_buffer; + ComPtr sample; + GstBuffer *buffer; + GstFlowReturn res = GST_FLOW_ERROR; + GstVideoCodecFrame *frame; + LONGLONG sample_timestamp; + LONGLONG sample_duration; + UINT32 keyframe = FALSE; + UINT64 mf_dts = GST_CLOCK_TIME_NONE; + DWORD buffer_len; + + res = gst_mf_transform_get_output (self->transform, sample.GetAddressOf ()); + + if (res != GST_FLOW_OK) + return res; + + hr = sample->GetBufferByIndex (0, media_buffer.GetAddressOf ()); + if (!gst_mf_result (hr)) + return GST_FLOW_ERROR; + + hr = media_buffer->Lock (&data, NULL, &buffer_len); + if (!gst_mf_result (hr)) + return GST_FLOW_ERROR; + + buffer = gst_buffer_new_allocate (NULL, buffer_len, NULL); + gst_buffer_fill (buffer, 0, data, buffer_len); + media_buffer->Unlock (); + + sample->GetSampleTime (&sample_timestamp); + sample->GetSampleDuration (&sample_duration); + sample->GetUINT32 (MFSampleExtension_CleanPoint, &keyframe); + + hr = sample->GetUINT64 (MFSampleExtension_DecodeTimestamp, &mf_dts); + if (FAILED (hr)) + mf_dts = sample_timestamp; + + frame = gst_mf_video_enc_find_output_frame (self, + mf_dts, (UINT64) sample_timestamp); + + if (frame) { + if (keyframe) { + GST_DEBUG_OBJECT (self, "Keyframe pts %" GST_TIME_FORMAT, + GST_TIME_ARGS (frame->pts)); + GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame); + GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT); + } else { + GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT); + } + + frame->pts = sample_timestamp * 100; + frame->dts = mf_dts * 100; + frame->duration = 
sample_duration * 100;
+    frame->output_buffer = buffer;
+
+    res = gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (self), frame);
+  } else {
+    GST_BUFFER_DTS (buffer) = mf_dts * 100;
+    GST_BUFFER_PTS (buffer) = sample_timestamp * 100;
+    GST_BUFFER_DURATION (buffer) = sample_duration * 100;
+
+    if (keyframe) {
+      GST_DEBUG_OBJECT (self, "Keyframe pts %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));
+      GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+    } else {
+      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+    }
+
+    res = gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (self), buffer);
+  }
+
+  return res;
+}
+
+static GstFlowReturn
+gst_mf_video_enc_handle_frame (GstVideoEncoder * enc,
+    GstVideoCodecFrame * frame)
+{
+  GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
+  GstFlowReturn ret;
+
+  if (!gst_mf_video_enc_process_input (self, frame)) {
+    GST_ERROR_OBJECT (self, "Failed to process input");
+    return GST_FLOW_ERROR;
+  }
+
+  do {
+    ret = gst_mf_video_enc_process_output (self);
+  } while (ret == GST_FLOW_OK);
+
+  if (ret == GST_MF_TRANSFORM_FLOW_NEED_DATA)
+    ret = GST_FLOW_OK;
+
+  return ret;
+}
+
+static GstFlowReturn
+gst_mf_video_enc_finish (GstVideoEncoder * enc)
+{
+  GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  if (!self->transform)
+    return GST_FLOW_OK;
+
+  gst_mf_transform_drain (self->transform);
+
+  do {
+    ret = gst_mf_video_enc_process_output (self);
+  } while (ret == GST_FLOW_OK);
+
+  if (ret == GST_MF_TRANSFORM_FLOW_NEED_DATA)
+    ret = GST_FLOW_OK;
+
+  return ret;
+}
+
+static gboolean
+gst_mf_video_enc_flush (GstVideoEncoder * enc)
+{
+  GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
+
+  if (!self->transform)
+    return TRUE;
+
+  gst_mf_transform_flush (self->transform);
+
+  return TRUE;
+}
diff --git a/sys/mediafoundation/gstmfvideoenc.h b/sys/mediafoundation/gstmfvideoenc.h
new file mode 100644
index 0000000000..5b3e573ce8
--- /dev/null
+++ b/sys/mediafoundation/gstmfvideoenc.h
@@ -0,0 +1,71 @@
+/* GStreamer
+ * Copyright (C) 2020 Seungha Yang
+ * Copyright (C) 2020 Seungha Yang
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */ + +#ifndef __GST_MF_VIDEO_ENC_H__ +#define __GST_MF_VIDEO_ENC_H__ + +#include +#include +#include "gstmfutils.h" +#include "gstmftransform.h" + +G_BEGIN_DECLS + +#define GST_TYPE_MF_VIDEO_ENC (gst_mf_video_enc_get_type()) +#define GST_MF_VIDEO_ENC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MF_VIDEO_ENC,GstMFVideoEnc)) +#define GST_MF_VIDEO_ENC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_MF_VIDEO_ENC,GstMFVideoEncClass)) +#define GST_MF_VIDEO_ENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_MF_VIDEO_ENC,GstMFVideoEncClass)) +#define GST_IS_MF_VIDEO_ENC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MF_VIDEO_ENC)) +#define GST_IS_MF_VIDEO_ENC_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_MF_VIDEO_ENC)) + +typedef struct _GstMFVideoEnc GstMFVideoEnc; +typedef struct _GstMFVideoEncClass GstMFVideoEncClass; + +struct _GstMFVideoEnc +{ + GstVideoEncoder parent; + + GstMFTransform *transform; + + GstVideoCodecState *input_state; +}; + +struct _GstMFVideoEncClass +{ + GstVideoEncoderClass parent_class; + + GUID codec_id; + guint32 enum_flags; + guint device_index; + gboolean can_force_keyframe; + + gboolean (*set_option) (GstMFVideoEnc * mfenc, + IMFMediaType * output_type); + + gboolean (*set_src_caps) (GstMFVideoEnc * mfenc, + GstVideoCodecState * state, + IMFMediaType * output_type); +}; + +GType gst_mf_video_enc_get_type (void); + +G_END_DECLS + +#endif /* __GST_MF_VIDEO_ENC_H__ */ \ No newline at end of file diff --git a/sys/mediafoundation/gststrmif.h b/sys/mediafoundation/gststrmif.h new file mode 100644 index 0000000000..1fff48b3c5 --- /dev/null +++ b/sys/mediafoundation/gststrmif.h @@ -0,0 +1,305 @@ +/* GStreamer + * Copyright (C) 2020 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#ifndef __GST_STRMIF_H__ +#define __GST_STRMIF_H__ + +#include + +/* From strmif.h. 
+ * ICodecAPI interface will not be exposed + * for the !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) case + * but MSDN said the interface should be available on both + * desktop and UWP cases */ +#ifndef __ICodecAPI_INTERFACE_DEFINED__ +#define __ICodecAPI_INTERFACE_DEFINED__ + +/* interface ICodecAPI */ +/* [unique][uuid][object][local] */ + + +EXTERN_C const IID IID_ICodecAPI; + +#if defined(__cplusplus) && !defined(CINTERFACE) + + MIDL_INTERFACE("901db4c7-31ce-41a2-85dc-8fa0bf41b8da") + ICodecAPI : public IUnknown + { + public: + virtual HRESULT STDMETHODCALLTYPE IsSupported( + /* [in] */ const GUID *Api) = 0; + + virtual HRESULT STDMETHODCALLTYPE IsModifiable( + /* [in] */ const GUID *Api) = 0; + + virtual HRESULT STDMETHODCALLTYPE GetParameterRange( + /* [in] */ const GUID *Api, + /* [annotation][out] */ + _Out_ VARIANT *ValueMin, + /* [annotation][out] */ + _Out_ VARIANT *ValueMax, + /* [annotation][out] */ + _Out_ VARIANT *SteppingDelta) = 0; + + virtual HRESULT STDMETHODCALLTYPE GetParameterValues( + /* [in] */ const GUID *Api, + /* [annotation][size_is][size_is][out] */ + _Outptr_result_buffer_all_(*ValuesCount) VARIANT **Values, + /* [annotation][out] */ + _Out_ ULONG *ValuesCount) = 0; + + virtual HRESULT STDMETHODCALLTYPE GetDefaultValue( + /* [in] */ const GUID *Api, + /* [annotation][out] */ + _Out_ VARIANT *Value) = 0; + + virtual HRESULT STDMETHODCALLTYPE GetValue( + /* [in] */ const GUID *Api, + /* [annotation][out] */ + _Out_ VARIANT *Value) = 0; + + virtual HRESULT STDMETHODCALLTYPE SetValue( + /* [in] */ const GUID *Api, + /* [annotation][in] */ + _In_ VARIANT *Value) = 0; + + virtual HRESULT STDMETHODCALLTYPE RegisterForEvent( + /* [in] */ const GUID *Api, + /* [in] */ LONG_PTR userData) = 0; + + virtual HRESULT STDMETHODCALLTYPE UnregisterForEvent( + /* [in] */ const GUID *Api) = 0; + + virtual HRESULT STDMETHODCALLTYPE SetAllDefaults( void) = 0; + + virtual HRESULT STDMETHODCALLTYPE SetValueWithNotify( + /* [in] */ const GUID *Api, + /* [in] */ VARIANT *Value, + /* [annotation][size_is][size_is][out] */ + _Outptr_result_buffer_all_(*ChangedParamCount) GUID **ChangedParam, + /* [annotation][out] */ + _Out_ ULONG *ChangedParamCount) = 0; + + virtual HRESULT STDMETHODCALLTYPE SetAllDefaultsWithNotify( + /* [annotation][size_is][size_is][out] */ + _Outptr_result_buffer_all_(*ChangedParamCount) GUID **ChangedParam, + /* [annotation][out] */ + _Out_ ULONG *ChangedParamCount) = 0; + + virtual HRESULT STDMETHODCALLTYPE GetAllSettings( + /* [in] */ IStream *__MIDL__ICodecAPI0000) = 0; + + virtual HRESULT STDMETHODCALLTYPE SetAllSettings( + /* [in] */ IStream *__MIDL__ICodecAPI0001) = 0; + + virtual HRESULT STDMETHODCALLTYPE SetAllSettingsWithNotify( + IStream *__MIDL__ICodecAPI0002, + /* [annotation][size_is][size_is][out] */ + _Outptr_result_buffer_all_(*ChangedParamCount) GUID **ChangedParam, + /* [annotation][out] */ + _Out_ ULONG *ChangedParamCount) = 0; + + }; + + +#else /* C style interface */ + + typedef struct ICodecAPIVtbl + { + BEGIN_INTERFACE + + HRESULT ( STDMETHODCALLTYPE *QueryInterface )( + ICodecAPI * This, + /* [in] */ REFIID riid, + /* [annotation][iid_is][out] */ + _COM_Outptr_ void **ppvObject); + + ULONG ( STDMETHODCALLTYPE *AddRef )( + ICodecAPI * This); + + ULONG ( STDMETHODCALLTYPE *Release )( + ICodecAPI * This); + + HRESULT ( STDMETHODCALLTYPE *IsSupported )( + ICodecAPI * This, + /* [in] */ const GUID *Api); + + HRESULT ( STDMETHODCALLTYPE *IsModifiable )( + ICodecAPI * This, + /* [in] */ const GUID *Api); + + HRESULT ( STDMETHODCALLTYPE 
*GetParameterRange )( + ICodecAPI * This, + /* [in] */ const GUID *Api, + /* [annotation][out] */ + _Out_ VARIANT *ValueMin, + /* [annotation][out] */ + _Out_ VARIANT *ValueMax, + /* [annotation][out] */ + _Out_ VARIANT *SteppingDelta); + + HRESULT ( STDMETHODCALLTYPE *GetParameterValues )( + ICodecAPI * This, + /* [in] */ const GUID *Api, + /* [annotation][size_is][size_is][out] */ + _Outptr_result_buffer_all_(*ValuesCount) VARIANT **Values, + /* [annotation][out] */ + _Out_ ULONG *ValuesCount); + + HRESULT ( STDMETHODCALLTYPE *GetDefaultValue )( + ICodecAPI * This, + /* [in] */ const GUID *Api, + /* [annotation][out] */ + _Out_ VARIANT *Value); + + HRESULT ( STDMETHODCALLTYPE *GetValue )( + ICodecAPI * This, + /* [in] */ const GUID *Api, + /* [annotation][out] */ + _Out_ VARIANT *Value); + + HRESULT ( STDMETHODCALLTYPE *SetValue )( + ICodecAPI * This, + /* [in] */ const GUID *Api, + /* [annotation][in] */ + _In_ VARIANT *Value); + + HRESULT ( STDMETHODCALLTYPE *RegisterForEvent )( + ICodecAPI * This, + /* [in] */ const GUID *Api, + /* [in] */ LONG_PTR userData); + + HRESULT ( STDMETHODCALLTYPE *UnregisterForEvent )( + ICodecAPI * This, + /* [in] */ const GUID *Api); + + HRESULT ( STDMETHODCALLTYPE *SetAllDefaults )( + ICodecAPI * This); + + HRESULT ( STDMETHODCALLTYPE *SetValueWithNotify )( + ICodecAPI * This, + /* [in] */ const GUID *Api, + /* [in] */ VARIANT *Value, + /* [annotation][size_is][size_is][out] */ + _Outptr_result_buffer_all_(*ChangedParamCount) GUID **ChangedParam, + /* [annotation][out] */ + _Out_ ULONG *ChangedParamCount); + + HRESULT ( STDMETHODCALLTYPE *SetAllDefaultsWithNotify )( + ICodecAPI * This, + /* [annotation][size_is][size_is][out] */ + _Outptr_result_buffer_all_(*ChangedParamCount) GUID **ChangedParam, + /* [annotation][out] */ + _Out_ ULONG *ChangedParamCount); + + HRESULT ( STDMETHODCALLTYPE *GetAllSettings )( + ICodecAPI * This, + /* [in] */ IStream *__MIDL__ICodecAPI0000); + + HRESULT ( STDMETHODCALLTYPE *SetAllSettings )( + ICodecAPI * This, + /* [in] */ IStream *__MIDL__ICodecAPI0001); + + HRESULT ( STDMETHODCALLTYPE *SetAllSettingsWithNotify )( + ICodecAPI * This, + IStream *__MIDL__ICodecAPI0002, + /* [annotation][size_is][size_is][out] */ + _Outptr_result_buffer_all_(*ChangedParamCount) GUID **ChangedParam, + /* [annotation][out] */ + _Out_ ULONG *ChangedParamCount); + + END_INTERFACE + } ICodecAPIVtbl; + + interface ICodecAPI + { + CONST_VTBL struct ICodecAPIVtbl *lpVtbl; + }; + + + +#ifdef COBJMACROS + + +#define ICodecAPI_QueryInterface(This,riid,ppvObject) \ + ( (This)->lpVtbl -> QueryInterface(This,riid,ppvObject) ) + +#define ICodecAPI_AddRef(This) \ + ( (This)->lpVtbl -> AddRef(This) ) + +#define ICodecAPI_Release(This) \ + ( (This)->lpVtbl -> Release(This) ) + + +#define ICodecAPI_IsSupported(This,Api) \ + ( (This)->lpVtbl -> IsSupported(This,Api) ) + +#define ICodecAPI_IsModifiable(This,Api) \ + ( (This)->lpVtbl -> IsModifiable(This,Api) ) + +#define ICodecAPI_GetParameterRange(This,Api,ValueMin,ValueMax,SteppingDelta) \ + ( (This)->lpVtbl -> GetParameterRange(This,Api,ValueMin,ValueMax,SteppingDelta) ) + +#define ICodecAPI_GetParameterValues(This,Api,Values,ValuesCount) \ + ( (This)->lpVtbl -> GetParameterValues(This,Api,Values,ValuesCount) ) + +#define ICodecAPI_GetDefaultValue(This,Api,Value) \ + ( (This)->lpVtbl -> GetDefaultValue(This,Api,Value) ) + +#define ICodecAPI_GetValue(This,Api,Value) \ + ( (This)->lpVtbl -> GetValue(This,Api,Value) ) + +#define ICodecAPI_SetValue(This,Api,Value) \ + ( (This)->lpVtbl -> SetValue(This,Api,Value) 
) + +#define ICodecAPI_RegisterForEvent(This,Api,userData) \ + ( (This)->lpVtbl -> RegisterForEvent(This,Api,userData) ) + +#define ICodecAPI_UnregisterForEvent(This,Api) \ + ( (This)->lpVtbl -> UnregisterForEvent(This,Api) ) + +#define ICodecAPI_SetAllDefaults(This) \ + ( (This)->lpVtbl -> SetAllDefaults(This) ) + +#define ICodecAPI_SetValueWithNotify(This,Api,Value,ChangedParam,ChangedParamCount) \ + ( (This)->lpVtbl -> SetValueWithNotify(This,Api,Value,ChangedParam,ChangedParamCount) ) + +#define ICodecAPI_SetAllDefaultsWithNotify(This,ChangedParam,ChangedParamCount) \ + ( (This)->lpVtbl -> SetAllDefaultsWithNotify(This,ChangedParam,ChangedParamCount) ) + +#define ICodecAPI_GetAllSettings(This,__MIDL__ICodecAPI0000) \ + ( (This)->lpVtbl -> GetAllSettings(This,__MIDL__ICodecAPI0000) ) + +#define ICodecAPI_SetAllSettings(This,__MIDL__ICodecAPI0001) \ + ( (This)->lpVtbl -> SetAllSettings(This,__MIDL__ICodecAPI0001) ) + +#define ICodecAPI_SetAllSettingsWithNotify(This,__MIDL__ICodecAPI0002,ChangedParam,ChangedParamCount) \ + ( (This)->lpVtbl -> SetAllSettingsWithNotify(This,__MIDL__ICodecAPI0002,ChangedParam,ChangedParamCount) ) + +#endif /* COBJMACROS */ + + +#endif /* C style interface */ + + + + +#endif /* __ICodecAPI_INTERFACE_DEFINED__ */ + +#endif /* __GST_STRMIF_H__ */ \ No newline at end of file diff --git a/sys/mediafoundation/meson.build b/sys/mediafoundation/meson.build index 28825a1802..57097c5eb4 100644 --- a/sys/mediafoundation/meson.build +++ b/sys/mediafoundation/meson.build @@ -1,6 +1,12 @@ mf_sources = [ 'plugin.c', 'gstmfutils.cpp', + 'gstmftransform.cpp', + 'gstmfvideoenc.cpp', + 'gstmfh264enc.cpp', +] + +mf_desktop_sources = [ 'gstmfvideosrc.c', 'gstmfsourceobject.c', 'gstmfsourcereader.cpp', @@ -13,6 +19,7 @@ mf_header_deps = [ 'mferror.h', 'strmif.h', 'mfobjects.h', + 'codecapi.h', ] winapi_desktop = false @@ -70,16 +77,13 @@ winapi_desktop = cxx.compiles('''#include #endif''', dependencies: mf_lib_deps, name: 'checking if building for Win32') -if not winapi_desktop - if mf_option.enabled() - error('The mediafoundation plugin was enabled explicitly, build target is not desktop app.') - endif - subdir_done() -endif -have_capture_engine = cc.has_header('mfcaptureengine.h') -if have_capture_engine - mf_sources += ['gstmfcaptureengine.cpp'] +if winapi_desktop + mf_sources += mf_desktop_sources + have_capture_engine = cc.has_header('mfcaptureengine.h') + if have_capture_engine + mf_sources += ['gstmfcaptureengine.cpp'] + endif endif mf_config.set10('HAVE_CAPTURE_ENGINE', have_capture_engine) @@ -94,7 +98,7 @@ gstmediafoundation = library('gstmediafoundation', c_args : gst_plugins_bad_args + extra_c_args, cpp_args : gst_plugins_bad_args, include_directories : [configinc], - dependencies : [gstbase_dep, gstvideo_dep] + mf_lib_deps, + dependencies : [gstbase_dep, gstvideo_dep, gstpbutils_dep] + mf_lib_deps, install : true, install_dir : plugins_install_dir, ) diff --git a/sys/mediafoundation/plugin.c b/sys/mediafoundation/plugin.c index c841aabd23..9cc0978e6d 100644 --- a/sys/mediafoundation/plugin.c +++ b/sys/mediafoundation/plugin.c @@ -22,16 +22,29 @@ #include "config.h" #endif +#include + #include +#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) #include "gstmfvideosrc.h" +#endif #include "gstmfutils.h" +#include "gstmfh264enc.h" GST_DEBUG_CATEGORY (gst_mf_debug); GST_DEBUG_CATEGORY (gst_mf_utils_debug); +#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) GST_DEBUG_CATEGORY (gst_mf_source_object_debug); +#endif +GST_DEBUG_CATEGORY (gst_mf_transform_debug); 
#define GST_CAT_DEFAULT gst_mf_debug +/* NOTE: If you want to use this plugin in UWP app, don't try to load/initialize + * this plugin on UI thread, since the UI thread would be STA Thread + * but this plugin will be initialized with COINIT_MULTITHREADED parameter. + * This rule can be applied over all GStreamer plugins which are involved with + * COM libraries */ static gboolean plugin_init (GstPlugin * plugin) { @@ -40,17 +53,24 @@ plugin_init (GstPlugin * plugin) GST_DEBUG_CATEGORY_INIT (gst_mf_debug, "mf", 0, "media foundation"); GST_DEBUG_CATEGORY_INIT (gst_mf_utils_debug, "mfutils", 0, "media foundation utility functions"); +#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) GST_DEBUG_CATEGORY_INIT (gst_mf_source_object_debug, "mfsourceobject", 0, "mfsourceobject"); +#endif + GST_DEBUG_CATEGORY_INIT (gst_mf_transform_debug, + "mftransform", 0, "mftransform"); hr = MFStartup (MF_VERSION, MFSTARTUP_NOSOCKET); if (!gst_mf_result (hr)) { GST_WARNING ("MFStartup failure, hr: 0x%x", hr); return TRUE; } - +#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) gst_element_register (plugin, "mfvideosrc", GST_RANK_SECONDARY, GST_TYPE_MF_VIDEO_SRC); +#endif + + gst_mf_h264_enc_plugin_init (plugin, GST_RANK_SECONDARY); return TRUE; }
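
A minimal, hypothetical application-side sketch of the COM threading note in plugin.c above (the helper names below are illustrative and not part of this patch): run gst_init() and any element creation that pulls in this plugin from a worker thread, so the plugin's MFStartup/COINIT_MULTITHREADED setup never runs on the STA UI thread of a UWP app.

    #include <gst/gst.h>
    #include <windows.h>

    /* Worker thread: initializes GStreamer and loads the plugin off the UI thread */
    static DWORD WINAPI
    init_gst_thread_func (LPVOID user_data)
    {
      GstElement *enc;

      gst_init (NULL, NULL);

      /* creating an element here forces the mediafoundation plugin to load
       * (and run its COM/MFStartup initialization) on this worker thread */
      enc = gst_element_factory_make ("mfh264enc", NULL);
      if (enc)
        gst_object_unref (enc);

      return 0;
    }

    static void
    app_init_gstreamer (void)
    {
      HANDLE thread = CreateThread (NULL, 0, init_gst_thread_func, NULL, 0, NULL);

      WaitForSingleObject (thread, INFINITE);
      CloseHandle (thread);
    }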