mirror of
https://gitlab.freedesktop.org/gstreamer/gstreamer.git
synced 2024-11-13 12:51:16 +00:00
mediafoundation: Cosmetic changes
Rename baseclass to be consistent with other Windows plugins Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/1596>
This commit is contained in:
parent
ce4875ccde
commit
0b26254a6a
13 changed files with 367 additions and 342 deletions
|
@ -36,7 +36,7 @@
|
|||
|
||||
#include <gst/gst.h>
|
||||
#include <gst/pbutils/pbutils.h>
|
||||
#include "gstmfaudioenc.h"
|
||||
#include "gstmfaudioencoder.h"
|
||||
#include "gstmfaacenc.h"
|
||||
#include <wrl.h>
|
||||
#include <set>
|
||||
|
@ -60,7 +60,7 @@ enum
|
|||
|
||||
typedef struct _GstMFAacEnc
|
||||
{
|
||||
GstMFAudioEnc parent;
|
||||
GstMFAudioEncoder parent;
|
||||
|
||||
/* properties */
|
||||
guint bitrate;
|
||||
|
@ -68,7 +68,7 @@ typedef struct _GstMFAacEnc
|
|||
|
||||
typedef struct _GstMFAacEncClass
|
||||
{
|
||||
GstMFAudioEncClass parent_class;
|
||||
GstMFAudioEncoderClass parent_class;
|
||||
|
||||
} GstMFAacEncClass;
|
||||
|
||||
|
@ -90,11 +90,11 @@ static void gst_mf_aac_enc_get_property (GObject * object, guint prop_id,
|
|||
GValue * value, GParamSpec * pspec);
|
||||
static void gst_mf_aac_enc_set_property (GObject * object, guint prop_id,
|
||||
const GValue * value, GParamSpec * pspec);
|
||||
static gboolean gst_mf_aac_enc_get_output_type (GstMFAudioEnc * mfenc,
|
||||
static gboolean gst_mf_aac_enc_get_output_type (GstMFAudioEncoder * encoder,
|
||||
GstAudioInfo * info, IMFMediaType ** output_type);
|
||||
static gboolean gst_mf_aac_enc_get_input_type (GstMFAudioEnc * mfenc,
|
||||
static gboolean gst_mf_aac_enc_get_input_type (GstMFAudioEncoder * encoder,
|
||||
GstAudioInfo * info, IMFMediaType ** input_type);
|
||||
static gboolean gst_mf_aac_enc_set_src_caps (GstMFAudioEnc * mfenc,
|
||||
static gboolean gst_mf_aac_enc_set_src_caps (GstMFAudioEncoder * encoder,
|
||||
GstAudioInfo * info);
|
||||
|
||||
static void
|
||||
|
@ -102,7 +102,7 @@ gst_mf_aac_enc_class_init (GstMFAacEncClass * klass, gpointer data)
|
|||
{
|
||||
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
|
||||
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
|
||||
GstMFAudioEncClass *mfenc_class = GST_MF_AUDIO_ENC_CLASS (klass);
|
||||
GstMFAudioEncoderClass *encoder_class = GST_MF_AUDIO_ENCODER_CLASS (klass);
|
||||
GstMFAacEncClassData *cdata = (GstMFAacEncClassData *) data;
|
||||
gchar *long_name;
|
||||
gchar *classification;
|
||||
|
@ -149,16 +149,16 @@ gst_mf_aac_enc_class_init (GstMFAacEncClass * klass, gpointer data)
|
|||
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
|
||||
cdata->src_caps));
|
||||
|
||||
mfenc_class->get_output_type =
|
||||
encoder_class->get_output_type =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_aac_enc_get_output_type);
|
||||
mfenc_class->get_input_type =
|
||||
encoder_class->get_input_type =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_aac_enc_get_input_type);
|
||||
mfenc_class->set_src_caps = GST_DEBUG_FUNCPTR (gst_mf_aac_enc_set_src_caps);
|
||||
encoder_class->set_src_caps = GST_DEBUG_FUNCPTR (gst_mf_aac_enc_set_src_caps);
|
||||
|
||||
mfenc_class->codec_id = MFAudioFormat_AAC;
|
||||
mfenc_class->enum_flags = cdata->enum_flags;
|
||||
mfenc_class->device_index = cdata->device_index;
|
||||
mfenc_class->frame_samples = 1024;
|
||||
encoder_class->codec_id = MFAudioFormat_AAC;
|
||||
encoder_class->enum_flags = cdata->enum_flags;
|
||||
encoder_class->device_index = cdata->device_index;
|
||||
encoder_class->frame_samples = 1024;
|
||||
|
||||
g_free (cdata->device_name);
|
||||
gst_caps_unref (cdata->sink_caps);
|
||||
|
@ -205,11 +205,11 @@ gst_mf_aac_enc_set_property (GObject * object, guint prop_id,
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_aac_enc_get_output_type (GstMFAudioEnc * mfenc, GstAudioInfo * info,
|
||||
IMFMediaType ** output_type)
|
||||
gst_mf_aac_enc_get_output_type (GstMFAudioEncoder * encoder,
|
||||
GstAudioInfo * info, IMFMediaType ** output_type)
|
||||
{
|
||||
GstMFAacEnc *self = (GstMFAacEnc *) mfenc;
|
||||
GstMFTransform *transform = mfenc->transform;
|
||||
GstMFAacEnc *self = (GstMFAacEnc *) encoder;
|
||||
GstMFTransform *transform = encoder->transform;
|
||||
GList *output_list = nullptr;
|
||||
GList *iter;
|
||||
ComPtr < IMFMediaType > target_output;
|
||||
|
@ -355,11 +355,11 @@ gst_mf_aac_enc_get_output_type (GstMFAudioEnc * mfenc, GstAudioInfo * info,
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_aac_enc_get_input_type (GstMFAudioEnc * mfenc, GstAudioInfo * info,
|
||||
gst_mf_aac_enc_get_input_type (GstMFAudioEncoder * encoder, GstAudioInfo * info,
|
||||
IMFMediaType ** input_type)
|
||||
{
|
||||
GstMFAacEnc *self = (GstMFAacEnc *) mfenc;
|
||||
GstMFTransform *transform = mfenc->transform;
|
||||
GstMFAacEnc *self = (GstMFAacEnc *) encoder;
|
||||
GstMFTransform *transform = encoder->transform;
|
||||
GList *input_list = nullptr;
|
||||
GList *iter;
|
||||
ComPtr < IMFMediaType > target_input;
|
||||
|
@ -432,9 +432,9 @@ gst_mf_aac_enc_get_input_type (GstMFAudioEnc * mfenc, GstAudioInfo * info,
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_aac_enc_set_src_caps (GstMFAudioEnc * mfenc, GstAudioInfo * info)
|
||||
gst_mf_aac_enc_set_src_caps (GstMFAudioEncoder * encoder, GstAudioInfo * info)
|
||||
{
|
||||
GstMFAacEnc *self = (GstMFAacEnc *) mfenc;
|
||||
GstMFAacEnc *self = (GstMFAacEnc *) encoder;
|
||||
HRESULT hr;
|
||||
GstCaps *src_caps;
|
||||
GstBuffer *codec_data;
|
||||
|
@ -444,7 +444,7 @@ gst_mf_aac_enc_set_src_caps (GstMFAudioEnc * mfenc, GstAudioInfo * info)
|
|||
ComPtr < IMFMediaType > output_type;
|
||||
static const guint config_data_offset = 12;
|
||||
|
||||
if (!gst_mf_transform_get_output_current_type (mfenc->transform,
|
||||
if (!gst_mf_transform_get_output_current_type (encoder->transform,
|
||||
&output_type)) {
|
||||
GST_ERROR_OBJECT (self, "Couldn't get current output type");
|
||||
return FALSE;
|
||||
|
@ -546,7 +546,7 @@ gst_mf_aac_enc_register (GstPlugin * plugin, guint rank,
|
|||
}
|
||||
|
||||
type =
|
||||
g_type_register_static (GST_TYPE_MF_AUDIO_ENC, type_name, &type_info,
|
||||
g_type_register_static (GST_TYPE_MF_AUDIO_ENCODER, type_name, &type_info,
|
||||
(GTypeFlags) 0);
|
||||
|
||||
/* make lower rank than default device */
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
#endif
|
||||
|
||||
#include <gst/gst.h>
|
||||
#include "gstmfaudioenc.h"
|
||||
#include "gstmfaudioencoder.h"
|
||||
#include <wrl.h>
|
||||
#include <string.h>
|
||||
|
||||
|
@ -30,53 +30,54 @@
|
|||
using namespace Microsoft::WRL;
|
||||
/* *INDENT-ON* */
|
||||
|
||||
GST_DEBUG_CATEGORY (gst_mf_audio_enc_debug);
|
||||
#define GST_CAT_DEFAULT gst_mf_audio_enc_debug
|
||||
GST_DEBUG_CATEGORY (gst_mf_audio_encoder_debug);
|
||||
#define GST_CAT_DEFAULT gst_mf_audio_encoder_debug
|
||||
|
||||
#define gst_mf_audio_enc_parent_class parent_class
|
||||
G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstMFAudioEnc, gst_mf_audio_enc,
|
||||
#define gst_mf_audio_encoder_parent_class parent_class
|
||||
G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstMFAudioEncoder, gst_mf_audio_encoder,
|
||||
GST_TYPE_AUDIO_ENCODER,
|
||||
GST_DEBUG_CATEGORY_INIT (gst_mf_audio_enc_debug, "mfaudioenc", 0,
|
||||
"mfaudioenc"));
|
||||
GST_DEBUG_CATEGORY_INIT (gst_mf_audio_encoder_debug, "mfaudioencoder", 0,
|
||||
"mfaudioencoder"));
|
||||
|
||||
static gboolean gst_mf_audio_enc_open (GstAudioEncoder * enc);
|
||||
static gboolean gst_mf_audio_enc_close (GstAudioEncoder * enc);
|
||||
static gboolean gst_mf_audio_enc_set_format (GstAudioEncoder * enc,
|
||||
static gboolean gst_mf_audio_encoder_open (GstAudioEncoder * enc);
|
||||
static gboolean gst_mf_audio_encoder_close (GstAudioEncoder * enc);
|
||||
static gboolean gst_mf_audio_encoder_set_format (GstAudioEncoder * enc,
|
||||
GstAudioInfo * info);
|
||||
static GstFlowReturn gst_mf_audio_enc_handle_frame (GstAudioEncoder * enc,
|
||||
static GstFlowReturn gst_mf_audio_encoder_handle_frame (GstAudioEncoder * enc,
|
||||
GstBuffer * buffer);
|
||||
static GstFlowReturn gst_mf_audio_enc_drain (GstAudioEncoder * enc);
|
||||
static void gst_mf_audio_enc_flush (GstAudioEncoder * enc);
|
||||
static GstFlowReturn gst_mf_audio_encoder_drain (GstAudioEncoder * enc);
|
||||
static void gst_mf_audio_encoder_flush (GstAudioEncoder * enc);
|
||||
|
||||
static void
|
||||
gst_mf_audio_enc_class_init (GstMFAudioEncClass * klass)
|
||||
gst_mf_audio_encoder_class_init (GstMFAudioEncoderClass * klass)
|
||||
{
|
||||
GstAudioEncoderClass *audioenc_class = GST_AUDIO_ENCODER_CLASS (klass);
|
||||
|
||||
audioenc_class->open = GST_DEBUG_FUNCPTR (gst_mf_audio_enc_open);
|
||||
audioenc_class->close = GST_DEBUG_FUNCPTR (gst_mf_audio_enc_close);
|
||||
audioenc_class->set_format = GST_DEBUG_FUNCPTR (gst_mf_audio_enc_set_format);
|
||||
audioenc_class->open = GST_DEBUG_FUNCPTR (gst_mf_audio_encoder_open);
|
||||
audioenc_class->close = GST_DEBUG_FUNCPTR (gst_mf_audio_encoder_close);
|
||||
audioenc_class->set_format =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_audio_encoder_set_format);
|
||||
audioenc_class->handle_frame =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_audio_enc_handle_frame);
|
||||
audioenc_class->flush = GST_DEBUG_FUNCPTR (gst_mf_audio_enc_flush);
|
||||
GST_DEBUG_FUNCPTR (gst_mf_audio_encoder_handle_frame);
|
||||
audioenc_class->flush = GST_DEBUG_FUNCPTR (gst_mf_audio_encoder_flush);
|
||||
|
||||
gst_type_mark_as_plugin_api (GST_TYPE_MF_AUDIO_ENC, (GstPluginAPIFlags) 0);
|
||||
gst_type_mark_as_plugin_api (GST_TYPE_MF_AUDIO_ENCODER,
|
||||
(GstPluginAPIFlags) 0);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_mf_audio_enc_init (GstMFAudioEnc * self)
|
||||
gst_mf_audio_encoder_init (GstMFAudioEncoder * self)
|
||||
{
|
||||
gst_audio_encoder_set_drainable (GST_AUDIO_ENCODER (self), TRUE);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_audio_enc_open (GstAudioEncoder * enc)
|
||||
gst_mf_audio_encoder_open (GstAudioEncoder * enc)
|
||||
{
|
||||
GstMFAudioEnc *self = GST_MF_AUDIO_ENC (enc);
|
||||
GstMFAudioEncClass *klass = GST_MF_AUDIO_ENC_GET_CLASS (enc);
|
||||
GstMFAudioEncoder *self = GST_MF_AUDIO_ENCODER (enc);
|
||||
GstMFAudioEncoderClass *klass = GST_MF_AUDIO_ENCODER_GET_CLASS (enc);
|
||||
GstMFTransformEnumParams enum_params = { 0, };
|
||||
MFT_REGISTER_TYPE_INFO output_type;
|
||||
gboolean ret;
|
||||
|
||||
output_type.guidMajorType = MFMediaType_Audio;
|
||||
output_type.guidSubtype = klass->codec_id;
|
||||
|
@ -90,18 +91,18 @@ gst_mf_audio_enc_open (GstAudioEncoder * enc)
|
|||
klass->enum_flags, klass->device_index);
|
||||
|
||||
self->transform = gst_mf_transform_new (&enum_params);
|
||||
ret = !!self->transform;
|
||||
|
||||
if (!ret)
|
||||
if (!self->transform) {
|
||||
GST_ERROR_OBJECT (self, "Cannot create MFT object");
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
return ret;
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_audio_enc_close (GstAudioEncoder * enc)
|
||||
gst_mf_audio_encoder_close (GstAudioEncoder * enc)
|
||||
{
|
||||
GstMFAudioEnc *self = GST_MF_AUDIO_ENC (enc);
|
||||
GstMFAudioEncoder *self = GST_MF_AUDIO_ENCODER (enc);
|
||||
|
||||
gst_clear_object (&self->transform);
|
||||
|
||||
|
@ -109,16 +110,16 @@ gst_mf_audio_enc_close (GstAudioEncoder * enc)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_audio_enc_set_format (GstAudioEncoder * enc, GstAudioInfo * info)
|
||||
gst_mf_audio_encoder_set_format (GstAudioEncoder * enc, GstAudioInfo * info)
|
||||
{
|
||||
GstMFAudioEnc *self = GST_MF_AUDIO_ENC (enc);
|
||||
GstMFAudioEncClass *klass = GST_MF_AUDIO_ENC_GET_CLASS (enc);
|
||||
GstMFAudioEncoder *self = GST_MF_AUDIO_ENCODER (enc);
|
||||
GstMFAudioEncoderClass *klass = GST_MF_AUDIO_ENCODER_GET_CLASS (enc);
|
||||
ComPtr < IMFMediaType > in_type;
|
||||
ComPtr < IMFMediaType > out_type;
|
||||
|
||||
GST_DEBUG_OBJECT (self, "Set format");
|
||||
|
||||
gst_mf_audio_enc_drain (enc);
|
||||
gst_mf_audio_encoder_drain (enc);
|
||||
|
||||
if (!gst_mf_transform_open (self->transform)) {
|
||||
GST_ERROR_OBJECT (self, "Failed to open MFT");
|
||||
|
@ -173,7 +174,8 @@ gst_mf_audio_enc_set_format (GstAudioEncoder * enc, GstAudioInfo * info)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_audio_enc_process_input (GstMFAudioEnc * self, GstBuffer * buffer)
|
||||
gst_mf_audio_encoder_process_input (GstMFAudioEncoder * self,
|
||||
GstBuffer * buffer)
|
||||
{
|
||||
HRESULT hr;
|
||||
ComPtr < IMFSample > sample;
|
||||
|
@ -240,9 +242,9 @@ done:
|
|||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_mf_audio_enc_process_output (GstMFAudioEnc * self)
|
||||
gst_mf_audio_encoder_process_output (GstMFAudioEncoder * self)
|
||||
{
|
||||
GstMFAudioEncClass *klass = GST_MF_AUDIO_ENC_GET_CLASS (self);
|
||||
GstMFAudioEncoderClass *klass = GST_MF_AUDIO_ENCODER_GET_CLASS (self);
|
||||
HRESULT hr;
|
||||
BYTE *data = nullptr;
|
||||
ComPtr < IMFMediaBuffer > media_buffer;
|
||||
|
@ -281,21 +283,21 @@ gst_mf_audio_enc_process_output (GstMFAudioEnc * self)
|
|||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_mf_audio_enc_handle_frame (GstAudioEncoder * enc, GstBuffer * buffer)
|
||||
gst_mf_audio_encoder_handle_frame (GstAudioEncoder * enc, GstBuffer * buffer)
|
||||
{
|
||||
GstMFAudioEnc *self = GST_MF_AUDIO_ENC (enc);
|
||||
GstMFAudioEncoder *self = GST_MF_AUDIO_ENCODER (enc);
|
||||
GstFlowReturn ret;
|
||||
|
||||
if (!buffer)
|
||||
return gst_mf_audio_enc_drain (enc);
|
||||
return gst_mf_audio_encoder_drain (enc);
|
||||
|
||||
if (!gst_mf_audio_enc_process_input (self, buffer)) {
|
||||
if (!gst_mf_audio_encoder_process_input (self, buffer)) {
|
||||
GST_ERROR_OBJECT (self, "Failed to process input");
|
||||
return GST_FLOW_ERROR;
|
||||
}
|
||||
|
||||
do {
|
||||
ret = gst_mf_audio_enc_process_output (self);
|
||||
ret = gst_mf_audio_encoder_process_output (self);
|
||||
} while (ret == GST_FLOW_OK);
|
||||
|
||||
if (ret == GST_MF_TRANSFORM_FLOW_NEED_DATA)
|
||||
|
@ -305,9 +307,9 @@ gst_mf_audio_enc_handle_frame (GstAudioEncoder * enc, GstBuffer * buffer)
|
|||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_mf_audio_enc_drain (GstAudioEncoder * enc)
|
||||
gst_mf_audio_encoder_drain (GstAudioEncoder * enc)
|
||||
{
|
||||
GstMFAudioEnc *self = GST_MF_AUDIO_ENC (enc);
|
||||
GstMFAudioEncoder *self = GST_MF_AUDIO_ENCODER (enc);
|
||||
GstFlowReturn ret = GST_FLOW_OK;
|
||||
|
||||
if (!self->transform)
|
||||
|
@ -316,7 +318,7 @@ gst_mf_audio_enc_drain (GstAudioEncoder * enc)
|
|||
gst_mf_transform_drain (self->transform);
|
||||
|
||||
do {
|
||||
ret = gst_mf_audio_enc_process_output (self);
|
||||
ret = gst_mf_audio_encoder_process_output (self);
|
||||
} while (ret == GST_FLOW_OK);
|
||||
|
||||
if (ret == GST_MF_TRANSFORM_FLOW_NEED_DATA)
|
||||
|
@ -326,9 +328,9 @@ gst_mf_audio_enc_drain (GstAudioEncoder * enc)
|
|||
}
|
||||
|
||||
static void
|
||||
gst_mf_audio_enc_flush (GstAudioEncoder * enc)
|
||||
gst_mf_audio_encoder_flush (GstAudioEncoder * enc)
|
||||
{
|
||||
GstMFAudioEnc *self = GST_MF_AUDIO_ENC (enc);
|
||||
GstMFAudioEncoder *self = GST_MF_AUDIO_ENCODER (enc);
|
||||
|
||||
if (!self->transform)
|
||||
return;
|
|
@ -17,8 +17,7 @@
|
|||
* Boston, MA 02110-1301, USA.
|
||||
*/
|
||||
|
||||
#ifndef __GST_MF_AUDIO_ENC_H__
|
||||
#define __GST_MF_AUDIO_ENC_H__
|
||||
#pragma once
|
||||
|
||||
#include <gst/gst.h>
|
||||
#include <gst/audio/audio.h>
|
||||
|
@ -27,17 +26,17 @@
|
|||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
#define GST_TYPE_MF_AUDIO_ENC (gst_mf_audio_enc_get_type())
|
||||
#define GST_MF_AUDIO_ENC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MF_AUDIO_ENC,GstMFAudioEnc))
|
||||
#define GST_MF_AUDIO_ENC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_MF_AUDIO_ENC,GstMFAudioEncClass))
|
||||
#define GST_MF_AUDIO_ENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_MF_AUDIO_ENC,GstMFAudioEncClass))
|
||||
#define GST_IS_MF_AUDIO_ENC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MF_AUDIO_ENC))
|
||||
#define GST_IS_MF_AUDIO_ENC_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_MF_AUDIO_ENC))
|
||||
#define GST_TYPE_MF_AUDIO_ENCODER (gst_mf_audio_encoder_get_type())
|
||||
#define GST_MF_AUDIO_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MF_AUDIO_ENCODER,GstMFAudioEncoder))
|
||||
#define GST_MF_AUDIO_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_MF_AUDIO_ENCODER,GstMFAudioEncoderClass))
|
||||
#define GST_MF_AUDIO_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_MF_AUDIO_ENCODER,GstMFAudioEncoderClass))
|
||||
#define GST_IS_MF_AUDIO_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MF_AUDIO_ENCODER))
|
||||
#define GST_IS_MF_AUDIO_ENCODER_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_MF_AUDIO_ENCODER))
|
||||
|
||||
typedef struct _GstMFAudioEnc GstMFAudioEnc;
|
||||
typedef struct _GstMFAudioEncClass GstMFAudioEncClass;
|
||||
typedef struct _GstMFAudioEncoder GstMFAudioEncoder;
|
||||
typedef struct _GstMFAudioEncoderClass GstMFAudioEncoderClass;
|
||||
|
||||
struct _GstMFAudioEnc
|
||||
struct _GstMFAudioEncoder
|
||||
{
|
||||
GstAudioEncoder parent;
|
||||
|
||||
|
@ -46,7 +45,7 @@ struct _GstMFAudioEnc
|
|||
guint64 sample_count;
|
||||
};
|
||||
|
||||
struct _GstMFAudioEncClass
|
||||
struct _GstMFAudioEncoderClass
|
||||
{
|
||||
GstAudioEncoderClass parent_class;
|
||||
|
||||
|
@ -55,20 +54,19 @@ struct _GstMFAudioEncClass
|
|||
guint device_index;
|
||||
gint frame_samples;
|
||||
|
||||
gboolean (*get_output_type) (GstMFAudioEnc * mfenc,
|
||||
gboolean (*get_output_type) (GstMFAudioEncoder * encoder,
|
||||
GstAudioInfo * info,
|
||||
IMFMediaType ** output_type);
|
||||
|
||||
gboolean (*get_input_type) (GstMFAudioEnc * mfenc,
|
||||
gboolean (*get_input_type) (GstMFAudioEncoder * encoder,
|
||||
GstAudioInfo * info,
|
||||
IMFMediaType ** input_type);
|
||||
|
||||
gboolean (*set_src_caps) (GstMFAudioEnc * mfenc,
|
||||
gboolean (*set_src_caps) (GstMFAudioEncoder * encoder,
|
||||
GstAudioInfo * info);
|
||||
};
|
||||
|
||||
GType gst_mf_audio_enc_get_type (void);
|
||||
GType gst_mf_audio_encoder_get_type (void);
|
||||
|
||||
G_END_DECLS
|
||||
|
||||
#endif /* __GST_MF_AUDIO_ENC_H__ */
|
|
@ -39,7 +39,7 @@
|
|||
|
||||
#include <gst/gst.h>
|
||||
#include <gst/pbutils/pbutils.h>
|
||||
#include "gstmfvideoenc.h"
|
||||
#include "gstmfvideoencoder.h"
|
||||
#include "gstmfh264enc.h"
|
||||
#include <wrl.h>
|
||||
|
||||
|
@ -191,7 +191,7 @@ enum
|
|||
|
||||
typedef struct _GstMFH264Enc
|
||||
{
|
||||
GstMFVideoEnc parent;
|
||||
GstMFVideoEncoder parent;
|
||||
|
||||
/* properties */
|
||||
guint bitrate;
|
||||
|
@ -223,7 +223,7 @@ typedef struct _GstMFH264Enc
|
|||
|
||||
typedef struct _GstMFH264EncClass
|
||||
{
|
||||
GstMFVideoEncClass parent_class;
|
||||
GstMFVideoEncoderClass parent_class;
|
||||
} GstMFH264EncClass;
|
||||
|
||||
static GstElementClass *parent_class = nullptr;
|
||||
|
@ -233,9 +233,9 @@ static void gst_mf_h264_enc_get_property (GObject * object, guint prop_id,
|
|||
GValue * value, GParamSpec * pspec);
|
||||
static void gst_mf_h264_enc_set_property (GObject * object, guint prop_id,
|
||||
const GValue * value, GParamSpec * pspec);
|
||||
static gboolean gst_mf_h264_enc_set_option (GstMFVideoEnc * mfenc,
|
||||
static gboolean gst_mf_h264_enc_set_option (GstMFVideoEncoder * mfenc,
|
||||
GstVideoCodecState * state, IMFMediaType * output_type);
|
||||
static gboolean gst_mf_h264_enc_set_src_caps (GstMFVideoEnc * mfenc,
|
||||
static gboolean gst_mf_h264_enc_set_src_caps (GstMFVideoEncoder * mfenc,
|
||||
GstVideoCodecState * state, IMFMediaType * output_type);
|
||||
|
||||
static void
|
||||
|
@ -243,9 +243,9 @@ gst_mf_h264_enc_class_init (GstMFH264EncClass * klass, gpointer data)
|
|||
{
|
||||
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
|
||||
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
|
||||
GstMFVideoEncClass *mfenc_class = GST_MF_VIDEO_ENC_CLASS (klass);
|
||||
GstMFVideoEncClassData *cdata = (GstMFVideoEncClassData *) data;
|
||||
GstMFVideoEncDeviceCaps *device_caps = &cdata->device_caps;
|
||||
GstMFVideoEncoderClass *mfenc_class = GST_MF_VIDEO_ENCODER_CLASS (klass);
|
||||
GstMFVideoEncoderClassData *cdata = (GstMFVideoEncoderClassData *) data;
|
||||
GstMFVideoEncoderDeviceCaps *device_caps = &cdata->device_caps;
|
||||
gchar *long_name;
|
||||
gchar *classification;
|
||||
|
||||
|
@ -571,7 +571,7 @@ gst_mf_h264_enc_get_property (GObject * object, guint prop_id,
|
|||
GValue * value, GParamSpec * pspec)
|
||||
{
|
||||
GstMFH264Enc *self = (GstMFH264Enc *) (object);
|
||||
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (object);
|
||||
GstMFVideoEncoderClass *klass = GST_MF_VIDEO_ENCODER_GET_CLASS (object);
|
||||
|
||||
switch (prop_id) {
|
||||
case PROP_BITRATE:
|
||||
|
@ -782,12 +782,12 @@ gst_mf_h264_enc_content_type_to_enum (guint rc_mode)
|
|||
} G_STMT_END
|
||||
|
||||
static gboolean
|
||||
gst_mf_h264_enc_set_option (GstMFVideoEnc * mfenc, GstVideoCodecState * state,
|
||||
IMFMediaType * output_type)
|
||||
gst_mf_h264_enc_set_option (GstMFVideoEncoder * mfenc,
|
||||
GstVideoCodecState * state, IMFMediaType * output_type)
|
||||
{
|
||||
GstMFH264Enc *self = (GstMFH264Enc *) mfenc;
|
||||
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (mfenc);
|
||||
GstMFVideoEncDeviceCaps *device_caps = &klass->device_caps;
|
||||
GstMFVideoEncoderClass *klass = GST_MF_VIDEO_ENCODER_GET_CLASS (mfenc);
|
||||
GstMFVideoEncoderDeviceCaps *device_caps = &klass->device_caps;
|
||||
HRESULT hr;
|
||||
GstCaps *allowed_caps, *template_caps;
|
||||
eAVEncH264VProfile selected_profile = eAVEncH264VProfile_Main;
|
||||
|
@ -1022,7 +1022,7 @@ gst_mf_h264_enc_set_option (GstMFVideoEnc * mfenc, GstVideoCodecState * state,
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_h264_enc_set_src_caps (GstMFVideoEnc * mfenc,
|
||||
gst_mf_h264_enc_set_src_caps (GstMFVideoEncoder * mfenc,
|
||||
GstVideoCodecState * state, IMFMediaType * output_type)
|
||||
{
|
||||
GstMFH264Enc *self = (GstMFH264Enc *) mfenc;
|
||||
|
@ -1076,5 +1076,6 @@ gst_mf_h264_enc_plugin_init (GstPlugin * plugin, guint rank,
|
|||
|
||||
GST_DEBUG_CATEGORY_INIT (gst_mf_h264_enc_debug, "mfh264enc", 0, "mfh264enc");
|
||||
|
||||
gst_mf_video_enc_register (plugin, rank, &subtype, &type_info, d3d11_device);
|
||||
gst_mf_video_encoder_register (plugin,
|
||||
rank, &subtype, &type_info, d3d11_device);
|
||||
}
|
||||
|
|
|
@ -36,7 +36,7 @@
|
|||
#endif
|
||||
|
||||
#include <gst/gst.h>
|
||||
#include "gstmfvideoenc.h"
|
||||
#include "gstmfvideoencoder.h"
|
||||
#include "gstmfh265enc.h"
|
||||
#include <wrl.h>
|
||||
|
||||
|
@ -142,7 +142,7 @@ enum
|
|||
|
||||
typedef struct _GstMFH265Enc
|
||||
{
|
||||
GstMFVideoEnc parent;
|
||||
GstMFVideoEncoder parent;
|
||||
|
||||
/* properties */
|
||||
guint bitrate;
|
||||
|
@ -168,7 +168,7 @@ typedef struct _GstMFH265Enc
|
|||
|
||||
typedef struct _GstMFH265EncClass
|
||||
{
|
||||
GstMFVideoEncClass parent_class;
|
||||
GstMFVideoEncoderClass parent_class;
|
||||
} GstMFH265EncClass;
|
||||
|
||||
static GstElementClass *parent_class = nullptr;
|
||||
|
@ -177,9 +177,9 @@ static void gst_mf_h265_enc_get_property (GObject * object, guint prop_id,
|
|||
GValue * value, GParamSpec * pspec);
|
||||
static void gst_mf_h265_enc_set_property (GObject * object, guint prop_id,
|
||||
const GValue * value, GParamSpec * pspec);
|
||||
static gboolean gst_mf_h265_enc_set_option (GstMFVideoEnc * mfenc,
|
||||
static gboolean gst_mf_h265_enc_set_option (GstMFVideoEncoder * encoder,
|
||||
GstVideoCodecState * state, IMFMediaType * output_type);
|
||||
static gboolean gst_mf_h265_enc_set_src_caps (GstMFVideoEnc * mfenc,
|
||||
static gboolean gst_mf_h265_enc_set_src_caps (GstMFVideoEncoder * encoder,
|
||||
GstVideoCodecState * state, IMFMediaType * output_type);
|
||||
|
||||
static void
|
||||
|
@ -187,9 +187,9 @@ gst_mf_h265_enc_class_init (GstMFH265EncClass * klass, gpointer data)
|
|||
{
|
||||
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
|
||||
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
|
||||
GstMFVideoEncClass *mfenc_class = GST_MF_VIDEO_ENC_CLASS (klass);
|
||||
GstMFVideoEncClassData *cdata = (GstMFVideoEncClassData *) data;
|
||||
GstMFVideoEncDeviceCaps *device_caps = &cdata->device_caps;
|
||||
GstMFVideoEncoderClass *encoder_class = GST_MF_VIDEO_ENCODER_CLASS (klass);
|
||||
GstMFVideoEncoderClassData *cdata = (GstMFVideoEncoderClassData *) data;
|
||||
GstMFVideoEncoderDeviceCaps *device_caps = &cdata->device_caps;
|
||||
gchar *long_name;
|
||||
gchar *classification;
|
||||
|
||||
|
@ -404,13 +404,14 @@ gst_mf_h265_enc_class_init (GstMFH265EncClass * klass, gpointer data)
|
|||
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
|
||||
cdata->src_caps));
|
||||
|
||||
mfenc_class->set_option = GST_DEBUG_FUNCPTR (gst_mf_h265_enc_set_option);
|
||||
mfenc_class->set_src_caps = GST_DEBUG_FUNCPTR (gst_mf_h265_enc_set_src_caps);
|
||||
encoder_class->set_option = GST_DEBUG_FUNCPTR (gst_mf_h265_enc_set_option);
|
||||
encoder_class->set_src_caps =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_h265_enc_set_src_caps);
|
||||
|
||||
mfenc_class->codec_id = MFVideoFormat_HEVC;
|
||||
mfenc_class->enum_flags = cdata->enum_flags;
|
||||
mfenc_class->device_index = cdata->device_index;
|
||||
mfenc_class->device_caps = *device_caps;
|
||||
encoder_class->codec_id = MFVideoFormat_HEVC;
|
||||
encoder_class->enum_flags = cdata->enum_flags;
|
||||
encoder_class->device_index = cdata->device_index;
|
||||
encoder_class->device_caps = *device_caps;
|
||||
|
||||
g_free (cdata->device_name);
|
||||
gst_caps_unref (cdata->sink_caps);
|
||||
|
@ -444,7 +445,7 @@ gst_mf_h265_enc_get_property (GObject * object, guint prop_id,
|
|||
GValue * value, GParamSpec * pspec)
|
||||
{
|
||||
GstMFH265Enc *self = (GstMFH265Enc *) (object);
|
||||
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (object);
|
||||
GstMFVideoEncoderClass *klass = GST_MF_VIDEO_ENCODER_GET_CLASS (object);
|
||||
|
||||
switch (prop_id) {
|
||||
case PROP_BITRATE:
|
||||
|
@ -608,20 +609,20 @@ gst_mf_h265_enc_content_type_to_enum (guint rc_mode)
|
|||
} G_STMT_END
|
||||
|
||||
static gboolean
|
||||
gst_mf_h265_enc_set_option (GstMFVideoEnc * mfenc, GstVideoCodecState * state,
|
||||
IMFMediaType * output_type)
|
||||
gst_mf_h265_enc_set_option (GstMFVideoEncoder * encoder,
|
||||
GstVideoCodecState * state, IMFMediaType * output_type)
|
||||
{
|
||||
GstMFH265Enc *self = (GstMFH265Enc *) mfenc;
|
||||
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (mfenc);
|
||||
GstMFVideoEncDeviceCaps *device_caps = &klass->device_caps;
|
||||
GstMFH265Enc *self = (GstMFH265Enc *) encoder;
|
||||
GstMFVideoEncoderClass *klass = GST_MF_VIDEO_ENCODER_GET_CLASS (encoder);
|
||||
GstMFVideoEncoderDeviceCaps *device_caps = &klass->device_caps;
|
||||
HRESULT hr;
|
||||
GstMFTransform *transform = mfenc->transform;
|
||||
GstMFTransform *transform = encoder->transform;
|
||||
|
||||
hr = output_type->SetGUID (MF_MT_SUBTYPE, MFVideoFormat_HEVC);
|
||||
if (!gst_mf_result (hr))
|
||||
return FALSE;
|
||||
|
||||
if (GST_VIDEO_INFO_FORMAT (&mfenc->input_state->info) ==
|
||||
if (GST_VIDEO_INFO_FORMAT (&encoder->input_state->info) ==
|
||||
GST_VIDEO_FORMAT_P010_10LE) {
|
||||
hr = output_type->SetUINT32 (MF_MT_MPEG2_PROFILE,
|
||||
eAVEncH265VProfile_Main_420_10);
|
||||
|
@ -667,12 +668,12 @@ gst_mf_h265_enc_set_option (GstMFVideoEnc * mfenc, GstVideoCodecState * state,
|
|||
WARNING_HR (hr, CODECAPI_AVEncCommonQualityVsSpeed);
|
||||
}
|
||||
|
||||
mfenc->has_reorder_frame = FALSE;
|
||||
encoder->has_reorder_frame = FALSE;
|
||||
if (device_caps->bframes) {
|
||||
hr = gst_mf_transform_set_codec_api_uint32 (transform,
|
||||
&CODECAPI_AVEncMPVDefaultBPictureCount, self->bframes);
|
||||
if (SUCCEEDED (hr) && self->bframes > 0)
|
||||
mfenc->has_reorder_frame = TRUE;
|
||||
encoder->has_reorder_frame = TRUE;
|
||||
|
||||
WARNING_HR (hr, CODECAPI_AVEncMPVDefaultBPictureCount);
|
||||
}
|
||||
|
@ -761,10 +762,10 @@ gst_mf_h265_enc_set_option (GstMFVideoEnc * mfenc, GstVideoCodecState * state,
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_h265_enc_set_src_caps (GstMFVideoEnc * mfenc,
|
||||
gst_mf_h265_enc_set_src_caps (GstMFVideoEncoder * encoder,
|
||||
GstVideoCodecState * state, IMFMediaType * output_type)
|
||||
{
|
||||
GstMFH265Enc *self = (GstMFH265Enc *) mfenc;
|
||||
GstMFH265Enc *self = (GstMFH265Enc *) encoder;
|
||||
GstVideoCodecState *out_state;
|
||||
GstStructure *s;
|
||||
GstCaps *out_caps;
|
||||
|
@ -776,7 +777,7 @@ gst_mf_h265_enc_set_src_caps (GstMFVideoEnc * mfenc,
|
|||
gst_structure_set (s, "stream-format", G_TYPE_STRING, "byte-stream",
|
||||
"alignment", G_TYPE_STRING, "au", nullptr);
|
||||
|
||||
if (GST_VIDEO_INFO_FORMAT (&mfenc->input_state->info) ==
|
||||
if (GST_VIDEO_INFO_FORMAT (&encoder->input_state->info) ==
|
||||
GST_VIDEO_FORMAT_P010_10LE) {
|
||||
gst_structure_set (s, "profile", G_TYPE_STRING, "main-10", nullptr);
|
||||
} else {
|
||||
|
@ -821,5 +822,6 @@ gst_mf_h265_enc_plugin_init (GstPlugin * plugin, guint rank,
|
|||
|
||||
GST_DEBUG_CATEGORY_INIT (gst_mf_h265_enc_debug, "mfh265enc", 0, "mfh265enc");
|
||||
|
||||
gst_mf_video_enc_register (plugin, rank, &subtype, &type_info, d3d11_device);
|
||||
gst_mf_video_encoder_register (plugin,
|
||||
rank, &subtype, &type_info, d3d11_device);
|
||||
}
|
||||
|
|
|
@ -36,7 +36,7 @@
|
|||
|
||||
#include <gst/gst.h>
|
||||
#include <gst/pbutils/pbutils.h>
|
||||
#include "gstmfaudioenc.h"
|
||||
#include "gstmfaudioencoder.h"
|
||||
#include "gstmfmp3enc.h"
|
||||
#include <wrl.h>
|
||||
#include <set>
|
||||
|
@ -60,7 +60,7 @@ enum
|
|||
|
||||
typedef struct _GstMFMp3Enc
|
||||
{
|
||||
GstMFAudioEnc parent;
|
||||
GstMFAudioEncoder parent;
|
||||
|
||||
/* properties */
|
||||
guint bitrate;
|
||||
|
@ -68,7 +68,7 @@ typedef struct _GstMFMp3Enc
|
|||
|
||||
typedef struct _GstMFMp3EncClass
|
||||
{
|
||||
GstMFAudioEncClass parent_class;
|
||||
GstMFAudioEncoderClass parent_class;
|
||||
|
||||
} GstMFMp3EncClass;
|
||||
|
||||
|
@ -90,11 +90,11 @@ static void gst_mf_mp3_enc_get_property (GObject * object, guint prop_id,
|
|||
GValue * value, GParamSpec * pspec);
|
||||
static void gst_mf_mp3_enc_set_property (GObject * object, guint prop_id,
|
||||
const GValue * value, GParamSpec * pspec);
|
||||
static gboolean gst_mf_mp3_enc_get_output_type (GstMFAudioEnc * mfenc,
|
||||
static gboolean gst_mf_mp3_enc_get_output_type (GstMFAudioEncoder * encoder,
|
||||
GstAudioInfo * info, IMFMediaType ** output_type);
|
||||
static gboolean gst_mf_mp3_enc_get_input_type (GstMFAudioEnc * mfenc,
|
||||
static gboolean gst_mf_mp3_enc_get_input_type (GstMFAudioEncoder * encoder,
|
||||
GstAudioInfo * info, IMFMediaType ** input_type);
|
||||
static gboolean gst_mf_mp3_enc_set_src_caps (GstMFAudioEnc * mfenc,
|
||||
static gboolean gst_mf_mp3_enc_set_src_caps (GstMFAudioEncoder * encoder,
|
||||
GstAudioInfo * info);
|
||||
|
||||
static void
|
||||
|
@ -102,7 +102,7 @@ gst_mf_mp3_enc_class_init (GstMFMp3EncClass * klass, gpointer data)
|
|||
{
|
||||
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
|
||||
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
|
||||
GstMFAudioEncClass *mfenc_class = GST_MF_AUDIO_ENC_CLASS (klass);
|
||||
GstMFAudioEncoderClass *encoder_class = GST_MF_AUDIO_ENCODER_CLASS (klass);
|
||||
GstMFMp3EncClassData *cdata = (GstMFMp3EncClassData *) data;
|
||||
gchar *long_name;
|
||||
gchar *classification;
|
||||
|
@ -149,16 +149,16 @@ gst_mf_mp3_enc_class_init (GstMFMp3EncClass * klass, gpointer data)
|
|||
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
|
||||
cdata->src_caps));
|
||||
|
||||
mfenc_class->get_output_type =
|
||||
encoder_class->get_output_type =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_mp3_enc_get_output_type);
|
||||
mfenc_class->get_input_type =
|
||||
encoder_class->get_input_type =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_mp3_enc_get_input_type);
|
||||
mfenc_class->set_src_caps = GST_DEBUG_FUNCPTR (gst_mf_mp3_enc_set_src_caps);
|
||||
encoder_class->set_src_caps = GST_DEBUG_FUNCPTR (gst_mf_mp3_enc_set_src_caps);
|
||||
|
||||
mfenc_class->codec_id = MFAudioFormat_MP3;
|
||||
mfenc_class->enum_flags = cdata->enum_flags;
|
||||
mfenc_class->device_index = cdata->device_index;
|
||||
mfenc_class->frame_samples = 1152;
|
||||
encoder_class->codec_id = MFAudioFormat_MP3;
|
||||
encoder_class->enum_flags = cdata->enum_flags;
|
||||
encoder_class->device_index = cdata->device_index;
|
||||
encoder_class->frame_samples = 1152;
|
||||
|
||||
g_free (cdata->device_name);
|
||||
gst_caps_unref (cdata->sink_caps);
|
||||
|
@ -205,11 +205,11 @@ gst_mf_mp3_enc_set_property (GObject * object, guint prop_id,
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_mp3_enc_get_output_type (GstMFAudioEnc * mfenc, GstAudioInfo * info,
|
||||
IMFMediaType ** output_type)
|
||||
gst_mf_mp3_enc_get_output_type (GstMFAudioEncoder * encoder,
|
||||
GstAudioInfo * info, IMFMediaType ** output_type)
|
||||
{
|
||||
GstMFMp3Enc *self = (GstMFMp3Enc *) mfenc;
|
||||
GstMFTransform *transform = mfenc->transform;
|
||||
GstMFMp3Enc *self = (GstMFMp3Enc *) encoder;
|
||||
GstMFTransform *transform = encoder->transform;
|
||||
GList *output_list = nullptr;
|
||||
GList *iter;
|
||||
ComPtr < IMFMediaType > target_output;
|
||||
|
@ -339,11 +339,11 @@ gst_mf_mp3_enc_get_output_type (GstMFAudioEnc * mfenc, GstAudioInfo * info,
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_mp3_enc_get_input_type (GstMFAudioEnc * mfenc, GstAudioInfo * info,
|
||||
gst_mf_mp3_enc_get_input_type (GstMFAudioEncoder * encoder, GstAudioInfo * info,
|
||||
IMFMediaType ** input_type)
|
||||
{
|
||||
GstMFMp3Enc *self = (GstMFMp3Enc *) mfenc;
|
||||
GstMFTransform *transform = mfenc->transform;
|
||||
GstMFMp3Enc *self = (GstMFMp3Enc *) encoder;
|
||||
GstMFTransform *transform = encoder->transform;
|
||||
GList *input_list = nullptr;
|
||||
GList *iter;
|
||||
ComPtr < IMFMediaType > target_input;
|
||||
|
@ -416,15 +416,15 @@ gst_mf_mp3_enc_get_input_type (GstMFAudioEnc * mfenc, GstAudioInfo * info,
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_mp3_enc_set_src_caps (GstMFAudioEnc * mfenc, GstAudioInfo * info)
|
||||
gst_mf_mp3_enc_set_src_caps (GstMFAudioEncoder * encoder, GstAudioInfo * info)
|
||||
{
|
||||
GstMFMp3Enc *self = (GstMFMp3Enc *) mfenc;
|
||||
GstMFMp3Enc *self = (GstMFMp3Enc *) encoder;
|
||||
GstCaps *src_caps;
|
||||
gboolean ret;
|
||||
ComPtr < IMFMediaType > output_type;
|
||||
gint version = 1;
|
||||
|
||||
if (!gst_mf_transform_get_output_current_type (mfenc->transform,
|
||||
if (!gst_mf_transform_get_output_current_type (encoder->transform,
|
||||
&output_type)) {
|
||||
GST_ERROR_OBJECT (self, "Couldn't get current output type");
|
||||
return FALSE;
|
||||
|
@ -502,7 +502,7 @@ gst_mf_mp3_enc_register (GstPlugin * plugin, guint rank,
|
|||
}
|
||||
|
||||
type =
|
||||
g_type_register_static (GST_TYPE_MF_AUDIO_ENC, type_name, &type_info,
|
||||
g_type_register_static (GST_TYPE_MF_AUDIO_ENCODER, type_name, &type_info,
|
||||
(GTypeFlags) 0);
|
||||
|
||||
/* make lower rank than default device */
|
||||
|
|
|
@ -244,10 +244,13 @@ gst_mf_enum_media_type_from_source_reader (IMFSourceReader * source_reader,
|
|||
}
|
||||
|
||||
done:
|
||||
if (!list)
|
||||
return FALSE;
|
||||
|
||||
list = g_list_reverse (list);
|
||||
*media_types = list;
|
||||
|
||||
return !!list;
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -935,12 +938,14 @@ gst_mf_source_enum_device_activate (GstMFSourceReader * self,
|
|||
}
|
||||
|
||||
done:
|
||||
ret = g_list_reverse (ret);
|
||||
CoTaskMemFree (devices);
|
||||
|
||||
*device_sources = ret;
|
||||
if (!ret)
|
||||
return FALSE;
|
||||
|
||||
return !!ret;
|
||||
*device_sources = g_list_reverse (ret);
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
static void
|
||||
|
|
|
@ -537,7 +537,11 @@ gst_mf_transform_thread_func (GstMFTransform * self)
|
|||
|
||||
CoTaskMemFree (devices);
|
||||
|
||||
self->hardware = !!(self->enum_params.enum_flags & MFT_ENUM_FLAG_HARDWARE);
|
||||
if ((self->enum_params.enum_flags & MFT_ENUM_FLAG_HARDWARE) != 0)
|
||||
self->hardware = TRUE;
|
||||
else
|
||||
self->hardware = FALSE;
|
||||
|
||||
self->initialized = TRUE;
|
||||
|
||||
run_loop:
|
||||
|
@ -1131,9 +1135,12 @@ gst_mf_transform_get_input_available_types (GstMFTransform * object,
|
|||
index++;
|
||||
} while (SUCCEEDED (hr));
|
||||
|
||||
if (!list)
|
||||
return FALSE;
|
||||
|
||||
*input_types = list;
|
||||
|
||||
return !!list;
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
gboolean
|
||||
|
@ -1165,9 +1172,12 @@ gst_mf_transform_get_output_available_types (GstMFTransform * object,
|
|||
index++;
|
||||
} while (SUCCEEDED (hr));
|
||||
|
||||
if (!list)
|
||||
return FALSE;
|
||||
|
||||
*output_types = list;
|
||||
|
||||
return !!list;
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
gboolean
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
#endif
|
||||
|
||||
#include <gst/gst.h>
|
||||
#include "gstmfvideoenc.h"
|
||||
#include "gstmfvideoencoder.h"
|
||||
#include "gstmfvideobuffer.h"
|
||||
#include "gstmfplatloader.h"
|
||||
#include <wrl.h>
|
||||
|
@ -38,72 +38,77 @@
|
|||
using namespace Microsoft::WRL;
|
||||
/* *INDENT-ON* */
|
||||
|
||||
GST_DEBUG_CATEGORY_EXTERN (gst_mf_video_enc_debug);
|
||||
#define GST_CAT_DEFAULT gst_mf_video_enc_debug
|
||||
GST_DEBUG_CATEGORY_EXTERN (gst_mf_video_encoder_debug);
|
||||
#define GST_CAT_DEFAULT gst_mf_video_encoder_debug
|
||||
|
||||
#define gst_mf_video_enc_parent_class parent_class
|
||||
G_DEFINE_ABSTRACT_TYPE (GstMFVideoEnc, gst_mf_video_enc,
|
||||
#define gst_mf_video_encoder_parent_class parent_class
|
||||
G_DEFINE_ABSTRACT_TYPE (GstMFVideoEncoder, gst_mf_video_encoder,
|
||||
GST_TYPE_VIDEO_ENCODER);
|
||||
|
||||
static void gst_mf_video_enc_dispose (GObject * object);
|
||||
static void gst_mf_video_enc_set_context (GstElement * element,
|
||||
static void gst_mf_video_encoder_dispose (GObject * object);
|
||||
static void gst_mf_video_encoder_set_context (GstElement * element,
|
||||
GstContext * context);
|
||||
static gboolean gst_mf_video_enc_open (GstVideoEncoder * enc);
|
||||
static gboolean gst_mf_video_enc_close (GstVideoEncoder * enc);
|
||||
static gboolean gst_mf_video_enc_start (GstVideoEncoder * enc);
|
||||
static gboolean gst_mf_video_enc_set_format (GstVideoEncoder * enc,
|
||||
static gboolean gst_mf_video_encoder_open (GstVideoEncoder * enc);
|
||||
static gboolean gst_mf_video_encoder_close (GstVideoEncoder * enc);
|
||||
static gboolean gst_mf_video_encoder_start (GstVideoEncoder * enc);
|
||||
static gboolean gst_mf_video_encoder_set_format (GstVideoEncoder * enc,
|
||||
GstVideoCodecState * state);
|
||||
static GstFlowReturn gst_mf_video_enc_handle_frame (GstVideoEncoder * enc,
|
||||
static GstFlowReturn gst_mf_video_encoder_handle_frame (GstVideoEncoder * enc,
|
||||
GstVideoCodecFrame * frame);
|
||||
static GstFlowReturn gst_mf_video_enc_finish (GstVideoEncoder * enc);
|
||||
static gboolean gst_mf_video_enc_flush (GstVideoEncoder * enc);
|
||||
static gboolean gst_mf_video_enc_propose_allocation (GstVideoEncoder * enc,
|
||||
static GstFlowReturn gst_mf_video_encoder_finish (GstVideoEncoder * enc);
|
||||
static gboolean gst_mf_video_encoder_flush (GstVideoEncoder * enc);
|
||||
static gboolean gst_mf_video_encoder_propose_allocation (GstVideoEncoder * enc,
|
||||
GstQuery * query);
|
||||
static gboolean gst_mf_video_enc_sink_query (GstVideoEncoder * enc,
|
||||
static gboolean gst_mf_video_encoder_sink_query (GstVideoEncoder * enc,
|
||||
GstQuery * query);
|
||||
static gboolean gst_mf_video_enc_src_query (GstVideoEncoder * enc,
|
||||
static gboolean gst_mf_video_encoder_src_query (GstVideoEncoder * enc,
|
||||
GstQuery * query);
|
||||
|
||||
static HRESULT gst_mf_video_on_new_sample (GstMFTransform * object,
|
||||
IMFSample * sample, GstMFVideoEnc * self);
|
||||
IMFSample * sample, GstMFVideoEncoder * self);
|
||||
|
||||
static void
|
||||
gst_mf_video_enc_class_init (GstMFVideoEncClass * klass)
|
||||
gst_mf_video_encoder_class_init (GstMFVideoEncoderClass * klass)
|
||||
{
|
||||
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
|
||||
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
|
||||
GstVideoEncoderClass *videoenc_class = GST_VIDEO_ENCODER_CLASS (klass);
|
||||
|
||||
gobject_class->dispose = gst_mf_video_enc_dispose;
|
||||
gobject_class->dispose = gst_mf_video_encoder_dispose;
|
||||
|
||||
element_class->set_context = GST_DEBUG_FUNCPTR (gst_mf_video_enc_set_context);
|
||||
element_class->set_context =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_video_encoder_set_context);
|
||||
|
||||
videoenc_class->open = GST_DEBUG_FUNCPTR (gst_mf_video_enc_open);
|
||||
videoenc_class->close = GST_DEBUG_FUNCPTR (gst_mf_video_enc_close);
|
||||
videoenc_class->start = GST_DEBUG_FUNCPTR (gst_mf_video_enc_start);
|
||||
videoenc_class->set_format = GST_DEBUG_FUNCPTR (gst_mf_video_enc_set_format);
|
||||
videoenc_class->open = GST_DEBUG_FUNCPTR (gst_mf_video_encoder_open);
|
||||
videoenc_class->close = GST_DEBUG_FUNCPTR (gst_mf_video_encoder_close);
|
||||
videoenc_class->start = GST_DEBUG_FUNCPTR (gst_mf_video_encoder_start);
|
||||
videoenc_class->set_format =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_video_encoder_set_format);
|
||||
videoenc_class->handle_frame =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_video_enc_handle_frame);
|
||||
videoenc_class->finish = GST_DEBUG_FUNCPTR (gst_mf_video_enc_finish);
|
||||
videoenc_class->flush = GST_DEBUG_FUNCPTR (gst_mf_video_enc_flush);
|
||||
GST_DEBUG_FUNCPTR (gst_mf_video_encoder_handle_frame);
|
||||
videoenc_class->finish = GST_DEBUG_FUNCPTR (gst_mf_video_encoder_finish);
|
||||
videoenc_class->flush = GST_DEBUG_FUNCPTR (gst_mf_video_encoder_flush);
|
||||
videoenc_class->propose_allocation =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_video_enc_propose_allocation);
|
||||
videoenc_class->sink_query = GST_DEBUG_FUNCPTR (gst_mf_video_enc_sink_query);
|
||||
videoenc_class->src_query = GST_DEBUG_FUNCPTR (gst_mf_video_enc_src_query);
|
||||
GST_DEBUG_FUNCPTR (gst_mf_video_encoder_propose_allocation);
|
||||
videoenc_class->sink_query =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_video_encoder_sink_query);
|
||||
videoenc_class->src_query =
|
||||
GST_DEBUG_FUNCPTR (gst_mf_video_encoder_src_query);
|
||||
|
||||
gst_type_mark_as_plugin_api (GST_TYPE_MF_VIDEO_ENC, (GstPluginAPIFlags) 0);
|
||||
gst_type_mark_as_plugin_api (GST_TYPE_MF_VIDEO_ENCODER,
|
||||
(GstPluginAPIFlags) 0);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_mf_video_enc_init (GstMFVideoEnc * self)
|
||||
gst_mf_video_encoder_init (GstMFVideoEncoder * self)
|
||||
{
|
||||
}
|
||||
|
||||
static void
|
||||
gst_mf_video_enc_dispose (GObject * object)
|
||||
gst_mf_video_encoder_dispose (GObject * object)
|
||||
{
|
||||
#if GST_MF_HAVE_D3D11
|
||||
GstMFVideoEnc *self = GST_MF_VIDEO_ENC (object);
|
||||
GstMFVideoEncoder *self = GST_MF_VIDEO_ENCODER (object);
|
||||
|
||||
gst_clear_object (&self->d3d11_device);
|
||||
gst_clear_object (&self->other_d3d11_device);
|
||||
|
@ -113,12 +118,12 @@ gst_mf_video_enc_dispose (GObject * object)
|
|||
}
|
||||
|
||||
static void
|
||||
gst_mf_video_enc_set_context (GstElement * element, GstContext * context)
|
||||
gst_mf_video_encoder_set_context (GstElement * element, GstContext * context)
|
||||
{
|
||||
#if GST_MF_HAVE_D3D11
|
||||
GstMFVideoEnc *self = GST_MF_VIDEO_ENC (element);
|
||||
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (self);
|
||||
GstMFVideoEncDeviceCaps *device_caps = &klass->device_caps;
|
||||
GstMFVideoEncoder *self = GST_MF_VIDEO_ENCODER (element);
|
||||
GstMFVideoEncoderClass *klass = GST_MF_VIDEO_ENCODER_GET_CLASS (self);
|
||||
GstMFVideoEncoderDeviceCaps *device_caps = &klass->device_caps;
|
||||
|
||||
if (device_caps->d3d11_aware) {
|
||||
gst_d3d11_handle_set_context_for_adapter_luid (element, context,
|
||||
|
@ -130,14 +135,13 @@ gst_mf_video_enc_set_context (GstElement * element, GstContext * context)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_video_enc_open (GstVideoEncoder * enc)
|
||||
gst_mf_video_encoder_open (GstVideoEncoder * enc)
|
||||
{
|
||||
GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
|
||||
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (enc);
|
||||
GstMFVideoEncDeviceCaps *device_caps = &klass->device_caps;
|
||||
GstMFVideoEncoder *self = GST_MF_VIDEO_ENCODER (enc);
|
||||
GstMFVideoEncoderClass *klass = GST_MF_VIDEO_ENCODER_GET_CLASS (enc);
|
||||
GstMFVideoEncoderDeviceCaps *device_caps = &klass->device_caps;
|
||||
GstMFTransformEnumParams enum_params = { 0, };
|
||||
MFT_REGISTER_TYPE_INFO output_type;
|
||||
gboolean ret;
|
||||
|
||||
#if GST_MF_HAVE_D3D11
|
||||
if (device_caps->d3d11_aware) {
|
||||
|
@ -214,9 +218,7 @@ gst_mf_video_enc_open (GstVideoEncoder * enc)
|
|||
device_caps->d3d11_aware, device_caps->adapter_luid);
|
||||
|
||||
self->transform = gst_mf_transform_new (&enum_params);
|
||||
ret = !!self->transform;
|
||||
|
||||
if (!ret) {
|
||||
if (!self->transform) {
|
||||
GST_ERROR_OBJECT (self, "Cannot create MFT object");
|
||||
return FALSE;
|
||||
}
|
||||
|
@ -234,13 +236,13 @@ gst_mf_video_enc_open (GstVideoEncoder * enc)
|
|||
self->async_mft = FALSE;
|
||||
}
|
||||
|
||||
return ret;
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_video_enc_close (GstVideoEncoder * enc)
|
||||
gst_mf_video_encoder_close (GstVideoEncoder * enc)
|
||||
{
|
||||
GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
|
||||
GstMFVideoEncoder *self = GST_MF_VIDEO_ENCODER (enc);
|
||||
|
||||
gst_clear_object (&self->transform);
|
||||
|
||||
|
@ -268,7 +270,7 @@ gst_mf_video_enc_close (GstVideoEncoder * enc)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_video_enc_start (GstVideoEncoder * enc)
|
||||
gst_mf_video_encoder_start (GstVideoEncoder * enc)
|
||||
{
|
||||
/* Media Foundation Transform will shift PTS in case that B-frame is enabled.
|
||||
* We need to adjust DTS correspondingly */
|
||||
|
@ -278,10 +280,11 @@ gst_mf_video_enc_start (GstVideoEncoder * enc)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_video_enc_set_format (GstVideoEncoder * enc, GstVideoCodecState * state)
|
||||
gst_mf_video_encoder_set_format (GstVideoEncoder * enc,
|
||||
GstVideoCodecState * state)
|
||||
{
|
||||
GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
|
||||
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (enc);
|
||||
GstMFVideoEncoder *self = GST_MF_VIDEO_ENCODER (enc);
|
||||
GstMFVideoEncoderClass *klass = GST_MF_VIDEO_ENCODER_GET_CLASS (enc);
|
||||
GstVideoInfo *info = &state->info;
|
||||
ComPtr < IMFMediaType > in_type;
|
||||
ComPtr < IMFMediaType > out_type;
|
||||
|
@ -292,7 +295,7 @@ gst_mf_video_enc_set_format (GstVideoEncoder * enc, GstVideoCodecState * state)
|
|||
|
||||
GST_DEBUG_OBJECT (self, "Set format");
|
||||
|
||||
gst_mf_video_enc_finish (enc);
|
||||
gst_mf_video_encoder_finish (enc);
|
||||
|
||||
self->mf_pts_offset = 0;
|
||||
self->has_reorder_frame = FALSE;
|
||||
|
@ -547,7 +550,7 @@ gst_mf_video_buffer_free (GstVideoFrame * frame)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_video_enc_frame_needs_copy (GstVideoFrame * vframe)
|
||||
gst_mf_video_encoder_frame_needs_copy (GstVideoFrame * vframe)
|
||||
{
|
||||
/* Single plane data can be used without copy */
|
||||
if (GST_VIDEO_FRAME_N_PLANES (vframe) == 1)
|
||||
|
@ -632,19 +635,19 @@ gst_mf_video_enc_frame_needs_copy (GstVideoFrame * vframe)
|
|||
typedef struct
|
||||
{
|
||||
LONGLONG mf_pts;
|
||||
} GstMFVideoEncFrameData;
|
||||
} GstMFVideoEncoderFrameData;
|
||||
|
||||
static gboolean
|
||||
gst_mf_video_enc_process_input (GstMFVideoEnc * self,
|
||||
gst_mf_video_encoder_process_input (GstMFVideoEncoder * self,
|
||||
GstVideoCodecFrame * frame, IMFSample * sample)
|
||||
{
|
||||
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (self);
|
||||
GstMFVideoEncoderClass *klass = GST_MF_VIDEO_ENCODER_GET_CLASS (self);
|
||||
HRESULT hr;
|
||||
gboolean unset_force_keyframe = FALSE;
|
||||
GstMFVideoEncFrameData *frame_data = nullptr;
|
||||
GstMFVideoEncoderFrameData *frame_data = nullptr;
|
||||
gboolean res;
|
||||
|
||||
frame_data = g_new0 (GstMFVideoEncFrameData, 1);
|
||||
frame_data = g_new0 (GstMFVideoEncoderFrameData, 1);
|
||||
frame_data->mf_pts = frame->pts / 100;
|
||||
|
||||
gst_video_codec_frame_set_user_data (frame,
|
||||
|
@ -696,7 +699,8 @@ gst_mf_video_enc_process_input (GstMFVideoEnc * self,
|
|||
}
|
||||
|
||||
static GstVideoCodecFrame *
|
||||
gst_mf_video_enc_find_output_frame (GstMFVideoEnc * self, LONGLONG mf_pts)
|
||||
gst_mf_video_encoder_find_output_frame (GstMFVideoEncoder * self,
|
||||
LONGLONG mf_pts)
|
||||
{
|
||||
GList *l, *walk = gst_video_encoder_get_frames (GST_VIDEO_ENCODER (self));
|
||||
GstVideoCodecFrame *ret = nullptr;
|
||||
|
@ -705,7 +709,7 @@ gst_mf_video_enc_find_output_frame (GstMFVideoEnc * self, LONGLONG mf_pts)
|
|||
|
||||
for (l = walk; l; l = l->next) {
|
||||
GstVideoCodecFrame *frame = (GstVideoCodecFrame *) l->data;
|
||||
GstMFVideoEncFrameData *data = (GstMFVideoEncFrameData *)
|
||||
GstMFVideoEncoderFrameData *data = (GstMFVideoEncoderFrameData *)
|
||||
gst_video_codec_frame_get_user_data (frame);
|
||||
LONGLONG abs_diff;
|
||||
|
||||
|
@ -746,7 +750,8 @@ gst_mf_video_enc_find_output_frame (GstMFVideoEnc * self, LONGLONG mf_pts)
|
|||
}
|
||||
|
||||
static HRESULT
|
||||
gst_mf_video_enc_finish_sample (GstMFVideoEnc * self, IMFSample * sample)
|
||||
gst_mf_video_encoder_finish_sample (GstMFVideoEncoder * self,
|
||||
IMFSample * sample)
|
||||
{
|
||||
HRESULT hr = S_OK;
|
||||
BYTE *data;
|
||||
|
@ -845,7 +850,7 @@ gst_mf_video_enc_finish_sample (GstMFVideoEnc * self, IMFSample * sample)
|
|||
}
|
||||
}
|
||||
|
||||
frame = gst_mf_video_enc_find_output_frame (self, target_mf_pts);
|
||||
frame = gst_mf_video_encoder_find_output_frame (self, target_mf_pts);
|
||||
|
||||
if (frame) {
|
||||
if (keyframe) {
|
||||
|
@ -906,7 +911,7 @@ done:
|
|||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_mf_video_enc_process_output (GstMFVideoEnc * self)
|
||||
gst_mf_video_encoder_process_output (GstMFVideoEncoder * self)
|
||||
{
|
||||
ComPtr < IMFSample > sample;
|
||||
GstFlowReturn res = GST_FLOW_ERROR;
|
||||
|
@ -916,13 +921,13 @@ gst_mf_video_enc_process_output (GstMFVideoEnc * self)
|
|||
if (res != GST_FLOW_OK)
|
||||
return res;
|
||||
|
||||
gst_mf_video_enc_finish_sample (self, sample.Get ());
|
||||
gst_mf_video_encoder_finish_sample (self, sample.Get ());
|
||||
|
||||
return self->last_ret;
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_video_enc_create_input_sample (GstMFVideoEnc * self,
|
||||
gst_mf_video_encoder_create_input_sample (GstMFVideoEncoder * self,
|
||||
GstVideoCodecFrame * frame, IMFSample ** sample)
|
||||
{
|
||||
HRESULT hr;
|
||||
|
@ -948,7 +953,7 @@ gst_mf_video_enc_create_input_sample (GstMFVideoEnc * self,
|
|||
goto error;
|
||||
|
||||
/* Check if we can forward this memory to Media Foundation without copy */
|
||||
need_copy = gst_mf_video_enc_frame_needs_copy (vframe);
|
||||
need_copy = gst_mf_video_encoder_frame_needs_copy (vframe);
|
||||
if (need_copy) {
|
||||
GST_TRACE_OBJECT (self, "Copy input buffer into Media Foundation memory");
|
||||
hr = MFCreateMemoryBuffer (GST_VIDEO_INFO_SIZE (info), &media_buffer);
|
||||
|
@ -1031,7 +1036,7 @@ error:
|
|||
|
||||
#if GST_MF_HAVE_D3D11
|
||||
static gboolean
|
||||
gst_mf_video_enc_create_input_sample_d3d11 (GstMFVideoEnc * self,
|
||||
gst_mf_video_encoder_create_input_sample_d3d11 (GstMFVideoEncoder * self,
|
||||
GstVideoCodecFrame * frame, IMFSample ** sample)
|
||||
{
|
||||
HRESULT hr;
|
||||
|
@ -1180,10 +1185,10 @@ gst_mf_video_enc_create_input_sample_d3d11 (GstMFVideoEnc * self,
|
|||
#endif
|
||||
|
||||
static GstFlowReturn
|
||||
gst_mf_video_enc_handle_frame (GstVideoEncoder * enc,
|
||||
gst_mf_video_encoder_handle_frame (GstVideoEncoder * enc,
|
||||
GstVideoCodecFrame * frame)
|
||||
{
|
||||
GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
|
||||
GstMFVideoEncoder *self = GST_MF_VIDEO_ENCODER (enc);
|
||||
GstFlowReturn ret = GST_FLOW_OK;
|
||||
ComPtr < IMFSample > sample;
|
||||
|
||||
|
@ -1194,19 +1199,20 @@ gst_mf_video_enc_handle_frame (GstVideoEncoder * enc,
|
|||
}
|
||||
#if GST_MF_HAVE_D3D11
|
||||
if (self->mf_allocator &&
|
||||
!gst_mf_video_enc_create_input_sample_d3d11 (self, frame, &sample)) {
|
||||
!gst_mf_video_encoder_create_input_sample_d3d11 (self, frame, &sample)) {
|
||||
GST_WARNING_OBJECT (self, "Failed to create IMFSample for D3D11");
|
||||
sample = nullptr;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (!sample && !gst_mf_video_enc_create_input_sample (self, frame, &sample)) {
|
||||
if (!sample
|
||||
&& !gst_mf_video_encoder_create_input_sample (self, frame, &sample)) {
|
||||
GST_ERROR_OBJECT (self, "Failed to create IMFSample");
|
||||
ret = GST_FLOW_ERROR;
|
||||
goto done;
|
||||
}
|
||||
|
||||
if (!gst_mf_video_enc_process_input (self, frame, sample.Get ())) {
|
||||
if (!gst_mf_video_encoder_process_input (self, frame, sample.Get ())) {
|
||||
GST_ERROR_OBJECT (self, "Failed to process input");
|
||||
ret = GST_FLOW_ERROR;
|
||||
goto done;
|
||||
|
@ -1217,7 +1223,7 @@ gst_mf_video_enc_handle_frame (GstVideoEncoder * enc,
|
|||
* from Media Foundation's internal worker queue thread */
|
||||
if (!self->async_mft) {
|
||||
do {
|
||||
ret = gst_mf_video_enc_process_output (self);
|
||||
ret = gst_mf_video_encoder_process_output (self);
|
||||
} while (ret == GST_FLOW_OK);
|
||||
}
|
||||
|
||||
|
@ -1231,9 +1237,9 @@ done:
|
|||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_mf_video_enc_finish (GstVideoEncoder * enc)
|
||||
gst_mf_video_encoder_finish (GstVideoEncoder * enc)
|
||||
{
|
||||
GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
|
||||
GstMFVideoEncoder *self = GST_MF_VIDEO_ENCODER (enc);
|
||||
GstFlowReturn ret = GST_FLOW_OK;
|
||||
|
||||
if (!self->transform)
|
||||
|
@ -1251,7 +1257,7 @@ gst_mf_video_enc_finish (GstVideoEncoder * enc)
|
|||
|
||||
if (!self->async_mft) {
|
||||
do {
|
||||
ret = gst_mf_video_enc_process_output (self);
|
||||
ret = gst_mf_video_encoder_process_output (self);
|
||||
} while (ret == GST_FLOW_OK);
|
||||
}
|
||||
|
||||
|
@ -1262,9 +1268,9 @@ gst_mf_video_enc_finish (GstVideoEncoder * enc)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_video_enc_flush (GstVideoEncoder * enc)
|
||||
gst_mf_video_encoder_flush (GstVideoEncoder * enc)
|
||||
{
|
||||
GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
|
||||
GstMFVideoEncoder *self = GST_MF_VIDEO_ENCODER (enc);
|
||||
|
||||
if (!self->transform)
|
||||
goto out;
|
||||
|
@ -1285,10 +1291,11 @@ out:
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_video_enc_propose_allocation (GstVideoEncoder * enc, GstQuery * query)
|
||||
gst_mf_video_encoder_propose_allocation (GstVideoEncoder * enc,
|
||||
GstQuery * query)
|
||||
{
|
||||
#if GST_MF_HAVE_D3D11
|
||||
GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
|
||||
GstMFVideoEncoder *self = GST_MF_VIDEO_ENCODER (enc);
|
||||
GstVideoInfo info;
|
||||
GstBufferPool *pool = nullptr;
|
||||
GstCaps *caps;
|
||||
|
@ -1393,10 +1400,10 @@ config_failed:
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_video_enc_sink_query (GstVideoEncoder * enc, GstQuery * query)
|
||||
gst_mf_video_encoder_sink_query (GstVideoEncoder * enc, GstQuery * query)
|
||||
{
|
||||
#if GST_MF_HAVE_D3D11
|
||||
GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
|
||||
GstMFVideoEncoder *self = GST_MF_VIDEO_ENCODER (enc);
|
||||
|
||||
switch (GST_QUERY_TYPE (query)) {
|
||||
case GST_QUERY_CONTEXT:
|
||||
|
@ -1414,10 +1421,10 @@ gst_mf_video_enc_sink_query (GstVideoEncoder * enc, GstQuery * query)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_mf_video_enc_src_query (GstVideoEncoder * enc, GstQuery * query)
|
||||
gst_mf_video_encoder_src_query (GstVideoEncoder * enc, GstQuery * query)
|
||||
{
|
||||
#if GST_MF_HAVE_D3D11
|
||||
GstMFVideoEnc *self = GST_MF_VIDEO_ENC (enc);
|
||||
GstMFVideoEncoder *self = GST_MF_VIDEO_ENCODER (enc);
|
||||
|
||||
switch (GST_QUERY_TYPE (query)) {
|
||||
case GST_QUERY_CONTEXT:
|
||||
|
@ -1436,14 +1443,14 @@ gst_mf_video_enc_src_query (GstVideoEncoder * enc, GstQuery * query)
|
|||
|
||||
static HRESULT
|
||||
gst_mf_video_on_new_sample (GstMFTransform * object,
|
||||
IMFSample * sample, GstMFVideoEnc * self)
|
||||
IMFSample * sample, GstMFVideoEncoder * self)
|
||||
{
|
||||
GST_LOG_OBJECT (self, "New Sample callback");
|
||||
|
||||
/* NOTE: this callback will be called from Media Foundation's internal
|
||||
* worker queue thread */
|
||||
GST_VIDEO_ENCODER_STREAM_LOCK (self);
|
||||
gst_mf_video_enc_finish_sample (self, sample);
|
||||
gst_mf_video_encoder_finish_sample (self, sample);
|
||||
GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
|
||||
|
||||
return S_OK;

@ -1453,11 +1460,11 @@ typedef struct
{
guint profile;
const gchar *profile_str;
} GstMFVideoEncProfileMap;
} GstMFVideoEncoderProfileMap;

static void
gst_mf_video_enc_enum_internal (GstMFTransform * transform, GUID & subtype,
GstObject * d3d11_device, GstMFVideoEncDeviceCaps * device_caps,
gst_mf_video_encoder_enum_internal (GstMFTransform * transform, GUID & subtype,
GstObject * d3d11_device, GstMFVideoEncoderDeviceCaps * device_caps,
GstCaps ** sink_template, GstCaps ** src_template)
{
HRESULT hr;

@ -1478,18 +1485,18 @@ gst_mf_video_enc_enum_internal (GstMFTransform * transform, GUID & subtype,
IMFTransform *encoder;
ICodecAPI *codec_api;
ComPtr < IMFMediaType > out_type;
GstMFVideoEncProfileMap h264_profile_map[] = {
GstMFVideoEncoderProfileMap h264_profile_map[] = {
{eAVEncH264VProfile_High, "high"},
{eAVEncH264VProfile_Main, "main"},
{eAVEncH264VProfile_Base, "baseline"},
{0, nullptr},
};
GstMFVideoEncProfileMap hevc_profile_map[] = {
GstMFVideoEncoderProfileMap hevc_profile_map[] = {
{eAVEncH265VProfile_Main_420_8, "main"},
{eAVEncH265VProfile_Main_420_10, "main-10"},
{0, nullptr},
};
GstMFVideoEncProfileMap *profile_to_check = nullptr;
GstMFVideoEncoderProfileMap *profile_to_check = nullptr;
static const gchar *h264_caps_str =
"video/x-h264, stream-format=(string) byte-stream, alignment=(string) au";
static const gchar *hevc_caps_str =

@ -1785,8 +1792,8 @@ gst_mf_video_enc_enum_internal (GstMFTransform * transform, GUID & subtype,
}

static GstMFTransform *
gst_mf_video_enc_enum (guint enum_flags, GUID * subtype, guint device_index,
GstMFVideoEncDeviceCaps * device_caps, GstObject * d3d11_device,
gst_mf_video_encoder_enum (guint enum_flags, GUID * subtype, guint device_index,
GstMFVideoEncoderDeviceCaps * device_caps, GstObject * d3d11_device,
GstCaps ** sink_template, GstCaps ** src_template)
{
GstMFTransformEnumParams enum_params = { 0, };

@ -1796,7 +1803,7 @@ gst_mf_video_enc_enum (guint enum_flags, GUID * subtype, guint device_index,

*sink_template = nullptr;
*src_template = nullptr;
memset (device_caps, 0, sizeof (GstMFVideoEncDeviceCaps));
memset (device_caps, 0, sizeof (GstMFVideoEncoderDeviceCaps));

if (!IsEqualGUID (MFVideoFormat_H264, *subtype) &&
!IsEqualGUID (MFVideoFormat_HEVC, *subtype) &&

@ -1827,16 +1834,16 @@ gst_mf_video_enc_enum (guint enum_flags, GUID * subtype, guint device_index,
if (!transform)
return nullptr;

gst_mf_video_enc_enum_internal (transform, output_type.guidSubtype,
gst_mf_video_encoder_enum_internal (transform, output_type.guidSubtype,
d3d11_device, device_caps, sink_template, src_template);

return transform;
}

static void
gst_mf_video_enc_register_internal (GstPlugin * plugin, guint rank,
gst_mf_video_encoder_register_internal (GstPlugin * plugin, guint rank,
GUID * subtype, GTypeInfo * type_info,
const GstMFVideoEncDeviceCaps * device_caps,
const GstMFVideoEncoderDeviceCaps * device_caps,
guint32 enum_flags, guint device_index, GstMFTransform * transform,
GstCaps * sink_caps, GstCaps * src_caps)
{

@ -1845,7 +1852,7 @@ gst_mf_video_enc_register_internal (GstPlugin * plugin, guint rank,
gchar *type_name;
gchar *feature_name;
gint i;
GstMFVideoEncClassData *cdata;
GstMFVideoEncoderClassData *cdata;
gboolean is_default = TRUE;
gchar *device_name = nullptr;
const gchar *type_name_prefix = nullptr;

@ -1869,7 +1876,7 @@ gst_mf_video_enc_register_internal (GstPlugin * plugin, guint rank,
g_object_get (transform, "device-name", &device_name, nullptr);
g_assert (device_name != nullptr);

cdata = g_new0 (GstMFVideoEncClassData, 1);
cdata = g_new0 (GstMFVideoEncoderClassData, 1);
cdata->sink_caps = gst_caps_copy (sink_caps);
cdata->src_caps = gst_caps_copy (src_caps);
cdata->device_name = device_name;

@ -1901,7 +1908,7 @@ gst_mf_video_enc_register_internal (GstPlugin * plugin, guint rank,
cdata->is_default = is_default;

type =
g_type_register_static (GST_TYPE_MF_VIDEO_ENC, type_name,
g_type_register_static (GST_TYPE_MF_VIDEO_ENCODER, type_name,
&local_type_info, (GTypeFlags) 0);

/* make lower rank than default device */

@ -1919,14 +1926,14 @@ gst_mf_video_enc_register_internal (GstPlugin * plugin, guint rank,
}

void
gst_mf_video_enc_register (GstPlugin * plugin, guint rank, GUID * subtype,
gst_mf_video_encoder_register (GstPlugin * plugin, guint rank, GUID * subtype,
GTypeInfo * type_info, GList * d3d11_device)
{
GstMFTransform *transform = nullptr;
GstCaps *sink_template = nullptr;
GstCaps *src_template = nullptr;
guint enum_flags;
GstMFVideoEncDeviceCaps device_caps;
GstMFVideoEncoderDeviceCaps device_caps;
guint i;

/* register hardware encoders first */

@ -1938,7 +1945,8 @@ gst_mf_video_enc_register (GstPlugin * plugin, guint rank, GUID * subtype,
for (iter = d3d11_device; iter; iter = g_list_next (iter)) {
GstObject *device = (GstObject *) iter->data;

transform = gst_mf_video_enc_enum (enum_flags, subtype, 0, &device_caps,
transform =
gst_mf_video_encoder_enum (enum_flags, subtype, 0, &device_caps,
device, &sink_template, &src_template);

/* No more MFT to enumerate */

@ -1951,7 +1959,7 @@ gst_mf_video_enc_register (GstPlugin * plugin, guint rank, GUID * subtype,
continue;
}

gst_mf_video_enc_register_internal (plugin, rank, subtype, type_info,
gst_mf_video_encoder_register_internal (plugin, rank, subtype, type_info,
&device_caps, enum_flags, 0, transform, sink_template, src_template);
gst_clear_object (&transform);
gst_clear_caps (&sink_template);

@ -1960,7 +1968,8 @@ gst_mf_video_enc_register (GstPlugin * plugin, guint rank, GUID * subtype,
} else {
/* AMD seems to be able to support up to 12 GPUs */
for (i = 0; i < 12; i++) {
transform = gst_mf_video_enc_enum (enum_flags, subtype, i, &device_caps,
transform =
gst_mf_video_encoder_enum (enum_flags, subtype, i, &device_caps,
nullptr, &sink_template, &src_template);

/* No more MFT to enumerate */

@ -1973,7 +1982,7 @@ gst_mf_video_enc_register (GstPlugin * plugin, guint rank, GUID * subtype,
continue;
}

gst_mf_video_enc_register_internal (plugin, rank, subtype, type_info,
gst_mf_video_encoder_register_internal (plugin, rank, subtype, type_info,
&device_caps, enum_flags, i, transform, sink_template, src_template);
gst_clear_object (&transform);
gst_clear_caps (&sink_template);

@ -1985,7 +1994,7 @@ gst_mf_video_enc_register (GstPlugin * plugin, guint rank, GUID * subtype,
enum_flags = (MFT_ENUM_FLAG_SYNCMFT |
MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SORTANDFILTER_APPROVED_ONLY);

transform = gst_mf_video_enc_enum (enum_flags, subtype, 0, &device_caps,
transform = gst_mf_video_encoder_enum (enum_flags, subtype, 0, &device_caps,
nullptr, &sink_template, &src_template);

if (!transform)

@ -1994,7 +2003,7 @@ gst_mf_video_enc_register (GstPlugin * plugin, guint rank, GUID * subtype,
if (!sink_template)
goto done;

gst_mf_video_enc_register_internal (plugin, rank, subtype, type_info,
gst_mf_video_encoder_register_internal (plugin, rank, subtype, type_info,
&device_caps, enum_flags, 0, transform, sink_template, src_template);

done:
@ -18,8 +18,7 @@
* Boston, MA 02110-1301, USA.
*/

#ifndef __GST_MF_VIDEO_ENC_H__
#define __GST_MF_VIDEO_ENC_H__
#pragma once

#include "gstmfconfig.h"

@ -34,19 +33,19 @@

G_BEGIN_DECLS

#define GST_TYPE_MF_VIDEO_ENC (gst_mf_video_enc_get_type())
#define GST_MF_VIDEO_ENC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MF_VIDEO_ENC,GstMFVideoEnc))
#define GST_MF_VIDEO_ENC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_MF_VIDEO_ENC,GstMFVideoEncClass))
#define GST_MF_VIDEO_ENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_MF_VIDEO_ENC,GstMFVideoEncClass))
#define GST_IS_MF_VIDEO_ENC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MF_VIDEO_ENC))
#define GST_IS_MF_VIDEO_ENC_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_MF_VIDEO_ENC))
#define GST_TYPE_MF_VIDEO_ENCODER (gst_mf_video_encoder_get_type())
#define GST_MF_VIDEO_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MF_VIDEO_ENCODER,GstMFVideoEncoder))
#define GST_MF_VIDEO_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_MF_VIDEO_ENCODER,GstMFVideoEncoderClass))
#define GST_MF_VIDEO_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_MF_VIDEO_ENCODER,GstMFVideoEncoderClass))
#define GST_IS_MF_VIDEO_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MF_VIDEO_ENCODER))
#define GST_IS_MF_VIDEO_ENCODER_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_MF_VIDEO_ENCODER))

typedef struct _GstMFVideoEnc GstMFVideoEnc;
typedef struct _GstMFVideoEncClass GstMFVideoEncClass;
typedef struct _GstMFVideoEncDeviceCaps GstMFVideoEncDeviceCaps;
typedef struct _GstMFVideoEncClassData GstMFVideoEncClassData;
typedef struct _GstMFVideoEncoder GstMFVideoEncoder;
typedef struct _GstMFVideoEncoderClass GstMFVideoEncoderClass;
typedef struct _GstMFVideoEncoderDeviceCaps GstMFVideoEncoderDeviceCaps;
typedef struct _GstMFVideoEncoderClassData GstMFVideoEncoderClassData;

struct _GstMFVideoEncDeviceCaps
struct _GstMFVideoEncoderDeviceCaps
{
gboolean rc_mode; /* AVEncCommonRateControlMode */
gboolean quality; /* AVEncCommonQuality */

@ -79,18 +78,18 @@ struct _GstMFVideoEncDeviceCaps
gint64 adapter_luid;
};

struct _GstMFVideoEncClassData
struct _GstMFVideoEncoderClassData
{
GstCaps *sink_caps;
GstCaps *src_caps;
gchar *device_name;
guint32 enum_flags;
guint device_index;
GstMFVideoEncDeviceCaps device_caps;
GstMFVideoEncoderDeviceCaps device_caps;
gboolean is_default;
};

struct _GstMFVideoEnc
struct _GstMFVideoEncoder
{
GstVideoEncoder parent;

@ -117,7 +116,7 @@ struct _GstMFVideoEnc
#endif
};

struct _GstMFVideoEncClass
struct _GstMFVideoEncoderClass
{
GstVideoEncoderClass parent_class;

@ -125,25 +124,23 @@ struct _GstMFVideoEncClass
GUID codec_id; /* Output subtype of MFT */
guint32 enum_flags; /* MFT_ENUM_FLAG */
guint device_index; /* Index of enumerated IMFActivate via MFTEnum */
GstMFVideoEncDeviceCaps device_caps;
GstMFVideoEncoderDeviceCaps device_caps;

gboolean (*set_option) (GstMFVideoEnc * mfenc,
gboolean (*set_option) (GstMFVideoEncoder * encoder,
GstVideoCodecState * state,
IMFMediaType * output_type);

gboolean (*set_src_caps) (GstMFVideoEnc * mfenc,
gboolean (*set_src_caps) (GstMFVideoEncoder * encoder,
GstVideoCodecState * state,
IMFMediaType * output_type);
};

GType gst_mf_video_enc_get_type (void);
GType gst_mf_video_encoder_get_type (void);

void gst_mf_video_enc_register (GstPlugin * plugin,
guint rank,
GUID * subtype,
GTypeInfo * type_info,
GList * d3d11_device);
void gst_mf_video_encoder_register (GstPlugin * plugin,
guint rank,
GUID * subtype,
GTypeInfo * type_info,
GList * d3d11_device);

G_END_DECLS

#endif /* __GST_MF_VIDEO_ENC_H__ */
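
The renamed boilerplate above is consumed by the codec-specific subclasses (the VP9 encoder follows below). As a rough illustration of the registration path declared at the end of this header, a subclass fills a GTypeInfo and hands it to gst_mf_video_encoder_register(), which then enumerates MFTs and registers one element per device. This is a sketch only: the GstMFFooEnc names and the choice of MFVideoFormat_H264 are placeholders, not upstream symbols.

/* Hypothetical subclass skeleton; only the wiring against the renamed
 * base class is shown */
typedef struct
{
  GstMFVideoEncoder parent;
} GstMFFooEnc;

typedef struct
{
  GstMFVideoEncoderClass parent_class;
} GstMFFooEncClass;

static void gst_mf_foo_enc_class_init (GstMFFooEncClass * klass,
    gpointer data);
static void gst_mf_foo_enc_init (GstMFFooEnc * self);

static void
gst_mf_foo_enc_plugin_init (GstPlugin * plugin, guint rank,
    GList * d3d11_device)
{
  GTypeInfo type_info = {
    sizeof (GstMFFooEncClass),
    nullptr,                    /* base_init */
    nullptr,                    /* base_finalize */
    (GClassInitFunc) gst_mf_foo_enc_class_init,
    nullptr,                    /* class_finalize */
    nullptr,                    /* class_data, filled per enumerated device */
    sizeof (GstMFFooEnc),
    0,                          /* n_preallocs */
    (GInstanceInitFunc) gst_mf_foo_enc_init,
  };
  GUID subtype = MFVideoFormat_H264;

  /* The base class enumerates matching MFTs and registers an element
   * factory for each device it finds */
  gst_mf_video_encoder_register (plugin, rank, &subtype, &type_info,
      d3d11_device);
}

This mirrors what gst_mf_vp9_enc_plugin_init() does at the end of the VP9 encoder diff below.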

@ -35,7 +35,7 @@
#endif

#include <gst/gst.h>
#include "gstmfvideoenc.h"
#include "gstmfvideoencoder.h"
#include "gstmfvp9enc.h"
#include <wrl.h>

@ -122,7 +122,7 @@ enum

typedef struct _GstMFVP9Enc
{
GstMFVideoEnc parent;
GstMFVideoEncoder parent;

/* properties */
guint bitrate;

@ -139,7 +139,7 @@ typedef struct _GstMFVP9Enc

typedef struct _GstMFVP9EncClass
{
GstMFVideoEncClass parent_class;
GstMFVideoEncoderClass parent_class;
} GstMFVP9EncClass;

static GstElementClass *parent_class = nullptr;

@ -148,9 +148,9 @@ static void gst_mf_vp9_enc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_mf_vp9_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static gboolean gst_mf_vp9_enc_set_option (GstMFVideoEnc * mfenc,
static gboolean gst_mf_vp9_enc_set_option (GstMFVideoEncoder * encoder,
GstVideoCodecState * state, IMFMediaType * output_type);
static gboolean gst_mf_vp9_enc_set_src_caps (GstMFVideoEnc * mfenc,
static gboolean gst_mf_vp9_enc_set_src_caps (GstMFVideoEncoder * encoder,
GstVideoCodecState * state, IMFMediaType * output_type);

static void

@ -158,9 +158,9 @@ gst_mf_vp9_enc_class_init (GstMFVP9EncClass * klass, gpointer data)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstMFVideoEncClass *mfenc_class = GST_MF_VIDEO_ENC_CLASS (klass);
GstMFVideoEncClassData *cdata = (GstMFVideoEncClassData *) data;
GstMFVideoEncDeviceCaps *device_caps = &cdata->device_caps;
GstMFVideoEncoderClass *encoder_class = GST_MF_VIDEO_ENCODER_CLASS (klass);
GstMFVideoEncoderClassData *cdata = (GstMFVideoEncoderClassData *) data;
GstMFVideoEncoderDeviceCaps *device_caps = &cdata->device_caps;
gchar *long_name;
gchar *classification;

@ -301,13 +301,13 @@ gst_mf_vp9_enc_class_init (GstMFVP9EncClass * klass, gpointer data)
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
cdata->src_caps));

mfenc_class->set_option = GST_DEBUG_FUNCPTR (gst_mf_vp9_enc_set_option);
mfenc_class->set_src_caps = GST_DEBUG_FUNCPTR (gst_mf_vp9_enc_set_src_caps);
encoder_class->set_option = GST_DEBUG_FUNCPTR (gst_mf_vp9_enc_set_option);
encoder_class->set_src_caps = GST_DEBUG_FUNCPTR (gst_mf_vp9_enc_set_src_caps);

mfenc_class->codec_id = MFVideoFormat_VP90;
mfenc_class->enum_flags = cdata->enum_flags;
mfenc_class->device_index = cdata->device_index;
mfenc_class->device_caps = *device_caps;
encoder_class->codec_id = MFVideoFormat_VP90;
encoder_class->enum_flags = cdata->enum_flags;
encoder_class->device_index = cdata->device_index;
encoder_class->device_caps = *device_caps;

g_free (cdata->device_name);
gst_caps_unref (cdata->sink_caps);

@ -333,7 +333,7 @@ gst_mf_vp9_enc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstMFVP9Enc *self = (GstMFVP9Enc *) (object);
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (object);
GstMFVideoEncoderClass *klass = GST_MF_VIDEO_ENCODER_GET_CLASS (object);

switch (prop_id) {
case PROP_BITRATE:

@ -443,14 +443,14 @@ gst_mf_vp9_enc_content_type_to_enum (guint rc_mode)
} G_STMT_END

static gboolean
gst_mf_vp9_enc_set_option (GstMFVideoEnc * mfenc, GstVideoCodecState * state,
IMFMediaType * output_type)
gst_mf_vp9_enc_set_option (GstMFVideoEncoder * encoder,
GstVideoCodecState * state, IMFMediaType * output_type)
{
GstMFVP9Enc *self = (GstMFVP9Enc *) mfenc;
GstMFVideoEncClass *klass = GST_MF_VIDEO_ENC_GET_CLASS (mfenc);
GstMFVideoEncDeviceCaps *device_caps = &klass->device_caps;
GstMFVP9Enc *self = (GstMFVP9Enc *) encoder;
GstMFVideoEncoderClass *klass = GST_MF_VIDEO_ENCODER_GET_CLASS (encoder);
GstMFVideoEncoderDeviceCaps *device_caps = &klass->device_caps;
HRESULT hr;
GstMFTransform *transform = mfenc->transform;
GstMFTransform *transform = encoder->transform;

hr = output_type->SetGUID (MF_MT_SUBTYPE, MFVideoFormat_VP90);
if (!gst_mf_result (hr))

@ -536,10 +536,10 @@ gst_mf_vp9_enc_set_option (GstMFVideoEnc * mfenc, GstVideoCodecState * state,
}

static gboolean
gst_mf_vp9_enc_set_src_caps (GstMFVideoEnc * mfenc,
gst_mf_vp9_enc_set_src_caps (GstMFVideoEncoder * encoder,
GstVideoCodecState * state, IMFMediaType * output_type)
{
GstMFVP9Enc *self = (GstMFVP9Enc *) mfenc;
GstMFVP9Enc *self = (GstMFVP9Enc *) encoder;
GstVideoCodecState *out_state;
GstStructure *s;
GstCaps *out_caps;

@ -586,5 +586,6 @@ gst_mf_vp9_enc_plugin_init (GstPlugin * plugin, guint rank,

GST_DEBUG_CATEGORY_INIT (gst_mf_vp9_enc_debug, "mfvp9enc", 0, "mfvp9enc");

gst_mf_video_enc_register (plugin, rank, &subtype, &type_info, d3d11_device);
gst_mf_video_encoder_register (plugin,
rank, &subtype, &type_info, d3d11_device);
}
@ -1,18 +1,18 @@
mf_sources = [
'gstmfutils.cpp',
'gstmftransform.cpp',
'gstmfvideoenc.cpp',
'gstmfaacenc.cpp',
'gstmfaudioencoder.cpp',
'gstmfdevice.cpp',
'gstmfh264enc.cpp',
'gstmfh265enc.cpp',
'gstmfvp9enc.cpp',
'gstmfvideosrc.cpp',
'gstmfsourceobject.cpp',
'gstmfdevice.cpp',
'gstmfaudioenc.cpp',
'gstmfaacenc.cpp',
'gstmfmp3enc.cpp',
'gstmfvideobuffer.cpp',
'gstmfplatloader.cpp',
'gstmfsourceobject.cpp',
'gstmftransform.cpp',
'gstmfutils.cpp',
'gstmfvideobuffer.cpp',
'gstmfvideoencoder.cpp',
'gstmfvideosrc.cpp',
'gstmfvp9enc.cpp',
'plugin.cpp',
]

@ -83,7 +83,7 @@ GST_DEBUG_CATEGORY (gst_mf_utils_debug);
GST_DEBUG_CATEGORY (gst_mf_source_object_debug);
GST_DEBUG_CATEGORY (gst_mf_transform_debug);
GST_DEBUG_CATEGORY (gst_mf_video_buffer_debug);
GST_DEBUG_CATEGORY (gst_mf_video_enc_debug);
GST_DEBUG_CATEGORY (gst_mf_video_encoder_debug);

#define GST_CAT_DEFAULT gst_mf_debug

@ -201,8 +201,8 @@ plugin_init (GstPlugin * plugin)
"mftransform", 0, "mftransform");
GST_DEBUG_CATEGORY_INIT (gst_mf_video_buffer_debug,
"mfvideobuffer", 0, "mfvideobuffer");
GST_DEBUG_CATEGORY_INIT (gst_mf_video_enc_debug,
"mfvideoenc", 0, "mfvideoenc");
GST_DEBUG_CATEGORY_INIT (gst_mf_video_encoder_debug,
"mfvideoencoder", 0, "mfvideoencoder");

hr = MFStartup (MF_VERSION, MFSTARTUP_NOSOCKET);
if (!gst_mf_result (hr)) {
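
As a quick sanity check after the rename (illustrative only, not part of the commit): once the plugin is built and present in the registry, the encoder factories can still be looked up by their element names. The element name below assumes the default (first-device) VP9 factory registered above.

/* Illustrative application-side check; assumes the mediafoundation
 * plugin is available in the GStreamer registry */
#include <gst/gst.h>

int
main (int argc, char ** argv)
{
  GstElementFactory *factory;

  gst_init (&argc, &argv);

  /* "mfvp9enc" is the default VP9 encoder factory name used above */
  factory = gst_element_factory_find ("mfvp9enc");
  if (factory) {
    GstElement *enc = gst_element_factory_create (factory, nullptr);

    gst_println ("Found factory %s", GST_OBJECT_NAME (factory));
    gst_clear_object (&enc);
    gst_object_unref (factory);
  }

  return 0;
}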