mediafoundation: Introduce Microsoft Media Foundation plugin

Microsoft Media Foundation (MF) is the successor to DirectShow.
This commit includes two video capture implementations:
one uses the IMFSourceReader interface, which is available since
Windows Vista, and the other is based on the IMFCaptureEngine
interface, which is available since Windows 8.
Note that this new video source element cannot be used in UWP apps
for now, since device activation using these APIs is not allowed by MS.
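
A rough usage sketch follows (the element name "mfvideosrc" is an
assumption for illustration; the element registration itself is not
part of this excerpt):

#include <gst/gst.h>

int
main (int argc, char ** argv)
{
  GstElement *pipeline;

  gst_init (&argc, &argv);

  /* "mfvideosrc" is the assumed name of the video source element
   * provided by this plugin */
  pipeline = gst_parse_launch ("mfvideosrc ! videoconvert ! autovideosink",
      NULL);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (g_main_loop_new (NULL, FALSE));

  return 0;
}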

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/760>
Authored by Seungha Yang on 2019-10-07 21:49:26 +09:00, committed by GStreamer Merge Bot
parent 9082f103a4
commit eece89042a
14 changed files with 3025 additions and 0 deletions


@@ -115,6 +115,7 @@ option('ladspa', type : 'feature', value : 'auto', description : 'LADSPA plugin
option('libde265', type : 'feature', value : 'auto', description : 'HEVC/H.265 video decoder plugin')
option('libmms', type : 'feature', value : 'auto', description : 'Microsoft multimedia server network source plugin')
option('lv2', type : 'feature', value : 'auto', description : 'LV2 audio plugin bridge')
option('mediafoundation', type : 'feature', value : 'auto', description : 'Microsoft Media Foundation plugin')
option('microdns', type : 'feature', value : 'auto', description : 'libmicrodns-based device provider')
option('modplug', type : 'feature', value : 'auto', description : 'ModPlug audio decoder plugin')
option('mpeg2enc', type : 'feature', value : 'auto', description : 'mpeg2enc video encoder plugin')


@@ -0,0 +1,893 @@
/* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/video/video.h>
#include "gstmfcaptureengine.h"
#include "gstmfutils.h"
#include <mfcaptureengine.h>
#include <string.h>
#include <wrl.h>
using namespace Microsoft::WRL;
extern "C" {
GST_DEBUG_CATEGORY_EXTERN (gst_mf_source_object_debug);
#define GST_CAT_DEFAULT gst_mf_source_object_debug
}
static HRESULT gst_mf_capture_engine_on_event (GstMFCaptureEngine * engine,
IMFMediaEvent * event);
static HRESULT gst_mf_capture_engine_on_sample (GstMFCaptureEngine * engine,
IMFSample * sample);
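/* COM callback object handed to the capture engine: it implements both
 * IMFCaptureEngineOnSampleCallback and IMFCaptureEngineOnEventCallback,
 * and keeps only a weak reference to the owning GstMFCaptureEngine so the
 * GObject can be disposed while Media Foundation still holds this object. */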
class GstMFCaptureEngineCallbackObject
: public IMFCaptureEngineOnSampleCallback
, public IMFCaptureEngineOnEventCallback
{
public:
GstMFCaptureEngineCallbackObject (GstMFCaptureEngine * listener)
: _listener (listener)
, _ref_count (1)
{
if (_listener)
g_object_weak_ref (G_OBJECT (_listener),
(GWeakNotify) GstMFCaptureEngineCallbackObject::OnWeakNotify, this);
}
STDMETHOD (QueryInterface) (REFIID riid, void ** object)
{
HRESULT hr = E_NOINTERFACE;
if (IsEqualIID (riid, IID_IUnknown)) {
*object = this;
hr = S_OK;
} else if (IsEqualIID (riid, IID_IMFCaptureEngineOnSampleCallback)) {
*object = static_cast<IMFCaptureEngineOnSampleCallback*>(this);
hr = S_OK;
} else if (IsEqualIID (riid, IID_IMFCaptureEngineOnEventCallback)) {
*object = static_cast<IMFCaptureEngineOnEventCallback*>(this);
hr = S_OK;
}
if (SUCCEEDED (hr))
AddRef();
return hr;
}
STDMETHOD_ (ULONG, AddRef) (void)
{
return InterlockedIncrement (&this->_ref_count);
}
STDMETHOD_ (ULONG, Release) (void)
{
ULONG ref_count;
ref_count = InterlockedDecrement (&this->_ref_count);
if (ref_count == 0)
delete this;
return ref_count;
}
STDMETHOD (OnSample) (IMFSample * sample)
{
if (!sample) {
return S_OK;
}
if (this->_listener)
return gst_mf_capture_engine_on_sample (this->_listener, sample);
return S_OK;
}
STDMETHOD (OnEvent) (IMFMediaEvent * event)
{
if (this->_listener)
return gst_mf_capture_engine_on_event (this->_listener, event);
return S_OK;
}
private:
~GstMFCaptureEngineCallbackObject ()
{
if (_listener)
g_object_weak_unref (G_OBJECT (_listener),
(GWeakNotify) GstMFCaptureEngineCallbackObject::OnWeakNotify, this);
}
static void
OnWeakNotify (GstMFCaptureEngineCallbackObject * self, GObject * object)
{
self->_listener = NULL;
}
GstMFCaptureEngine * _listener;
volatile ULONG _ref_count;
};
typedef enum
{
GST_MF_CAPTURE_ENGINE_EVENT_NONE,
GST_MF_CAPTURE_ENGINE_EVENT_ALL_EFFECTS_REMOVED,
GST_MF_CAPTURE_ENGINE_EVENT_CAMERA_STREAM_BLOCKED,
GST_MF_CAPTURE_ENGINE_EVENT_CAMERA_STREAM_UNBLOCKED,
GST_MF_CAPTURE_ENGINE_EVENT_EFFECT_ADDED,
GST_MF_CAPTURE_ENGINE_EVENT_EFFECT_REMOVED,
GST_MF_CAPTURE_ENGINE_EVENT_ERROR,
GST_MF_CAPTURE_ENGINE_EVENT_INITIALIZED,
GST_MF_CAPTURE_ENGINE_EVENT_PHOTO_TAKEN,
GST_MF_CAPTURE_ENGINE_EVENT_PREVIEW_STARTED,
GST_MF_CAPTURE_ENGINE_EVENT_PREVIEW_STOPPED,
GST_MF_CAPTURE_ENGINE_EVENT_RECORD_STARTED,
GST_MF_CAPTURE_ENGINE_EVENT_RECORD_STOPPED,
GST_MF_CAPTURE_ENGINE_EVENT_SINK_PREPARED,
GST_MF_CAPTURE_ENGINE_EVENT_SOURCE_CURRENT_DEVICE_MEDIA_TYPE_SET,
} GstMFCaptureEngineEvent;
typedef struct
{
const GUID & mf_event;
GstMFCaptureEngineEvent event;
const gchar *name;
} GstMFCaptureEngineEventMap;
static const GstMFCaptureEngineEventMap mf_event_map[] = {
{MF_CAPTURE_ENGINE_ALL_EFFECTS_REMOVED,
GST_MF_CAPTURE_ENGINE_EVENT_ALL_EFFECTS_REMOVED, "all-effects-removed"},
{MF_CAPTURE_ENGINE_CAMERA_STREAM_BLOCKED,
GST_MF_CAPTURE_ENGINE_EVENT_CAMERA_STREAM_BLOCKED,
"camera-stream-blocked"},
{MF_CAPTURE_ENGINE_CAMERA_STREAM_UNBLOCKED,
GST_MF_CAPTURE_ENGINE_EVENT_CAMERA_STREAM_UNBLOCKED,
"camera-stream-unblocked"},
{MF_CAPTURE_ENGINE_EFFECT_ADDED,
GST_MF_CAPTURE_ENGINE_EVENT_EFFECT_ADDED, "effect-added"},
{MF_CAPTURE_ENGINE_EFFECT_REMOVED,
GST_MF_CAPTURE_ENGINE_EVENT_EFFECT_REMOVED, "effect-removed"},
{MF_CAPTURE_ENGINE_ERROR,
GST_MF_CAPTURE_ENGINE_EVENT_ERROR, "error"},
{MF_CAPTURE_ENGINE_INITIALIZED,
GST_MF_CAPTURE_ENGINE_EVENT_INITIALIZED, "initialized"},
{MF_CAPTURE_ENGINE_PHOTO_TAKEN,
GST_MF_CAPTURE_ENGINE_EVENT_PHOTO_TAKEN, "photo-taken"},
{MF_CAPTURE_ENGINE_PREVIEW_STARTED,
GST_MF_CAPTURE_ENGINE_EVENT_PREVIEW_STARTED, "preview-started"},
{MF_CAPTURE_ENGINE_PREVIEW_STOPPED,
GST_MF_CAPTURE_ENGINE_EVENT_PREVIEW_STOPPED, "preview-stopped"},
{MF_CAPTURE_ENGINE_RECORD_STARTED,
GST_MF_CAPTURE_ENGINE_EVENT_RECORD_STARTED, "record-started"},
{MF_CAPTURE_ENGINE_RECORD_STOPPED,
GST_MF_CAPTURE_ENGINE_EVENT_RECORD_STOPPED, "record-stopped"},
{MF_CAPTURE_SINK_PREPARED,
GST_MF_CAPTURE_ENGINE_EVENT_SINK_PREPARED, "sink-prepared"},
{MF_CAPTURE_SOURCE_CURRENT_DEVICE_MEDIA_TYPE_SET,
GST_MF_CAPTURE_ENGINE_EVENT_SOURCE_CURRENT_DEVICE_MEDIA_TYPE_SET,
"source-current-device-media-type-set"}
};
static const GstMFCaptureEngineEventMap *
gst_mf_capture_engine_get_event_map (const GUID * event_type)
{
gint i;
for (i = 0; i < G_N_ELEMENTS (mf_event_map); i++) {
if (IsEqualGUID (*event_type, mf_event_map[i].mf_event))
return &mf_event_map[i];
}
return NULL;
}
typedef struct _GstMFStreamMediaType
{
IMFMediaType *media_type;
/* the stream index of media type */
guint stream_index;
/* the media type index within the stream */
guint media_type_index;
GstCaps *caps;
} GstMFStreamMediaType;
struct _GstMFCaptureEngine
{
GstMFSourceObject parent;
GMutex lock;
GCond cond;
/* protected by lock */
GQueue *queue;
GstMFCaptureEngineEvent last_event;
IMFMediaSource *source;
IMFCaptureEngine *engine;
GstMFCaptureEngineCallbackObject *callback_obj;
GstCaps *supported_caps;
GList *media_types;
GstMFStreamMediaType *cur_type;
GstVideoInfo info;
gboolean started;
gboolean flushing;
};
static void gst_mf_capture_engine_constructed (GObject * object);
static void gst_mf_capture_engine_finalize (GObject * object);
static gboolean gst_mf_capture_engine_start (GstMFSourceObject * object);
static gboolean gst_mf_capture_engine_stop (GstMFSourceObject * object);
static GstFlowReturn gst_mf_capture_engine_fill (GstMFSourceObject * object,
GstBuffer * buffer);
static gboolean gst_mf_capture_engine_unlock (GstMFSourceObject * object);
static gboolean gst_mf_capture_engine_unlock_stop (GstMFSourceObject * object);
static GstCaps *gst_mf_capture_engine_get_caps (GstMFSourceObject * object);
static gboolean gst_mf_capture_engine_set_caps (GstMFSourceObject * object,
GstCaps * caps);
#define gst_mf_capture_engine_parent_class parent_class
G_DEFINE_TYPE (GstMFCaptureEngine, gst_mf_capture_engine,
GST_TYPE_MF_SOURCE_OBJECT);
static void
gst_mf_capture_engine_class_init (GstMFCaptureEngineClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstMFSourceObjectClass *source_class = GST_MF_SOURCE_OBJECT_CLASS (klass);
gobject_class->constructed = gst_mf_capture_engine_constructed;
gobject_class->finalize = gst_mf_capture_engine_finalize;
source_class->start = GST_DEBUG_FUNCPTR (gst_mf_capture_engine_start);
source_class->stop = GST_DEBUG_FUNCPTR (gst_mf_capture_engine_stop);
source_class->fill = GST_DEBUG_FUNCPTR (gst_mf_capture_engine_fill);
source_class->unlock = GST_DEBUG_FUNCPTR (gst_mf_capture_engine_unlock);
source_class->unlock_stop =
GST_DEBUG_FUNCPTR (gst_mf_capture_engine_unlock_stop);
source_class->get_caps = GST_DEBUG_FUNCPTR (gst_mf_capture_engine_get_caps);
source_class->set_caps = GST_DEBUG_FUNCPTR (gst_mf_capture_engine_set_caps);
}
static void
gst_mf_capture_engine_init (GstMFCaptureEngine * self)
{
self->queue = g_queue_new ();
g_mutex_init (&self->lock);
g_cond_init (&self->cond);
CoInitializeEx (NULL, COINIT_MULTITHREADED);
}
static gboolean
gst_mf_enum_media_type_from_video_capture_source (IMFCaptureSource
* capture_source, GList ** media_types)
{
gint i, j;
HRESULT hr;
GList *list = NULL;
g_return_val_if_fail (capture_source != NULL, FALSE);
g_return_val_if_fail (media_types != NULL, FALSE);
for (i = 0;; i++) {
MF_CAPTURE_ENGINE_STREAM_CATEGORY category;
hr = capture_source->GetDeviceStreamCategory (i, &category);
if (FAILED (hr)) {
GST_DEBUG ("failed to get %dth stream category, hr:0x%x", i, (guint) hr);
break;
}
GST_DEBUG ("%dth capture source category %d", i, category);
if (category != MF_CAPTURE_ENGINE_STREAM_CATEGORY_VIDEO_PREVIEW &&
category != MF_CAPTURE_ENGINE_STREAM_CATEGORY_VIDEO_CAPTURE)
continue;
for (j = 0;; j++) {
ComPtr<IMFMediaType> media_type;
hr = capture_source->GetAvailableDeviceMediaType (i, j, &media_type);
if (SUCCEEDED (hr)) {
GstMFStreamMediaType *mtype;
GstCaps *caps = NULL;
caps = gst_mf_media_type_to_caps (media_type.Get ());
/* unknown format */
if (!caps)
continue;
mtype = g_new0 (GstMFStreamMediaType, 1);
mtype->media_type = media_type.Detach ();
mtype->stream_index = i;
mtype->media_type_index = j;
mtype->caps = caps;
GST_DEBUG ("StreamIndex %d, MediaTypeIndex %d, %" GST_PTR_FORMAT,
i, j, caps);
list = g_list_prepend (list, mtype);
} else if (hr == MF_E_NO_MORE_TYPES) {
/* no more media type in this stream index, try next stream index */
break;
} else if (hr == MF_E_INVALIDSTREAMNUMBER) {
/* no more streams and media types */
goto done;
} else {
/* undefined return */
goto done;
}
}
}
done:
list = g_list_reverse (list);
*media_types = list;
return ! !list;
}
static void
gst_mf_stream_media_type_free (GstMFStreamMediaType * media_type)
{
g_return_if_fail (media_type != NULL);
if (media_type->media_type)
media_type->media_type->Release ();
if (media_type->caps)
gst_caps_unref (media_type->caps);
g_free (media_type);
}
static gboolean
gst_mf_capture_engine_create (GstMFCaptureEngine * self,
IMFActivate * activate)
{
GList *iter;
ComPtr<IMFCaptureEngineClassFactory> factory;
ComPtr<IMFCaptureEngine> engine;
ComPtr<IMFMediaSource> source;
ComPtr<IMFCaptureSource> capture_source;
ComPtr<IMFAttributes> attr;
HRESULT hr;
GstMFCaptureEngineCallbackObject *callback_obj = NULL;
GstMFCaptureEngineEvent last_event;
hr = activate->ActivateObject (IID_IMFMediaSource, (void **) &source);
if (!gst_mf_result (hr))
return FALSE;
hr = CoCreateInstance (CLSID_MFCaptureEngineClassFactory,
NULL, CLSCTX_INPROC_SERVER,
IID_IMFCaptureEngineClassFactory, (void **) &factory);
if (!gst_mf_result (hr))
return FALSE;
hr = factory->CreateInstance (CLSID_MFCaptureEngine,
IID_IMFCaptureEngine, (void **) &engine);
if (!gst_mf_result (hr))
return FALSE;
hr = MFCreateAttributes (&attr, 1);
if (!gst_mf_result (hr))
return FALSE;
hr = attr->SetUINT32 (MF_CAPTURE_ENGINE_USE_VIDEO_DEVICE_ONLY, TRUE);
if (!gst_mf_result (hr))
return FALSE;
callback_obj = new GstMFCaptureEngineCallbackObject (self);
self->last_event = GST_MF_CAPTURE_ENGINE_EVENT_NONE;
GST_DEBUG_OBJECT (self, "Start init capture engine");
hr = engine->Initialize ((IMFCaptureEngineOnEventCallback *) callback_obj,
attr.Get (), NULL, source.Get ());
if (!gst_mf_result (hr)) {
callback_obj->Release ();
return FALSE;
}
/* wait for the initialized (or error) event */
g_mutex_lock (&self->lock);
while (self->last_event != GST_MF_CAPTURE_ENGINE_EVENT_ERROR &&
self->last_event != GST_MF_CAPTURE_ENGINE_EVENT_INITIALIZED)
g_cond_wait (&self->cond, &self->lock);
last_event = self->last_event;
g_mutex_unlock (&self->lock);
if (last_event == GST_MF_CAPTURE_ENGINE_EVENT_ERROR) {
GST_ERROR_OBJECT (self, "Failed to initialize");
callback_obj->Release ();
return FALSE;
}
GST_DEBUG_OBJECT (self, "Finish init capture engine");
hr = engine->GetSource (&capture_source);
if (!gst_mf_result (hr)) {
callback_obj->Release ();
return FALSE;
}
if (!gst_mf_enum_media_type_from_video_capture_source (capture_source.Get (),
&self->media_types)) {
GST_ERROR_OBJECT (self, "No available media types");
callback_obj->Release ();
return FALSE;
}
self->source = source.Detach ();
self->engine = engine.Detach ();
self->callback_obj = callback_obj;
for (iter = self->media_types; iter; iter = g_list_next (iter)) {
GstMFStreamMediaType *mtype = (GstMFStreamMediaType *) iter->data;
if (!self->supported_caps)
self->supported_caps = gst_caps_ref (mtype->caps);
else
self->supported_caps =
gst_caps_merge (self->supported_caps, gst_caps_ref (mtype->caps));
}
GST_DEBUG_OBJECT (self, "Available output caps %" GST_PTR_FORMAT,
self->supported_caps);
return TRUE;
}
static void
gst_mf_capture_engine_constructed (GObject * object)
{
GstMFSourceObject *source = GST_MF_SOURCE_OBJECT (object);
GstMFCaptureEngine *self = GST_MF_CAPTURE_ENGINE (object);
GList *activate_list = NULL;
GstMFDeviceActivate *target = NULL;
GList *iter;
if (!gst_mf_source_enum_device_activate (source->soure_type, &activate_list)) {
GST_WARNING_OBJECT (self, "No available video capture device");
return;
}
#ifndef GST_DISABLE_GST_DEBUG
for (iter = activate_list; iter; iter = g_list_next (iter)) {
GstMFDeviceActivate *activate = (GstMFDeviceActivate *) iter->data;
GST_DEBUG_OBJECT (self, "device %d, name: \"%s\", path: \"%s\"",
activate->index, GST_STR_NULL (activate->name),
GST_STR_NULL (activate->path));
}
#endif
for (iter = activate_list; iter; iter = g_list_next (iter)) {
GstMFDeviceActivate *activate = (GstMFDeviceActivate *) iter->data;
gboolean match;
if (source->device_path && strlen (source->device_path) > 0) {
match = g_ascii_strcasecmp (activate->path, source->device_path) == 0;
} else if (source->device_name && strlen (source->device_name) > 0) {
match = g_ascii_strcasecmp (activate->name, source->device_name) == 0;
} else if (source->device_index >= 0) {
match = activate->index == source->device_index;
} else {
/* pick the first entry */
match = TRUE;
}
if (match) {
target = activate;
break;
}
}
if (target)
gst_mf_capture_engine_create (self, target->handle);
if (activate_list)
g_list_free_full (activate_list,
(GDestroyNotify) gst_mf_device_activate_free);
}
static void
release_mf_buffer (IMFMediaBuffer * buffer)
{
if (buffer)
buffer->Release ();
}
static void
gst_mf_capture_engine_finalize (GObject * object)
{
GstMFCaptureEngine *self = GST_MF_CAPTURE_ENGINE (object);
gst_clear_caps (&self->supported_caps);
if (self->media_types)
g_list_free_full (self->media_types,
(GDestroyNotify) gst_mf_stream_media_type_free);
gst_mf_capture_engine_stop (GST_MF_SOURCE_OBJECT (self));
g_queue_free_full (self->queue, (GDestroyNotify) release_mf_buffer);
if (self->callback_obj) {
self->callback_obj->Release ();
}
if (self->engine)
self->engine->Release ();
if (self->source) {
self->source->Shutdown ();
self->source->Release ();
}
g_mutex_clear (&self->lock);
g_cond_clear (&self->cond);
CoUninitialize ();
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static HRESULT
gst_mf_capture_engine_on_sample (GstMFCaptureEngine * self, IMFSample * sample)
{
HRESULT hr;
DWORD count = 0, i;
if (!sample)
return S_OK;
hr = sample->GetBufferCount (&count);
if (!gst_mf_result (hr) || !count)
return S_OK;
g_mutex_lock (&self->lock);
if (self->flushing) {
g_mutex_unlock (&self->lock);
return S_OK;
}
for (i = 0; i < count; i++) {
IMFMediaBuffer *buffer = NULL;
hr = sample->GetBufferByIndex (i, &buffer);
if (!gst_mf_result (hr) || !buffer)
continue;
g_queue_push_tail (self->queue, buffer);
}
g_cond_broadcast (&self->cond);
g_mutex_unlock (&self->lock);
return S_OK;
}
static HRESULT
gst_mf_capture_engine_on_event (GstMFCaptureEngine * self,
IMFMediaEvent * event)
{
const GstMFCaptureEngineEventMap *event_map;
HRESULT hr;
GUID event_type;
hr = event->GetExtendedType (&event_type);
if (!gst_mf_result (hr))
return hr;
event_map = gst_mf_capture_engine_get_event_map (&event_type);
if (!event_map) {
GST_WARNING_OBJECT (self, "Unknown event");
return S_OK;
}
GST_DEBUG_OBJECT (self, "Got event %s", event_map->name);
g_mutex_lock (&self->lock);
self->last_event = event_map->event;
switch (event_map->event) {
case GST_MF_CAPTURE_ENGINE_EVENT_PREVIEW_STARTED:
self->started = TRUE;
break;
case GST_MF_CAPTURE_ENGINE_EVENT_PREVIEW_STOPPED:
self->started = FALSE;
break;
default:
break;
}
g_cond_signal (&self->cond);
g_mutex_unlock (&self->lock);
return S_OK;
}
static gboolean
gst_mf_capture_engine_start (GstMFSourceObject * object)
{
GstMFCaptureEngine *self = GST_MF_CAPTURE_ENGINE (object);
HRESULT hr;
ComPtr<IMFCaptureSink> sink;
ComPtr<IMFCapturePreviewSink> preview_sink;
DWORD sink_stream_index = 0;
IMFMediaType *media_type;
if (!self->cur_type) {
GST_ERROR_OBJECT (self, "MediaType wasn't specified");
return FALSE;
}
media_type = self->cur_type->media_type;
hr = media_type->SetUINT32 (MF_MT_DEFAULT_STRIDE,
GST_VIDEO_INFO_PLANE_STRIDE (&self->info, 0));
if (!gst_mf_result (hr))
return FALSE;
hr = self->engine->GetSink (MF_CAPTURE_ENGINE_SINK_TYPE_PREVIEW, &sink);
if (!gst_mf_result (hr))
return FALSE;
hr = sink.As (&preview_sink);
if (!gst_mf_result (hr))
return FALSE;
hr = preview_sink->RemoveAllStreams ();
if (!gst_mf_result (hr))
return FALSE;
hr = preview_sink->AddStream (self->cur_type->stream_index,
media_type, NULL, &sink_stream_index);
if (!gst_mf_result (hr))
return FALSE;
hr = preview_sink->SetSampleCallback (sink_stream_index,
(IMFCaptureEngineOnSampleCallback *) self->callback_obj);
if (!gst_mf_result (hr))
return FALSE;
hr = self->engine->StartPreview ();
if (!gst_mf_result (hr))
return FALSE;
g_mutex_lock (&self->lock);
while (!self->started)
g_cond_wait (&self->cond, &self->lock);
g_mutex_unlock (&self->lock);
return TRUE;
}
static gboolean
gst_mf_capture_engine_stop (GstMFSourceObject * object)
{
GstMFCaptureEngine *self = GST_MF_CAPTURE_ENGINE (object);
HRESULT hr;
if (self->engine && self->started) {
GST_DEBUG_OBJECT (self, "Stopping preview");
hr = self->engine->StopPreview ();
if (gst_mf_result (hr)) {
g_mutex_lock (&self->lock);
while (self->started)
g_cond_wait (&self->cond, &self->lock);
g_mutex_unlock (&self->lock);
GST_DEBUG_OBJECT (self, "Preview stopped");
} else {
GST_WARNING_OBJECT (self,
"Failed to stopping preivew, hr: 0x%x", (guint) hr);
}
}
return TRUE;
}
static GstFlowReturn
gst_mf_capture_engine_fill (GstMFSourceObject * object, GstBuffer * buffer)
{
GstMFCaptureEngine *self = GST_MF_CAPTURE_ENGINE (object);
GstFlowReturn ret = GST_FLOW_OK;
HRESULT hr;
GstVideoFrame frame;
BYTE *data;
gint i, j;
ComPtr<IMFMediaBuffer> media_buffer;
g_mutex_lock (&self->lock);
if (self->last_event == GST_MF_CAPTURE_ENGINE_EVENT_ERROR) {
g_mutex_unlock (&self->lock);
return GST_FLOW_ERROR;
}
if (self->flushing) {
g_mutex_unlock (&self->lock);
return GST_FLOW_FLUSHING;
}
while (!self->flushing && g_queue_is_empty (self->queue))
g_cond_wait (&self->cond, &self->lock);
if (self->flushing) {
g_mutex_unlock (&self->lock);
return GST_FLOW_FLUSHING;
}
media_buffer.Attach ((IMFMediaBuffer *) g_queue_pop_head (self->queue));
g_mutex_unlock (&self->lock);
hr = media_buffer->Lock (&data, NULL, NULL);
if (!gst_mf_result (hr)) {
GST_ERROR_OBJECT (self, "Failed to lock media buffer");
return GST_FLOW_ERROR;
}
if (!gst_video_frame_map (&frame, &self->info, buffer, GST_MAP_WRITE)) {
GST_ERROR_OBJECT (self, "Failed to map buffer");
media_buffer->Unlock ();
return GST_FLOW_ERROR;
}
for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->info); i++) {
guint8 *src, *dst;
gint src_stride, dst_stride;
gint width;
src = data + GST_VIDEO_INFO_PLANE_OFFSET (&self->info, i);
dst = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&frame, i);
src_stride = GST_VIDEO_INFO_PLANE_STRIDE (&self->info, i);
dst_stride = GST_VIDEO_FRAME_PLANE_STRIDE (&frame, i);
width = GST_VIDEO_INFO_COMP_WIDTH (&self->info, i)
* GST_VIDEO_INFO_COMP_PSTRIDE (&self->info, i);
for (j = 0; j < GST_VIDEO_INFO_COMP_HEIGHT (&self->info, i); j++) {
memcpy (dst, src, width);
src += src_stride;
dst += dst_stride;
}
}
gst_video_frame_unmap (&frame);
media_buffer->Unlock ();
return ret;
}
static gboolean
gst_mf_capture_engine_unlock (GstMFSourceObject * object)
{
GstMFCaptureEngine *self = GST_MF_CAPTURE_ENGINE (object);
g_mutex_lock (&self->lock);
if (self->flushing) {
g_mutex_unlock (&self->lock);
return TRUE;
}
self->flushing = TRUE;
g_cond_broadcast (&self->cond);
g_mutex_unlock (&self->lock);
return TRUE;
}
static gboolean
gst_mf_capture_engine_unlock_stop (GstMFSourceObject * object)
{
GstMFCaptureEngine *self = GST_MF_CAPTURE_ENGINE (object);
g_mutex_lock (&self->lock);
if (!self->flushing) {
g_mutex_unlock (&self->lock);
return TRUE;
}
self->flushing = FALSE;
g_cond_broadcast (&self->cond);
g_mutex_unlock (&self->lock);
return TRUE;
}
static GstCaps *
gst_mf_capture_engine_get_caps (GstMFSourceObject * object)
{
GstMFCaptureEngine *self = GST_MF_CAPTURE_ENGINE (object);
if (self->supported_caps)
return gst_caps_ref (self->supported_caps);
return NULL;
}
static gboolean
gst_mf_capture_engine_set_caps (GstMFSourceObject * object, GstCaps * caps)
{
GstMFCaptureEngine *self = GST_MF_CAPTURE_ENGINE (object);
GList *iter;
GstMFStreamMediaType *best_type = NULL;
for (iter = self->media_types; iter; iter = g_list_next (iter)) {
GstMFStreamMediaType *minfo = (GstMFStreamMediaType *) iter->data;
if (gst_caps_is_subset (minfo->caps, caps)) {
best_type = minfo;
break;
}
}
if (!best_type) {
GST_ERROR_OBJECT (self,
"Could not determine target media type with given caps %"
GST_PTR_FORMAT, caps);
return FALSE;
}
self->cur_type = best_type;
gst_video_info_from_caps (&self->info, best_type->caps);
return TRUE;
}
GstMFSourceObject *
gst_mf_capture_engine_new (GstMFSourceType type, gint device_index,
const gchar * device_name, const gchar * device_path)
{
GstMFCaptureEngine *self;
gchar *name;
gchar *path;
/* TODO: add more types */
g_return_val_if_fail (type == GST_MF_SOURCE_TYPE_VIDEO, NULL);
name = device_name ? g_strdup (device_name) : g_strdup ("");
path = device_path ? g_strdup (device_path) : g_strdup ("");
self = (GstMFCaptureEngine *) g_object_new (GST_TYPE_MF_CAPTURE_ENGINE,
"source-type", type, "device-index", device_index, "device-name", name,
"device-path", path, NULL);
gst_object_ref_sink (self);
g_free (name);
g_free (path);
if (!self->source) {
gst_clear_object (&self);
return NULL;
}
return GST_MF_SOURCE_OBJECT (self);
}


@@ -0,0 +1,40 @@
/* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_MF_CAPTURE_ENGINE_H__
#define __GST_MF_CAPTURE_ENGINE_H__
#include <gst/gst.h>
#include "gstmfsourceobject.h"
G_BEGIN_DECLS
#define GST_TYPE_MF_CAPTURE_ENGINE (gst_mf_capture_engine_get_type())
G_DECLARE_FINAL_TYPE (GstMFCaptureEngine, gst_mf_capture_engine,
GST, MF_CAPTURE_ENGINE, GstMFSourceObject);
GstMFSourceObject * gst_mf_capture_engine_new (GstMFSourceType type,
gint device_index,
const gchar * device_name,
const gchar * device_path);
G_END_DECLS
#endif /* __GST_MF_CAPTURE_ENGINE_H__ */


@@ -0,0 +1,346 @@
/* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstmfsourceobject.h"
GST_DEBUG_CATEGORY_EXTERN (gst_mf_source_object_debug);
#define GST_CAT_DEFAULT gst_mf_source_object_debug
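/* GstMFSourceObject is the abstract base class shared by the
 * IMFSourceReader- and IMFCaptureEngine-based capture implementations;
 * subclasses provide the start/stop/fill/unlock/get_caps/set_caps vfuncs. */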
enum
{
PROP_0,
PROP_DEVICE_PATH,
PROP_DEVICE_NAME,
PROP_DEVICE_INDEX,
PROP_SOURCE_TYPE,
};
#define DEFAULT_DEVICE_PATH NULL
#define DEFAULT_DEVICE_NAME NULL
#define DEFAULT_DEVICE_INDEX -1
#define DEFAULT_SOURCE_TYPE GST_MF_SOURCE_TYPE_VIDEO
GType
gst_mf_source_type_get_type (void)
{
static GType source_type = 0;
static const GEnumValue source_types[] = {
{GST_MF_SOURCE_TYPE_VIDEO, "Video", "video"},
{0, NULL, NULL}
};
if (!source_type) {
source_type = g_enum_register_static ("GstMFSourceMode", source_types);
}
return source_type;
}
static void gst_mf_source_object_finalize (GObject * object);
static void gst_mf_source_object_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_mf_source_object_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
#define gst_mf_source_object_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE (GstMFSourceObject, gst_mf_source_object,
GST_TYPE_OBJECT);
static void
gst_mf_source_object_class_init (GstMFSourceObjectClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
gobject_class->finalize = gst_mf_source_object_finalize;
gobject_class->get_property = gst_mf_source_object_get_property;
gobject_class->set_property = gst_mf_source_object_set_property;
g_object_class_install_property (gobject_class, PROP_DEVICE_PATH,
g_param_spec_string ("device-path", "Device Path",
"The device path", DEFAULT_DEVICE_PATH,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
g_param_spec_string ("device-name", "Device Name",
"The human-readable device name", DEFAULT_DEVICE_NAME,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
g_param_spec_int ("device-index", "Device Index",
"The zero-based device index", -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_SOURCE_TYPE,
g_param_spec_enum ("source-type", "Source Type",
"Source Type", GST_TYPE_MF_SOURCE_TYPE,
DEFAULT_SOURCE_TYPE,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS));
}
static void
gst_mf_source_object_init (GstMFSourceObject * self)
{
self->device_index = DEFAULT_DEVICE_INDEX;
self->soure_type = DEFAULT_SOURCE_TYPE;
}
static void
gst_mf_source_object_finalize (GObject * object)
{
GstMFSourceObject *self = GST_MF_SOURCE_OBJECT (object);
g_free (self->device_path);
g_free (self->device_name);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_mf_source_object_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstMFSourceObject *self = GST_MF_SOURCE_OBJECT (object);
switch (prop_id) {
case PROP_DEVICE_PATH:
g_value_set_string (value, self->device_path);
break;
case PROP_DEVICE_NAME:
g_value_set_string (value, self->device_name);
break;
case PROP_DEVICE_INDEX:
g_value_set_int (value, self->device_index);
break;
case PROP_SOURCE_TYPE:
g_value_set_enum (value, self->soure_type);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_mf_source_object_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstMFSourceObject *self = GST_MF_SOURCE_OBJECT (object);
switch (prop_id) {
case PROP_DEVICE_PATH:
g_free (self->device_path);
self->device_path = g_value_dup_string (value);
break;
case PROP_DEVICE_NAME:
g_free (self->device_name);
self->device_name = g_value_dup_string (value);
break;
case PROP_DEVICE_INDEX:
self->device_index = g_value_get_int (value);
break;
case PROP_SOURCE_TYPE:
self->soure_type = g_value_get_enum (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
gboolean
gst_mf_source_object_start (GstMFSourceObject * object)
{
GstMFSourceObjectClass *klass;
g_return_val_if_fail (GST_IS_MF_SOURCE_OBJECT (object), FALSE);
klass = GST_MF_SOURCE_OBJECT_GET_CLASS (object);
g_assert (klass->start != NULL);
return klass->start (object);
}
gboolean
gst_mf_source_object_stop (GstMFSourceObject * object)
{
GstMFSourceObjectClass *klass;
g_return_val_if_fail (GST_IS_MF_SOURCE_OBJECT (object), FALSE);
klass = GST_MF_SOURCE_OBJECT_GET_CLASS (object);
g_assert (klass->stop != NULL);
return klass->stop (object);
}
GstFlowReturn
gst_mf_source_object_fill (GstMFSourceObject * object, GstBuffer * buffer)
{
GstMFSourceObjectClass *klass;
g_return_val_if_fail (GST_IS_MF_SOURCE_OBJECT (object), GST_FLOW_ERROR);
g_return_val_if_fail (GST_IS_BUFFER (buffer), GST_FLOW_ERROR);
klass = GST_MF_SOURCE_OBJECT_GET_CLASS (object);
g_assert (klass->fill != NULL);
return klass->fill (object, buffer);
}
void
gst_mf_source_object_set_flushing (GstMFSourceObject * object,
gboolean flushing)
{
GstMFSourceObjectClass *klass;
g_return_if_fail (GST_IS_MF_SOURCE_OBJECT (object));
klass = GST_MF_SOURCE_OBJECT_GET_CLASS (object);
if (flushing) {
if (klass->unlock)
klass->unlock (object);
} else {
if (klass->unlock_stop)
klass->unlock_stop (object);
}
}
gboolean
gst_mf_source_object_set_caps (GstMFSourceObject * object, GstCaps * caps)
{
GstMFSourceObjectClass *klass;
g_return_val_if_fail (GST_IS_MF_SOURCE_OBJECT (object), FALSE);
klass = GST_MF_SOURCE_OBJECT_GET_CLASS (object);
g_assert (klass->set_caps != NULL);
return klass->set_caps (object, caps);
}
GstCaps *
gst_mf_source_object_get_caps (GstMFSourceObject * object)
{
GstMFSourceObjectClass *klass;
g_return_val_if_fail (GST_IS_MF_SOURCE_OBJECT (object), NULL);
klass = GST_MF_SOURCE_OBJECT_GET_CLASS (object);
g_assert (klass->get_caps != NULL);
return klass->get_caps (object);
}
gboolean
gst_mf_source_enum_device_activate (GstMFSourceType source_type,
GList ** device_sources)
{
HRESULT hr;
GList *ret = NULL;
IMFAttributes *attr = NULL;
IMFActivate **devices = NULL;
UINT32 i, count = 0;
hr = MFCreateAttributes (&attr, 1);
if (!gst_mf_result (hr)) {
return FALSE;
}
switch (source_type) {
case GST_MF_SOURCE_TYPE_VIDEO:
hr = IMFAttributes_SetGUID (attr, &MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
&MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
break;
default:
GST_ERROR ("Unknown source type %d", source_type);
return FALSE;
}
if (!gst_mf_result (hr))
return FALSE;
hr = MFEnumDeviceSources (attr, &devices, &count);
if (!gst_mf_result (hr)) {
IMFAttributes_Release (attr);
return FALSE;
}
for (i = 0; i < count; i++) {
GstMFDeviceActivate *entry;
LPWSTR name;
UINT32 name_len;
IMFActivate *activate = devices[i];
switch (source_type) {
case GST_MF_SOURCE_TYPE_VIDEO:
hr = IMFActivate_GetAllocatedString (activate,
&MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK,
&name, &name_len);
break;
default:
g_assert_not_reached ();
goto done;
}
entry = g_new0 (GstMFDeviceActivate, 1);
entry->index = i;
entry->handle = activate;
if (gst_mf_result (hr)) {
entry->path = g_utf16_to_utf8 ((const gunichar2 *) name,
-1, NULL, NULL, NULL);
CoTaskMemFree (name);
}
hr = IMFActivate_GetAllocatedString (activate,
&MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &name, &name_len);
if (gst_mf_result (hr)) {
entry->name = g_utf16_to_utf8 ((const gunichar2 *) name,
-1, NULL, NULL, NULL);
CoTaskMemFree (name);
}
ret = g_list_prepend (ret, entry);
}
done:
ret = g_list_reverse (ret);
CoTaskMemFree (devices);
*device_sources = ret;
return ! !ret;
}
void
gst_mf_device_activate_free (GstMFDeviceActivate * activate)
{
g_return_if_fail (activate != NULL);
if (activate->handle)
IMFActivate_Release (activate->handle);
g_free (activate->name);
g_free (activate->path);
g_free (activate);
}


@@ -0,0 +1,114 @@
/* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_MF_SOURCE_OBJECT_H__
#define __GST_MF_SOURCE_OBJECT_H__
#include <gst/gst.h>
#include "gstmfutils.h"
G_BEGIN_DECLS
#define GST_TYPE_MF_SOURCE_OBJECT (gst_mf_source_object_get_type())
#define GST_MF_SOURCE_OBJECT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_MF_SOURCE_OBJECT, GstMFSourceObject))
#define GST_MF_SOURCE_OBJECT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_MF_SOURCE_OBJECT, GstMFSourceObjectClass))
#define GST_IS_MF_SOURCE_OBJECT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_MF_SOURCE_OBJECT))
#define GST_IS_MF_SOURCE_OBJECT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_MF_SOURCE_OBJECT))
#define GST_MF_SOURCE_OBJECT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_MF_SOURCE_OBJECT, GstMFSourceObjectClass))
typedef struct _GstMFSourceObject GstMFSourceObject;
typedef struct _GstMFSourceObjectClass GstMFSourceObjectClass;
typedef enum
{
GST_MF_SOURCE_TYPE_VIDEO,
} GstMFSourceType;
#define GST_TYPE_MF_SOURCE_TYPE (gst_mf_source_type_get_type())
GType gst_mf_source_type_get_type (void);
typedef struct
{
IMFActivate *handle;
guint index;
gchar *name;
gchar *path;
} GstMFDeviceActivate;
struct _GstMFSourceObject
{
GstObject parent;
GstMFSourceType soure_type;
gchar *device_path;
gchar *device_name;
gint device_index;
};
struct _GstMFSourceObjectClass
{
GstObjectClass parent_class;
gboolean (*start) (GstMFSourceObject * object);
gboolean (*stop) (GstMFSourceObject * object);
GstFlowReturn (*fill) (GstMFSourceObject * object,
GstBuffer * buffer);
gboolean (*unlock) (GstMFSourceObject * object);
gboolean (*unlock_stop) (GstMFSourceObject * object);
GstCaps * (*get_caps) (GstMFSourceObject * object);
gboolean (*set_caps) (GstMFSourceObject * object,
GstCaps * caps);
};
GType gst_mf_source_object_get_type (void);
gboolean gst_mf_source_object_start (GstMFSourceObject * object);
gboolean gst_mf_source_object_stop (GstMFSourceObject * object);
GstFlowReturn gst_mf_source_object_fill (GstMFSourceObject * object,
GstBuffer * buffer);
void gst_mf_source_object_set_flushing (GstMFSourceObject * object,
gboolean flushing);
GstCaps * gst_mf_source_object_get_caps (GstMFSourceObject * object);
gboolean gst_mf_source_object_set_caps (GstMFSourceObject * object,
GstCaps * caps);
/* Utils */
gboolean gst_mf_source_enum_device_activate (GstMFSourceType source_type,
GList ** device_activates);
void gst_mf_device_activate_free (GstMFDeviceActivate * activate);
G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstMFSourceObject, gst_object_unref)
G_END_DECLS
#endif /* __GST_MF_SOURCE_OBJECT_H__ */


@@ -0,0 +1,557 @@
/* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/video/video.h>
#include "gstmfsourcereader.h"
#include <string.h>
#include <wrl.h>
using namespace Microsoft::WRL;
extern "C" {
GST_DEBUG_CATEGORY_EXTERN (gst_mf_source_object_debug);
#define GST_CAT_DEFAULT gst_mf_source_object_debug
}
typedef struct _GstMFStreamMediaType
{
IMFMediaType *media_type;
/* the stream index of media type */
guint stream_index;
/* the media type index within the stream */
guint media_type_index;
GstCaps *caps;
} GstMFStreamMediaType;
struct _GstMFSourceReader
{
GstMFSourceObject parent;
GMutex lock;
/* protected by lock */
GQueue *queue;
IMFMediaSource *source;
IMFSourceReader *reader;
GstCaps *supported_caps;
GList *media_types;
GstMFStreamMediaType *cur_type;
GstVideoInfo info;
gboolean flushing;
};
static void gst_mf_source_reader_constructed (GObject * object);
static void gst_mf_source_reader_finalize (GObject * object);
static gboolean gst_mf_source_reader_start (GstMFSourceObject * object);
static gboolean gst_mf_source_reader_stop (GstMFSourceObject * object);
static GstFlowReturn gst_mf_source_reader_fill (GstMFSourceObject * object,
GstBuffer * buffer);
static gboolean gst_mf_source_reader_unlock (GstMFSourceObject * object);
static gboolean gst_mf_source_reader_unlock_stop (GstMFSourceObject * object);
static GstCaps * gst_mf_source_reader_get_caps (GstMFSourceObject * object);
static gboolean gst_mf_source_reader_set_caps (GstMFSourceObject * object,
GstCaps * caps);
#define gst_mf_source_reader_parent_class parent_class
G_DEFINE_TYPE (GstMFSourceReader, gst_mf_source_reader,
GST_TYPE_MF_SOURCE_OBJECT);
static void
gst_mf_source_reader_class_init (GstMFSourceReaderClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstMFSourceObjectClass *source_class = GST_MF_SOURCE_OBJECT_CLASS (klass);
gobject_class->constructed = gst_mf_source_reader_constructed;
gobject_class->finalize = gst_mf_source_reader_finalize;
source_class->start = GST_DEBUG_FUNCPTR (gst_mf_source_reader_start);
source_class->stop = GST_DEBUG_FUNCPTR (gst_mf_source_reader_stop);
source_class->fill = GST_DEBUG_FUNCPTR (gst_mf_source_reader_fill);
source_class->unlock = GST_DEBUG_FUNCPTR (gst_mf_source_reader_unlock);
source_class->unlock_stop =
GST_DEBUG_FUNCPTR (gst_mf_source_reader_unlock_stop);
source_class->get_caps = GST_DEBUG_FUNCPTR (gst_mf_source_reader_get_caps);
source_class->set_caps = GST_DEBUG_FUNCPTR (gst_mf_source_reader_set_caps);
}
static void
gst_mf_source_reader_init (GstMFSourceReader * self)
{
self->queue = g_queue_new ();
g_mutex_init (&self->lock);
CoInitializeEx (NULL, COINIT_MULTITHREADED);
}
static gboolean
gst_mf_enum_media_type_from_source_reader (IMFSourceReader * source_reader,
GList ** media_types)
{
gint i, j;
HRESULT hr;
GList *list = NULL;
g_return_val_if_fail (source_reader != NULL, FALSE);
g_return_val_if_fail (media_types != NULL, FALSE);
for (i = 0;; i++) {
for (j = 0;; j++) {
ComPtr<IMFMediaType> media_type;
hr = source_reader->GetNativeMediaType (i, j, &media_type);
if (SUCCEEDED (hr)) {
GstMFStreamMediaType *mtype;
GstCaps *caps = NULL;
caps = gst_mf_media_type_to_caps (media_type.Get ());
/* unknown format */
if (!caps)
continue;
mtype = g_new0 (GstMFStreamMediaType, 1);
mtype->media_type = media_type.Detach ();
mtype->stream_index = i;
mtype->media_type_index = j;
mtype->caps = caps;
GST_DEBUG ("StreamIndex %d, MediaTypeIndex %d, %" GST_PTR_FORMAT,
i, j, caps);
list = g_list_prepend (list, mtype);
} else if (hr == MF_E_NO_MORE_TYPES) {
/* no more media type in this stream index, try next stream index */
break;
} else if (hr == MF_E_INVALIDSTREAMNUMBER) {
/* no more streams and media types */
goto done;
} else {
/* undefined return */
goto done;
}
}
}
done:
list = g_list_reverse (list);
*media_types = list;
return ! !list;
}
static void
gst_mf_stream_media_type_free (GstMFStreamMediaType * media_type)
{
g_return_if_fail (media_type != NULL);
if (media_type->media_type)
media_type->media_type->Release ();
if (media_type->caps)
gst_caps_unref (media_type->caps);
g_free (media_type);
}
static gboolean
gst_mf_source_reader_create (GstMFSourceReader * self, IMFActivate * activate)
{
GList *iter;
HRESULT hr;
ComPtr<IMFSourceReader> reader;
ComPtr<IMFMediaSource> source;
ComPtr<IMFAttributes> attr;
hr = activate->ActivateObject (IID_IMFMediaSource, (void **) &source);
if (!gst_mf_result (hr))
return FALSE;
hr = MFCreateAttributes (&attr, 2);
if (!gst_mf_result (hr))
return FALSE;
hr = attr->SetUINT32 (MF_READWRITE_DISABLE_CONVERTERS, TRUE);
if (!gst_mf_result (hr))
return FALSE;
hr = MFCreateSourceReaderFromMediaSource (source.Get (),
attr.Get (), &reader);
if (!gst_mf_result (hr))
return FALSE;
if (!gst_mf_enum_media_type_from_source_reader (reader.Get (),
&self->media_types)) {
GST_ERROR_OBJECT (self, "No available media types");
source->Shutdown ();
return FALSE;
}
self->source = source.Detach ();
self->reader = reader.Detach ();
for (iter = self->media_types; iter; iter = g_list_next (iter)) {
GstMFStreamMediaType *mtype = (GstMFStreamMediaType *) iter->data;
if (!self->supported_caps)
self->supported_caps = gst_caps_ref (mtype->caps);
else
self->supported_caps =
gst_caps_merge (self->supported_caps, gst_caps_ref (mtype->caps));
}
GST_DEBUG_OBJECT (self, "Available output caps %" GST_PTR_FORMAT,
self->supported_caps);
return TRUE;
}
static void
gst_mf_source_reader_constructed (GObject * object)
{
GstMFSourceObject *source = GST_MF_SOURCE_OBJECT (object);
GstMFSourceReader *self = GST_MF_SOURCE_READER (object);
GList *activate_list = NULL;
GstMFDeviceActivate *target = NULL;
GList *iter;
if (!gst_mf_source_enum_device_activate (source->soure_type, &activate_list)) {
GST_WARNING_OBJECT (self, "No available video capture device");
return;
}
#ifndef GST_DISABLE_GST_DEBUG
for (iter = activate_list; iter; iter = g_list_next (iter)) {
GstMFDeviceActivate *activate = (GstMFDeviceActivate *) iter->data;
GST_DEBUG_OBJECT (self, "device %d, name: \"%s\", path: \"%s\"",
activate->index, GST_STR_NULL (activate->name),
GST_STR_NULL (activate->path));
}
#endif
for (iter = activate_list; iter; iter = g_list_next (iter)) {
GstMFDeviceActivate *activate = (GstMFDeviceActivate *) iter->data;
gboolean match;
if (source->device_path && strlen (source->device_path) > 0) {
match = g_ascii_strcasecmp (activate->path, source->device_path) == 0;
} else if (source->device_name && strlen (source->device_name) > 0) {
match = g_ascii_strcasecmp (activate->name, source->device_name) == 0;
} else if (source->device_index >= 0) {
match = activate->index == source->device_index;
} else {
/* pick the first entry */
match = TRUE;
}
if (match) {
target = activate;
break;
}
}
if (target)
gst_mf_source_reader_create (self, target->handle);
if (activate_list)
g_list_free_full (activate_list,
(GDestroyNotify) gst_mf_device_activate_free);
}
static void
release_mf_buffer (IMFMediaBuffer * buffer)
{
if (buffer)
buffer->Release ();
}
static void
gst_mf_source_reader_finalize (GObject * object)
{
GstMFSourceReader *self = GST_MF_SOURCE_READER (object);
gst_clear_caps (&self->supported_caps);
if (self->media_types)
g_list_free_full (self->media_types,
(GDestroyNotify) gst_mf_stream_media_type_free);
gst_mf_source_reader_stop (GST_MF_SOURCE_OBJECT (self));
g_queue_free_full (self->queue, (GDestroyNotify) release_mf_buffer);
if (self->reader)
self->reader->Release ();
if (self->source) {
self->source->Shutdown ();
self->source->Release ();
}
g_mutex_clear (&self->lock);
CoUninitialize ();
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static gboolean
gst_mf_source_reader_start (GstMFSourceObject * object)
{
GstMFSourceReader *self = GST_MF_SOURCE_READER (object);
HRESULT hr;
GstMFStreamMediaType *type;
if (!self->cur_type) {
GST_ERROR_OBJECT (self, "MediaType wasn't specified");
return FALSE;
}
type = self->cur_type;
hr = type->media_type->SetUINT32 (MF_MT_DEFAULT_STRIDE,
GST_VIDEO_INFO_PLANE_STRIDE (&self->info, 0));
if (!gst_mf_result (hr))
return FALSE;
hr = self->reader->SetStreamSelection (type->stream_index, TRUE);
if (!gst_mf_result (hr))
return FALSE;
hr = self->reader->SetCurrentMediaType (type->stream_index,
NULL, type->media_type);
if (!gst_mf_result (hr))
return FALSE;
return TRUE;
}
static gboolean
gst_mf_source_reader_stop (GstMFSourceObject * object)
{
return TRUE;
}
static GstFlowReturn
gst_mf_source_reader_read_sample (GstMFSourceReader * self)
{
HRESULT hr;
DWORD count = 0, i;
DWORD stream_flags = 0;
GstMFStreamMediaType *type = self->cur_type;
ComPtr<IMFSample> sample;
hr = self->reader->ReadSample (type->stream_index, 0, NULL, &stream_flags,
NULL, &sample);
if (!gst_mf_result (hr))
return GST_FLOW_ERROR;
if ((stream_flags & MF_SOURCE_READERF_ERROR) == MF_SOURCE_READERF_ERROR)
return GST_FLOW_ERROR;
if (!sample)
return GST_FLOW_OK;
hr = sample->GetBufferCount (&count);
if (!gst_mf_result (hr) || !count)
return GST_FLOW_OK;
for (i = 0; i < count; i++) {
IMFMediaBuffer *buffer = NULL;
hr = sample->GetBufferByIndex (i, &buffer);
if (!gst_mf_result (hr) || !buffer)
continue;
g_queue_push_tail (self->queue, buffer);
}
return GST_FLOW_OK;
}
static GstFlowReturn
gst_mf_source_reader_fill (GstMFSourceObject * object, GstBuffer * buffer)
{
GstMFSourceReader *self = GST_MF_SOURCE_READER (object);
GstFlowReturn ret = GST_FLOW_OK;
HRESULT hr;
GstVideoFrame frame;
BYTE *data;
gint i, j;
ComPtr<IMFMediaBuffer> media_buffer;
while (g_queue_is_empty (self->queue)) {
ret = gst_mf_source_reader_read_sample (self);
if (ret != GST_FLOW_OK)
return ret;
g_mutex_lock (&self->lock);
if (self->flushing) {
g_mutex_unlock (&self->lock);
return GST_FLOW_FLUSHING;
}
g_mutex_unlock (&self->lock);
}
media_buffer.Attach ((IMFMediaBuffer *) g_queue_pop_head (self->queue));
hr = media_buffer->Lock (&data, NULL, NULL);
if (!gst_mf_result (hr)) {
GST_ERROR_OBJECT (self, "Failed to lock media buffer");
return GST_FLOW_ERROR;
}
if (!gst_video_frame_map (&frame, &self->info, buffer, GST_MAP_WRITE)) {
GST_ERROR_OBJECT (self, "Failed to map buffer");
media_buffer->Unlock ();
return GST_FLOW_ERROR;
}
for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->info); i++) {
guint8 *src, *dst;
gint src_stride, dst_stride;
gint width;
src = data + GST_VIDEO_INFO_PLANE_OFFSET (&self->info, i);
dst = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&frame, i);
src_stride = GST_VIDEO_INFO_PLANE_STRIDE (&self->info, i);
dst_stride = GST_VIDEO_FRAME_PLANE_STRIDE (&frame, i);
width = GST_VIDEO_INFO_COMP_WIDTH (&self->info, i)
* GST_VIDEO_INFO_COMP_PSTRIDE (&self->info, i);
for (j = 0; j < GST_VIDEO_INFO_COMP_HEIGHT (&self->info, i); j++) {
memcpy (dst, src, width);
src += src_stride;
dst += dst_stride;
}
}
gst_video_frame_unmap (&frame);
media_buffer->Unlock ();
return ret;
}
static gboolean
gst_mf_source_reader_unlock (GstMFSourceObject * object)
{
GstMFSourceReader *self = GST_MF_SOURCE_READER (object);
g_mutex_lock (&self->lock);
self->flushing = TRUE;
g_mutex_unlock (&self->lock);
return TRUE;
}
static gboolean
gst_mf_source_reader_unlock_stop (GstMFSourceObject * object)
{
GstMFSourceReader *self = GST_MF_SOURCE_READER (object);
g_mutex_lock (&self->lock);
self->flushing = FALSE;
g_mutex_unlock (&self->lock);
return TRUE;
}
static GstCaps *
gst_mf_source_reader_get_caps (GstMFSourceObject * object)
{
GstMFSourceReader *self = GST_MF_SOURCE_READER (object);
if (self->supported_caps)
return gst_caps_ref (self->supported_caps);
return NULL;
}
static gboolean
gst_mf_source_reader_set_caps (GstMFSourceObject * object, GstCaps * caps)
{
GstMFSourceReader *self = GST_MF_SOURCE_READER (object);
GList *iter;
GstMFStreamMediaType *best_type = NULL;
for (iter = self->media_types; iter; iter = g_list_next (iter)) {
GstMFStreamMediaType *minfo = (GstMFStreamMediaType *) iter->data;
if (gst_caps_is_subset (minfo->caps, caps)) {
best_type = minfo;
break;
}
}
if (!best_type) {
GST_ERROR_OBJECT (self,
"Could not determine target media type with given caps %"
GST_PTR_FORMAT, caps);
return FALSE;
}
self->cur_type = best_type;
gst_video_info_from_caps (&self->info, best_type->caps);
return TRUE;
}
GstMFSourceObject *
gst_mf_source_reader_new (GstMFSourceType type, gint device_index,
const gchar * device_name, const gchar * device_path)
{
GstMFSourceReader *self;
gchar *name;
gchar *path;
/* TODO: add more types */
g_return_val_if_fail (type == GST_MF_SOURCE_TYPE_VIDEO, NULL);
name = device_name ? g_strdup (device_name) : g_strdup ("");
path = device_path ? g_strdup (device_path) : g_strdup ("");
self = (GstMFSourceReader *) g_object_new (GST_TYPE_MF_SOURCE_READER,
"source-type", type, "device-index", device_index, "device-name", name,
"device-path", path, NULL);
gst_object_ref_sink (self);
g_free (name);
g_free (path);
if (!self->source) {
gst_clear_object (&self);
return NULL;
}
return GST_MF_SOURCE_OBJECT (self);
}


@@ -0,0 +1,40 @@
/* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_MF_SOURCE_READER_H__
#define __GST_MF_SOURCE_READER_H__
#include <gst/gst.h>
#include "gstmfsourceobject.h"
G_BEGIN_DECLS
#define GST_TYPE_MF_SOURCE_READER (gst_mf_source_reader_get_type())
G_DECLARE_FINAL_TYPE (GstMFSourceReader, gst_mf_source_reader,
GST, MF_SOURCE_READER, GstMFSourceObject);
GstMFSourceObject * gst_mf_source_reader_new (GstMFSourceType type,
gint device_index,
const gchar * device_name,
const gchar * device_path);
G_END_DECLS
#endif /* __GST_MF_SOURCE_READER_H__ */


@@ -0,0 +1,389 @@
/* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstmfutils.h"
#include <wrl.h>
using namespace Microsoft::WRL;
extern "C" {
GST_DEBUG_CATEGORY_EXTERN (gst_mf_utils_debug);
#define GST_CAT_DEFAULT gst_mf_utils_debug
}
#define MAKE_RAW_FORMAT_CAPS(format) \
"video/x-raw, format = (string) " format
static struct
{
const GUID &mf_format;
const gchar *caps_string;
GstVideoFormat format;
} raw_video_format_map[] = {
{MFVideoFormat_RGB32, MAKE_RAW_FORMAT_CAPS ("BGRx"), GST_VIDEO_FORMAT_BGRx},
{MFVideoFormat_ARGB32, MAKE_RAW_FORMAT_CAPS ("BGRA"), GST_VIDEO_FORMAT_BGRA},
{MFVideoFormat_RGB24, MAKE_RAW_FORMAT_CAPS ("BGR"), GST_VIDEO_FORMAT_BGR},
{MFVideoFormat_RGB555, MAKE_RAW_FORMAT_CAPS ("RGB15"), GST_VIDEO_FORMAT_RGB15},
{MFVideoFormat_RGB565, MAKE_RAW_FORMAT_CAPS ("RGB16"), GST_VIDEO_FORMAT_RGB16},
{MFVideoFormat_AYUV, MAKE_RAW_FORMAT_CAPS ("VUYA"), GST_VIDEO_FORMAT_VUYA},
{MFVideoFormat_YUY2, MAKE_RAW_FORMAT_CAPS ("YUY2"), GST_VIDEO_FORMAT_YUY2},
{MFVideoFormat_YVYU, MAKE_RAW_FORMAT_CAPS ("YVYU"), GST_VIDEO_FORMAT_YVYU},
{MFVideoFormat_UYVY, MAKE_RAW_FORMAT_CAPS ("UYVY"), GST_VIDEO_FORMAT_UYVY},
{MFVideoFormat_NV12, MAKE_RAW_FORMAT_CAPS ("NV12"), GST_VIDEO_FORMAT_NV12},
{MFVideoFormat_YV12, MAKE_RAW_FORMAT_CAPS ("YV12"), GST_VIDEO_FORMAT_YV12},
{MFVideoFormat_I420, MAKE_RAW_FORMAT_CAPS ("I420"), GST_VIDEO_FORMAT_I420},
{MFVideoFormat_IYUV, MAKE_RAW_FORMAT_CAPS ("I420"), GST_VIDEO_FORMAT_I420},
{MFVideoFormat_P010, MAKE_RAW_FORMAT_CAPS ("P010_10LE"), GST_VIDEO_FORMAT_P010_10LE},
{MFVideoFormat_P016, MAKE_RAW_FORMAT_CAPS ("P016_LE"), GST_VIDEO_FORMAT_P016_LE},
{MFVideoFormat_v210, MAKE_RAW_FORMAT_CAPS ("v210"), GST_VIDEO_FORMAT_v210},
{MFVideoFormat_v216, MAKE_RAW_FORMAT_CAPS ("v216"), GST_VIDEO_FORMAT_v216},
};
static struct
{
const GUID &mf_format;
const gchar *caps_string;
} encoded_video_format_map[] = {
{MFVideoFormat_H264, "video/x-h264"},
{MFVideoFormat_HEVC, "video/x-h265"},
{MFVideoFormat_H265, "video/x-h265"},
{MFVideoFormat_VP80, "video/x-vp8"},
{MFVideoFormat_VP90, "video/x-vp9"},
};
GstVideoFormat
gst_mf_video_subtype_to_video_format (const GUID * subtype)
{
gint i;
for (i = 0; i < G_N_ELEMENTS (raw_video_format_map); i++) {
if (IsEqualGUID (raw_video_format_map[i].mf_format, *subtype))
return raw_video_format_map[i].format;
}
return GST_VIDEO_FORMAT_UNKNOWN;
}
const GUID *
gst_mf_video_subtype_from_video_format (GstVideoFormat format)
{
gint i;
for (i = 0; i < G_N_ELEMENTS (raw_video_format_map); i++) {
if (raw_video_format_map[i].format == format)
return &raw_video_format_map[i].mf_format;
}
return NULL;
}
static GstCaps *
gst_mf_media_type_to_video_caps (IMFMediaType * media_type)
{
HRESULT hr;
GstCaps *caps = NULL;
gint i;
guint32 width = 0;
guint32 height = 0;
guint32 num, den;
guint32 val;
gchar *str;
GUID subtype;
GstVideoChromaSite chroma_site;
GstVideoColorimetry colorimetry;
gboolean raw_format = TRUE;
hr = media_type->GetGUID (MF_MT_SUBTYPE, &subtype);
if (FAILED (hr)) {
GST_WARNING ("Failed to get subtype, hr: 0x%x", (guint) hr);
return NULL;
}
for (i = 0; i < G_N_ELEMENTS (raw_video_format_map); i++) {
if (IsEqualGUID (raw_video_format_map[i].mf_format, subtype)) {
caps = gst_caps_from_string (raw_video_format_map[i].caps_string);
break;
}
}
if (!caps) {
for (i = 0; i < G_N_ELEMENTS (encoded_video_format_map); i++) {
if (IsEqualGUID (encoded_video_format_map[i].mf_format, subtype)) {
caps = gst_caps_from_string (encoded_video_format_map[i].caps_string);
raw_format = FALSE;
break;
}
}
}
if (!caps) {
GST_WARNING ("Unknown format %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (subtype.Data1));
return NULL;
}
if (raw_format) {
hr = MFGetAttributeSize (media_type, MF_MT_FRAME_SIZE, &width, &height);
if (FAILED (hr) || !width || !height) {
GST_WARNING ("Couldn't get frame size, hr: 0x%x", (guint) hr);
gst_caps_unref (caps);
return NULL;
}
}
if (width > 0 && height > 0) {
gst_caps_set_simple (caps, "width", G_TYPE_INT, width,
"height", G_TYPE_INT, height, NULL);
}
hr = MFGetAttributeRatio (media_type, MF_MT_FRAME_RATE, &num, &den);
if (SUCCEEDED (hr) && num > 0 && den > 0)
gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, num, den, NULL);
hr = MFGetAttributeRatio (media_type, MF_MT_PIXEL_ASPECT_RATIO, &num, &den);
if (SUCCEEDED (hr) && num > 0 && den > 0)
gst_caps_set_simple (caps,
"pixel-aspect-ratio", GST_TYPE_FRACTION, num, den, NULL);
colorimetry.range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
hr = media_type->GetUINT32 (MF_MT_VIDEO_NOMINAL_RANGE, &val);
if (SUCCEEDED (hr)) {
switch (val) {
case MFNominalRange_0_255:
colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
break;
case MFNominalRange_16_235:
colorimetry.range = GST_VIDEO_COLOR_RANGE_16_235;
break;
default:
break;
}
}
hr = media_type->GetUINT32 (MF_MT_VIDEO_PRIMARIES, &val);
if (SUCCEEDED (hr)) {
switch (val) {
case MFVideoPrimaries_BT709:
colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
break;
case MFVideoPrimaries_BT470_2_SysM:
colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT470M;
break;
case MFVideoPrimaries_BT470_2_SysBG:
colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT470BG;
break;
case MFVideoPrimaries_SMPTE170M:
colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
break;
case MFVideoPrimaries_SMPTE240M:
colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE240M;
break;
case MFVideoPrimaries_EBU3213:
colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_EBU3213;
break;
case MFVideoPrimaries_BT2020:
colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
break;
default:
GST_FIXME ("unhandled color primaries %d", val);
break;
}
}
hr = media_type->GetUINT32 (MF_MT_YUV_MATRIX, &val);
if (SUCCEEDED (hr)) {
switch (val) {
case MFVideoTransferMatrix_BT709:
colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT709;
break;
case MFVideoTransferMatrix_BT601:
colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
break;
case MFVideoTransferMatrix_SMPTE240M:
colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
break;
case MFVideoTransferMatrix_BT2020_10:
case MFVideoTransferMatrix_BT2020_12:
colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
break;
default:
GST_FIXME ("unhandled color matrix %d", val);
break;
}
}
hr = media_type->GetUINT32 (MF_MT_TRANSFER_FUNCTION, &val);
if (SUCCEEDED (hr)) {
switch (val) {
case MFVideoTransFunc_10:
colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA10;
break;
case MFVideoTransFunc_18:
colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA18;
break;
case MFVideoTransFunc_20:
colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA20;
break;
case MFVideoTransFunc_22:
colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA22;
break;
case MFVideoTransFunc_709:
case MFVideoTransFunc_709_sym:
colorimetry.transfer = GST_VIDEO_TRANSFER_BT709;
break;
case MFVideoTransFunc_240M:
colorimetry.transfer = GST_VIDEO_TRANSFER_SMPTE240M;
break;
case MFVideoTransFunc_sRGB:
colorimetry.transfer = GST_VIDEO_TRANSFER_SRGB;
break;
case MFVideoTransFunc_28:
colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA28;
break;
case MFVideoTransFunc_Log_100:
colorimetry.transfer = GST_VIDEO_TRANSFER_LOG100;
break;
case MFVideoTransFunc_Log_316:
colorimetry.transfer = GST_VIDEO_TRANSFER_LOG316;
break;
case MFVideoTransFunc_2020_const:
case MFVideoTransFunc_2020:
colorimetry.transfer = GST_VIDEO_TRANSFER_BT2020_10;
break;
case MFVideoTransFunc_2084:
colorimetry.transfer = GST_VIDEO_TRANSFER_SMPTE2084;
break;
case MFVideoTransFunc_HLG:
colorimetry.transfer = GST_VIDEO_TRANSFER_ARIB_STD_B67;
break;
default:
GST_FIXME ("unhandled color transfer %d", val);
break;
}
}
str = gst_video_colorimetry_to_string (&colorimetry);
if (str) {
gst_caps_set_simple (caps, "colorimetry", G_TYPE_STRING, str, NULL);
g_free (str);
str = NULL;
}
chroma_site = GST_VIDEO_CHROMA_SITE_UNKNOWN;
hr = media_type->GetUINT32 (MF_MT_VIDEO_CHROMA_SITING, &val);
if (SUCCEEDED (hr)) {
GST_LOG ("have chroma site 0x%x", val);
if ((val & MFVideoChromaSubsampling_MPEG2) ==
MFVideoChromaSubsampling_MPEG2) {
chroma_site = GST_VIDEO_CHROMA_SITE_MPEG2;
} else if ((val & MFVideoChromaSubsampling_DV_PAL) ==
MFVideoChromaSubsampling_DV_PAL) {
chroma_site = GST_VIDEO_CHROMA_SITE_DV;
} else if ((val & MFVideoChromaSubsampling_Cosited) ==
MFVideoChromaSubsampling_Cosited) {
chroma_site = GST_VIDEO_CHROMA_SITE_COSITED;
} else {
GST_FIXME ("unhandled chroma site 0x%x", val);
}
}
if (chroma_site != GST_VIDEO_CHROMA_SITE_UNKNOWN)
gst_caps_set_simple (caps, "chroma-site", G_TYPE_STRING,
gst_video_chroma_to_string (chroma_site), NULL);
return caps;
}
GstCaps *
gst_mf_media_type_to_caps (IMFMediaType * media_type)
{
GUID major_type;
HRESULT hr;
g_return_val_if_fail (media_type != NULL, NULL);
hr = media_type->GetMajorType (&major_type);
if (FAILED (hr)) {
GST_WARNING ("failed to get major type, hr: 0x%x", (guint) hr);
return NULL;
}
if (IsEqualGUID (major_type, MFMediaType_Video))
return gst_mf_media_type_to_video_caps (media_type);
return NULL;
}
static gchar *
gst_mf_hr_to_string (HRESULT hr)
{
DWORD flags;
gchar *ret_text;
LPTSTR error_text = NULL;
flags = FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ALLOCATE_BUFFER
| FORMAT_MESSAGE_IGNORE_INSERTS;
FormatMessage (flags, NULL, hr, MAKELANGID (LANG_NEUTRAL, SUBLANG_DEFAULT),
(LPTSTR) & error_text, 0, NULL);
#ifdef UNICODE
ret_text = g_utf16_to_utf8 ((const gunichar2 *) error_text,
-1, NULL, NULL, NULL);
#else
ret_text = g_strdup (error_text);
#endif
LocalFree (error_text);
return ret_text;
}
gboolean
_gst_mf_result (HRESULT hr, GstDebugCategory * cat, const gchar * file,
const gchar * function, gint line)
{
#ifndef GST_DISABLE_GST_DEBUG
gboolean ret = TRUE;
if (FAILED (hr)) {
gchar *error_text = NULL;
error_text = gst_mf_hr_to_string (hr);
gst_debug_log (cat, GST_LEVEL_WARNING, file, function, line,
NULL, "MediaFoundation call failed: 0x%x, %s", (guint) hr, error_text);
g_free (error_text);
ret = FALSE;
}
return ret;
#else
return SUCCEEDED (hr);
#endif
}

View file

@ -0,0 +1,62 @@
/* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_MF_UTILS_H__
#define __GST_MF_UTILS_H__
#include <gst/gst.h>
#include <gst/video/video.h>
#ifndef INITGUID
#include <initguid.h>
#endif
#include <windows.h>
#include <mfidl.h>
#include <mfapi.h>
#include <mfreadwrite.h>
#include <mferror.h>
#include <mfobjects.h>
#include <strmif.h>
G_BEGIN_DECLS
#define GST_MF_VIDEO_FORMATS \
"{ BGRx, BGRA, BGR, RGB15, RGB16, VUYA, YUY2, YVYU, UYVY, NV12, YV12, I420," \
" P010, P016, v210, v216 }"
GstVideoFormat gst_mf_video_subtype_to_video_format (const GUID *subtype);
const GUID * gst_mf_video_subtype_from_video_format (GstVideoFormat format);
GstCaps * gst_mf_media_type_to_caps (IMFMediaType * media_type);
gboolean _gst_mf_result (HRESULT hr,
GstDebugCategory * cat,
const gchar * file,
const gchar * function,
gint line);
#define gst_mf_result(result) \
_gst_mf_result (result, GST_CAT_DEFAULT, __FILE__, GST_FUNCTION, __LINE__)
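/* Illustrative usage of gst_mf_result() (a sketch, not part of the API
 * surface): on failure it logs a warning with the formatted HRESULT message
 * and returns FALSE, so Media Foundation calls can be checked inline, e.g.
 *
 *   hr = MFStartup (MF_VERSION, MFSTARTUP_NOSOCKET);
 *   if (!gst_mf_result (hr))
 *     return FALSE;
 */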
G_END_DECLS
#endif /* __GST_MF_UTILS_H__ */

View file

@ -0,0 +1,382 @@
/* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-mfvideosrc
* @title: mfvideosrc
*
* Provides video capture from capture devices through the Microsoft Media
* Foundation API.
*
* ## Example pipelines
* |[
* gst-launch-1.0 -v mfvideosrc ! fakesink
* ]| Capture from the default video capture device and render to fakesink.
*
* |[
* gst-launch-1.0 -v mfvideosrc device-index=1 ! fakesink
* ]| Capture from the second video device (if available) and render to fakesink.
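*
* |[
* gst-launch-1.0 -v mfvideosrc ! videoconvert ! autovideosink
* ]| Capture from the default device and display it on screen (videoconvert
* and autovideosink are standard GStreamer elements, shown here only as an
* illustrative downstream chain).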
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstmfconfig.h"
#include "gstmfvideosrc.h"
#include "gstmfutils.h"
#include "gstmfsourcereader.h"
#if HAVE_CAPTURE_ENGINE
#include "gstmfcaptureengine.h"
#endif
#include <string.h>
GST_DEBUG_CATEGORY (gst_mf_video_src_debug);
#define GST_CAT_DEFAULT gst_mf_video_src_debug
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (GST_MF_VIDEO_FORMATS))
);
struct _GstMFVideoSrc
{
GstPushSrc parent;
GstMFSourceObject *source;
gboolean started;
GstVideoInfo info;
GstClockTime first_pts;
guint64 n_frames;
/* properties */
gchar *device_path;
gchar *device_name;
gint device_index;
};
enum
{
PROP_0,
PROP_DEVICE_PATH,
PROP_DEVICE_NAME,
PROP_DEVICE_INDEX,
};
#define DEFAULT_DEVICE_PATH NULL
#define DEFAULT_DEVICE_NAME NULL
#define DEFAULT_DEVICE_INDEX -1
static void gst_mf_video_src_finalize (GObject * object);
static void gst_mf_video_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_mf_video_src_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static gboolean gst_mf_video_src_start (GstBaseSrc * src);
static gboolean gst_mf_video_src_stop (GstBaseSrc * src);
static gboolean gst_mf_video_src_set_caps (GstBaseSrc * src, GstCaps * caps);
static GstCaps *gst_mf_video_src_get_caps (GstBaseSrc * src, GstCaps * filter);
static GstCaps *gst_mf_video_src_fixate (GstBaseSrc * src, GstCaps * caps);
static gboolean gst_mf_video_src_unlock (GstBaseSrc * src);
static gboolean gst_mf_video_src_unlock_stop (GstBaseSrc * src);
static GstFlowReturn gst_mf_video_src_fill (GstPushSrc * pushsrc,
GstBuffer * buffer);
#define gst_mf_video_src_parent_class parent_class
G_DEFINE_TYPE (GstMFVideoSrc, gst_mf_video_src, GST_TYPE_PUSH_SRC);
static void
gst_mf_video_src_class_init (GstMFVideoSrcClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS (klass);
GstPushSrcClass *pushsrc_class = GST_PUSH_SRC_CLASS (klass);
gobject_class->finalize = gst_mf_video_src_finalize;
gobject_class->get_property = gst_mf_video_src_get_property;
gobject_class->set_property = gst_mf_video_src_set_property;
g_object_class_install_property (gobject_class, PROP_DEVICE_PATH,
g_param_spec_string ("device-path", "Device Path",
"The device path", DEFAULT_DEVICE_PATH,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
g_param_spec_string ("device-name", "Device Name",
"The human-readable device name", DEFAULT_DEVICE_NAME,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
g_param_spec_int ("device-index", "Device Index",
"The zero-based device index", -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
G_PARAM_STATIC_STRINGS));
gst_element_class_set_static_metadata (element_class,
"Media Foundation Video Source",
"Source/Video/Hardware",
"Capture video stream through Windows Media Foundation",
"Seungha Yang <seungha.yang@navercorp.com>");
gst_element_class_add_static_pad_template (element_class, &src_template);
basesrc_class->start = GST_DEBUG_FUNCPTR (gst_mf_video_src_start);
basesrc_class->stop = GST_DEBUG_FUNCPTR (gst_mf_video_src_stop);
basesrc_class->set_caps = GST_DEBUG_FUNCPTR (gst_mf_video_src_set_caps);
basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_mf_video_src_get_caps);
basesrc_class->fixate = GST_DEBUG_FUNCPTR (gst_mf_video_src_fixate);
basesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_mf_video_src_unlock);
basesrc_class->unlock_stop = GST_DEBUG_FUNCPTR (gst_mf_video_src_unlock_stop);
pushsrc_class->fill = GST_DEBUG_FUNCPTR (gst_mf_video_src_fill);
GST_DEBUG_CATEGORY_INIT (gst_mf_video_src_debug, "mfvideosrc", 0,
"mfvideosrc");
}
static void
gst_mf_video_src_init (GstMFVideoSrc * self)
{
gst_base_src_set_format (GST_BASE_SRC (self), GST_FORMAT_TIME);
gst_base_src_set_live (GST_BASE_SRC (self), TRUE);
gst_base_src_set_do_timestamp (GST_BASE_SRC (self), TRUE);
self->device_index = DEFAULT_DEVICE_INDEX;
}
static void
gst_mf_video_src_finalize (GObject * object)
{
GstMFVideoSrc *self = GST_MF_VIDEO_SRC (object);
g_free (self->device_name);
g_free (self->device_path);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_mf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstMFVideoSrc *self = GST_MF_VIDEO_SRC (object);
switch (prop_id) {
case PROP_DEVICE_PATH:
g_value_set_string (value, self->device_path);
break;
case PROP_DEVICE_NAME:
g_value_set_string (value, self->device_name);
break;
case PROP_DEVICE_INDEX:
g_value_set_int (value, self->device_index);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_mf_video_src_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstMFVideoSrc *self = GST_MF_VIDEO_SRC (object);
switch (prop_id) {
case PROP_DEVICE_PATH:
g_free (self->device_path);
self->device_path = g_value_dup_string (value);
break;
case PROP_DEVICE_NAME:
g_free (self->device_name);
self->device_name = g_value_dup_string (value);
break;
case PROP_DEVICE_INDEX:
self->device_index = g_value_get_int (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static gboolean
gst_mf_video_src_start (GstBaseSrc * src)
{
GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
GST_DEBUG_OBJECT (self, "Start");
#if HAVE_CAPTURE_ENGINE
self->source = gst_mf_capture_engine_new (GST_MF_SOURCE_TYPE_VIDEO,
self->device_index, self->device_name, self->device_path);
#endif
if (!self->source)
self->source = gst_mf_source_reader_new (GST_MF_SOURCE_TYPE_VIDEO,
self->device_index, self->device_name, self->device_path);
self->first_pts = GST_CLOCK_TIME_NONE;
self->n_frames = 0;
return ! !self->source;
}
static gboolean
gst_mf_video_src_stop (GstBaseSrc * src)
{
GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
GST_DEBUG_OBJECT (self, "Stop");
if (self->source) {
gst_mf_source_object_stop (self->source);
gst_object_unref (self->source);
self->source = NULL;
}
self->started = FALSE;
return TRUE;
}
static gboolean
gst_mf_video_src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
GST_DEBUG_OBJECT (self, "Set caps %" GST_PTR_FORMAT, caps);
if (!self->source) {
GST_ERROR_OBJECT (self, "No capture object configured yet");
return FALSE;
}
if (!gst_mf_source_object_set_caps (self->source, caps)) {
GST_ERROR_OBJECT (self, "Capture object couldn't accept caps");
return FALSE;
}
gst_video_info_from_caps (&self->info, caps);
gst_base_src_set_blocksize (src, GST_VIDEO_INFO_SIZE (&self->info));
return TRUE;
}
static GstCaps *
gst_mf_video_src_get_caps (GstBaseSrc * src, GstCaps * filter)
{
GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
GstCaps *caps = NULL;
if (self->source)
caps = gst_mf_source_object_get_caps (self->source);
if (!caps)
caps = gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (src));
if (filter) {
GstCaps *filtered =
gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (caps);
caps = filtered;
}
GST_DEBUG_OBJECT (self, "Returning caps %" GST_PTR_FORMAT, caps);
return caps;
}
static GstCaps *
gst_mf_video_src_fixate (GstBaseSrc * src, GstCaps * caps)
{
GstStructure *structure;
GstCaps *fixated_caps;
gint i;
fixated_caps = gst_caps_make_writable (caps);
for (i = 0; i < gst_caps_get_size (fixated_caps); ++i) {
structure = gst_caps_get_structure (fixated_caps, i);
gst_structure_fixate_field_nearest_int (structure, "width", G_MAXINT);
gst_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);
gst_structure_fixate_field_nearest_fraction (structure, "framerate",
G_MAXINT, 1);
}
fixated_caps = gst_caps_fixate (fixated_caps);
return fixated_caps;
}
static gboolean
gst_mf_video_src_unlock (GstBaseSrc * src)
{
GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
if (self->source)
gst_mf_source_object_set_flushing (self->source, TRUE);
return TRUE;
}
static gboolean
gst_mf_video_src_unlock_stop (GstBaseSrc * src)
{
GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
if (self->source)
gst_mf_source_object_set_flushing (self->source, FALSE);
return TRUE;
}
static GstFlowReturn
gst_mf_video_src_fill (GstPushSrc * pushsrc, GstBuffer * buffer)
{
GstMFVideoSrc *self = GST_MF_VIDEO_SRC (pushsrc);
GstFlowReturn ret = GST_FLOW_OK;
if (!self->started) {
if (!gst_mf_source_object_start (self->source)) {
GST_ERROR_OBJECT (self, "Failed to start capture object");
return GST_FLOW_ERROR;
}
self->started = TRUE;
}
ret = gst_mf_source_object_fill (self->source, buffer);
if (ret != GST_FLOW_OK)
return ret;
GST_BUFFER_OFFSET (buffer) = self->n_frames;
GST_BUFFER_OFFSET_END (buffer) = GST_BUFFER_OFFSET (buffer) + 1;
self->n_frames++;
return GST_FLOW_OK;
}

View file

@ -0,0 +1,36 @@
/* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_MF_VIDEO_SRC_H__
#define __GST_MF_VIDEO_SRC_H__
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/base/gstpushsrc.h>
G_BEGIN_DECLS
#define GST_TYPE_MF_VIDEO_SRC (gst_mf_video_src_get_type())
G_DECLARE_FINAL_TYPE (GstMFVideoSrc, gst_mf_video_src, GST, MF_VIDEO_SRC,
GstPushSrc);
G_END_DECLS
#endif /* __GST_MF_VIDEO_SRC_H__ */

View file

@ -0,0 +1,102 @@
mf_sources = [
'plugin.c',
'gstmfutils.cpp',
'gstmfvideosrc.c',
'gstmfsourceobject.c',
'gstmfsourcereader.cpp',
]
mf_header_deps = [
'mfidl.h',
'mfapi.h',
'mfreadwrite.h',
'mferror.h',
'strmif.h',
'mfobjects.h',
]
winapi_desktop = false
have_capture_engine = false
extra_c_args = ['-DCOBJMACROS']
mf_lib_deps = []
mf_config = configuration_data()
mf_option = get_option('mediafoundation')
if host_system != 'windows' or mf_option.disabled()
subdir_done()
endif
if cc.get_id() != 'msvc'
if mf_option.enabled()
error('mediafoundation plugin can only be built with MSVC')
endif
subdir_done()
endif
mf_lib = cc.find_library('mf', required : mf_option)
mfplat_lib = cc.find_library('mfplat', required : mf_option)
mfreadwrite_lib = cc.find_library('mfreadwrite', required : mf_option)
mfuuid_lib = cc.find_library('mfuuid', required : mf_option)
strmiids_lib = cc.find_library('strmiids', required : mf_option)
have_mf_lib = mf_lib.found() and mfplat_lib.found() and mfreadwrite_lib.found() and mfuuid_lib.found() and strmiids_lib.found()
if not have_mf_lib
if mf_option.enabled()
error('The mediafoundation plugin was enabled explicitly, but required libraries were not found.')
endif
subdir_done()
endif
mf_lib_deps += [mf_lib, mfplat_lib, mfreadwrite_lib, mfuuid_lib, strmiids_lib]
have_mf_header = true
foreach h: mf_header_deps
if have_mf_header
have_mf_header = cc.has_header(h)
endif
endforeach
if not have_mf_header
if mf_option.enabled()
error('The mediafoundation plugin was enabled explicitly, but required headers were not found.')
endif
subdir_done()
endif
winapi_desktop = cxx.compiles('''#include <winapifamily.h>
#include <wrl.h>
#if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
#error "not win32"
#endif''',
dependencies: mf_lib_deps,
name: 'checking if building for Win32')
if not winapi_desktop
if mf_option.enabled()
error('The mediafoundation plugin was enabled explicitly, but the build target is not a desktop app.')
endif
subdir_done()
endif
have_capture_engine = cc.has_header('mfcaptureengine.h')
if have_capture_engine
mf_sources += ['gstmfcaptureengine.cpp']
endif
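# gstmfconfig.h exposes HAVE_CAPTURE_ENGINE to the sources, so the element
# can fall back to IMFSourceReader when IMFCaptureEngine is not available.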
mf_config.set10('HAVE_CAPTURE_ENGINE', have_capture_engine)
configure_file(
output: 'gstmfconfig.h',
configuration: mf_config,
)
gstmediafoundation = library('gstmediafoundation',
mf_sources,
c_args : gst_plugins_bad_args + extra_c_args,
cpp_args : gst_plugins_bad_args,
include_directories : [configinc],
dependencies : [gstbase_dep, gstvideo_dep] + mf_lib_deps,
install : true,
install_dir : plugins_install_dir,
)
pkgconfig.generate(gstmediafoundation, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstmediafoundation]

View file

@ -0,0 +1,62 @@
/* GStreamer
* Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include "gstmfvideosrc.h"
#include "gstmfutils.h"
GST_DEBUG_CATEGORY (gst_mf_debug);
GST_DEBUG_CATEGORY (gst_mf_utils_debug);
GST_DEBUG_CATEGORY (gst_mf_source_object_debug);
#define GST_CAT_DEFAULT gst_mf_debug
static gboolean
plugin_init (GstPlugin * plugin)
{
HRESULT hr;
GST_DEBUG_CATEGORY_INIT (gst_mf_debug, "mf", 0, "media foundation");
GST_DEBUG_CATEGORY_INIT (gst_mf_utils_debug,
"mfutils", 0, "media foundation utility functions");
GST_DEBUG_CATEGORY_INIT (gst_mf_source_object_debug,
"mfsourceobject", 0, "mfsourceobject");
hr = MFStartup (MF_VERSION, MFSTARTUP_NOSOCKET);
if (!gst_mf_result (hr)) {
GST_WARNING ("MFStartup failure, hr: 0x%x", hr);
return TRUE;
}
gst_element_register (plugin,
"mfvideosrc", GST_RANK_SECONDARY, GST_TYPE_MF_VIDEO_SRC);
return TRUE;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
mediafoundation,
"Microsoft Media Foundation plugin",
plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

View file

@ -13,6 +13,7 @@ subdir('fbdev')
subdir('ipcpipeline')
subdir('kms')
subdir('magicleap')
subdir('mediafoundation')
subdir('msdk')
subdir('nvcodec')
subdir('opensles')