Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git (synced 2024-12-23 16:50:47 +00:00)
mfvideosrc: Set timestamp on buffer when it's captured
Capture the timestamp immediately when a new frame arrives, instead of doing so in the ::create() method; otherwise there would be a time gap between the capture time and the time the buffer is pushed downstream.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1351>
This commit is contained in:
parent c58357fb66
commit 47bbc997f8
7 changed files with 349 additions and 76 deletions
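The core of the change, in plain terms: sample the element's running time (pipeline clock time minus the element's base time) in the frame-arrival callback, queue it together with the frame, and reuse it later as the buffer PTS instead of timestamping when the buffer is finally produced. A minimal sketch of that idea follows; it is not the patch itself, and my_on_frame / my_fill_buffer are hypothetical helpers (only gst_element_get_clock(), gst_clock_get_time() and the GST_BUFFER_* macros are real GStreamer API):

#include <gst/gst.h>

/* running time = current pipeline clock time - the element's base time */
static GstClockTime
get_running_time (GstElement * element)
{
  GstClock *clock = gst_element_get_clock (element);
  GstClockTime running_time = GST_CLOCK_TIME_NONE;

  if (clock) {
    running_time = gst_clock_get_time (clock) - element->base_time;
    gst_object_unref (clock);
  }

  return running_time;
}

/* Hypothetical capture callback: record the running time the moment the
 * frame arrives, and keep it next to the queued frame. */
static void
my_on_frame (GstElement * element, GstClockTime * stored_ts)
{
  *stored_ts = get_running_time (element);
}

/* Hypothetical buffer producer: reuse the stored timestamp as the PTS when
 * the buffer is eventually created in create()/fill(). */
static void
my_fill_buffer (GstBuffer * buffer, GstClockTime stored_ts, GstClockTime duration)
{
  GST_BUFFER_PTS (buffer) = stored_ts;
  GST_BUFFER_DTS (buffer) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_DURATION (buffer) = duration;
}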
@@ -21,6 +21,7 @@
 #include "config.h"
 #endif
 
+#include <gst/base/base.h>
 #include <gst/video/video.h>
 #include "gstmfcapturewinrt.h"
 #include "gstmfutils.h"
@@ -61,7 +62,7 @@ struct _GstMFCaptureWinRT
   GMainLoop *loop;
 
   /* protected by lock */
-  GQueue *queue;
+  GstQueueArray *queue;
 
   GstCaps *supported_caps;
   GstVideoInfo info;
@@ -71,6 +72,12 @@ struct _GstMFCaptureWinRT
   gpointer dispatcher;
 };
 
+typedef struct _GstMFCaptureWinRTFrame
+{
+  IMediaFrameReference * frame;
+  GstClockTime clock_time;
+} GstMFCaptureWinRTFrame;
+
 static void gst_mf_capture_winrt_constructed (GObject * object);
 static void gst_mf_capture_winrt_finalize (GObject * object);
 static void gst_mf_capture_winrt_get_property (GObject * object, guint prop_id,
@@ -87,12 +94,14 @@ static gboolean gst_mf_capture_winrt_unlock_stop (GstMFSourceObject * object);
 static GstCaps * gst_mf_capture_winrt_get_caps (GstMFSourceObject * object);
 static gboolean gst_mf_capture_winrt_set_caps (GstMFSourceObject * object,
     GstCaps * caps);
-static HRESULT gst_mf_capture_winrt_on_frame (ISoftwareBitmap * bitmap,
+static HRESULT gst_mf_capture_winrt_on_frame (IMediaFrameReference * frame,
     void * user_data);
 static HRESULT gst_mf_capture_winrt_on_failed (const std::string &error,
     UINT32 error_code, void * user_data);
 
 static gpointer gst_mf_capture_winrt_thread_func (GstMFCaptureWinRT * self);
+static void
+gst_mf_capture_winrt_frame_clear (GstMFCaptureWinRTFrame * winrt_frame);
 
 #define gst_mf_capture_winrt_parent_class parent_class
 G_DEFINE_TYPE (GstMFCaptureWinRT, gst_mf_capture_winrt,
@@ -128,7 +137,10 @@ gst_mf_capture_winrt_class_init (GstMFCaptureWinRTClass * klass)
 static void
 gst_mf_capture_winrt_init (GstMFCaptureWinRT * self)
 {
-  self->queue = g_queue_new ();
+  self->queue =
+      gst_queue_array_new_for_struct (sizeof (GstMFCaptureWinRTFrame), 2);
+  gst_queue_array_set_clear_func (self->queue,
+      (GDestroyNotify) gst_mf_capture_winrt_frame_clear);
   g_mutex_init (&self->lock);
   g_cond_init (&self->cond);
 }
@@ -162,7 +174,7 @@ gst_mf_capture_winrt_finalize (GObject * object)
   g_main_loop_unref (self->loop);
   g_main_context_unref (self->context);
 
-  g_queue_free (self->queue);
+  gst_queue_array_free (self->queue);
   gst_clear_caps (&self->supported_caps);
   g_mutex_clear (&self->lock);
   g_cond_clear (&self->cond);
@@ -361,11 +373,7 @@ gst_mf_capture_winrt_stop (GstMFSourceObject * object)
 
   hr = self->capture->StopCapture();
 
-  while (!g_queue_is_empty (self->queue)) {
-    ISoftwareBitmap *buffer =
-        (ISoftwareBitmap *) g_queue_pop_head (self->queue);
-    buffer->Release ();
-  }
+  gst_queue_array_clear (self->queue);
 
   if (!gst_mf_result (hr)) {
     GST_ERROR_OBJECT (self, "Capture object doesn't want to stop capture");
@@ -376,10 +384,11 @@ gst_mf_capture_winrt_stop (GstMFSourceObject * object)
 }
 
 static HRESULT
-gst_mf_capture_winrt_on_frame (ISoftwareBitmap * bitmap,
+gst_mf_capture_winrt_on_frame (IMediaFrameReference * frame,
     void * user_data)
 {
   GstMFCaptureWinRT *self = GST_MF_CAPTURE_WINRT (user_data);
+  GstMFCaptureWinRTFrame winrt_frame;
 
   g_mutex_lock (&self->lock);
   if (self->flushing) {
@@ -387,8 +396,11 @@ gst_mf_capture_winrt_on_frame (ISoftwareBitmap * bitmap,
     return S_OK;
   }
 
-  g_queue_push_tail (self->queue, bitmap);
-  bitmap->AddRef ();
+  winrt_frame.frame = frame;
+  winrt_frame.clock_time =
+      gst_mf_source_object_get_running_time (GST_MF_SOURCE_OBJECT (self));
+  gst_queue_array_push_tail_struct (self->queue, &winrt_frame);
+  frame->AddRef ();
 
   g_cond_broadcast (&self->cond);
   g_mutex_unlock (&self->lock);
@@ -413,22 +425,19 @@ gst_mf_capture_winrt_on_failed (const std::string &error,
 }
 
 static GstFlowReturn
-gst_mf_capture_winrt_fill (GstMFSourceObject * object, GstBuffer * buffer)
+gst_mf_capture_winrt_get_video_media_frame (GstMFCaptureWinRT * self,
+    IVideoMediaFrame ** media_frame, GstClockTime * timestamp,
+    GstClockTime * duration)
 {
-  GstMFCaptureWinRT *self = GST_MF_CAPTURE_WINRT (object);
-  GstFlowReturn ret = GST_FLOW_OK;
+  GstMFCaptureWinRTFrame *winrt_frame = nullptr;
+  IMediaFrameReference *frame_ref;
   HRESULT hr;
-  GstVideoFrame frame;
-  BYTE *data;
-  UINT32 size;
-  gint i, j;
-  ComPtr<ISoftwareBitmap> bitmap;
-  ComPtr<IBitmapBuffer> bitmap_buffer;
-  ComPtr<IMemoryBuffer> mem_buf;
-  ComPtr<IMemoryBufferReference> mem_ref;
-  ComPtr<Windows::Foundation::IMemoryBufferByteAccess> byte_access;
-  INT32 plane_count;
-  BitmapPlaneDescription desc[GST_VIDEO_MAX_PLANES];
+  ComPtr<IReference<TimeSpan>> winrt_timestamp;
+  TimeSpan winrt_duration;
+
+  *media_frame = nullptr;
+  *timestamp = GST_CLOCK_TIME_NONE;
+  *duration = GST_CLOCK_TIME_NONE;
 
   g_mutex_lock (&self->lock);
   if (self->got_error) {
@@ -441,7 +450,8 @@ gst_mf_capture_winrt_fill (GstMFSourceObject * object, GstBuffer * buffer)
     return GST_FLOW_FLUSHING;
   }
 
-  while (!self->flushing && !self->got_error && g_queue_is_empty (self->queue))
+  while (!self->flushing && !self->got_error &&
+      gst_queue_array_is_empty (self->queue))
     g_cond_wait (&self->cond, &self->lock);
 
   if (self->got_error) {
@@ -454,9 +464,67 @@ gst_mf_capture_winrt_fill (GstMFSourceObject * object, GstBuffer * buffer)
     return GST_FLOW_FLUSHING;
   }
 
-  bitmap.Attach ((ISoftwareBitmap *) g_queue_pop_head (self->queue));
+  winrt_frame =
+      (GstMFCaptureWinRTFrame *) gst_queue_array_pop_head_struct (self->queue);
+
+  frame_ref = winrt_frame->frame;
+  g_assert (frame_ref);
+
+  hr = frame_ref->get_VideoMediaFrame (media_frame);
+  if (!gst_mf_result (hr)) {
+    GST_WARNING_OBJECT (self, "Couldn't get IVideoMediaFrame");
+    *media_frame = nullptr;
+    goto done;
+  }
+
+  hr = frame_ref->get_Duration (&winrt_duration);
+  if (gst_mf_result (hr))
+    *duration = winrt_duration.Duration * 100;
+
+  *timestamp = winrt_frame->clock_time;
+
+done:
+  gst_mf_capture_winrt_frame_clear (winrt_frame);
   g_mutex_unlock (&self->lock);
 
+  return GST_FLOW_OK;
+}
+
+static GstFlowReturn
+gst_mf_capture_winrt_fill (GstMFSourceObject * object, GstBuffer * buffer)
+{
+  GstMFCaptureWinRT *self = GST_MF_CAPTURE_WINRT (object);
+  GstFlowReturn ret = GST_FLOW_OK;
+  HRESULT hr;
+  GstVideoFrame frame;
+  BYTE *data;
+  UINT32 size;
+  gint i, j;
+  ComPtr<IVideoMediaFrame> video_frame;
+  ComPtr<ISoftwareBitmap> bitmap;
+  ComPtr<IBitmapBuffer> bitmap_buffer;
+  ComPtr<IMemoryBuffer> mem_buf;
+  ComPtr<IMemoryBufferReference> mem_ref;
+  ComPtr<Windows::Foundation::IMemoryBufferByteAccess> byte_access;
+  INT32 plane_count;
+  BitmapPlaneDescription desc[GST_VIDEO_MAX_PLANES];
+  GstClockTime timestamp = GST_CLOCK_TIME_NONE;
+  GstClockTime duration = GST_CLOCK_TIME_NONE;
+
+  do {
+    ret = gst_mf_capture_winrt_get_video_media_frame (self,
+        video_frame.ReleaseAndGetAddressOf (), &timestamp, &duration);
+  } while (ret == GST_FLOW_OK && !video_frame);
+
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  hr = video_frame->get_SoftwareBitmap (&bitmap);
+  if (!gst_mf_result (hr)) {
+    GST_ERROR_OBJECT (self, "Couldn't get ISoftwareBitmap");
+    return GST_FLOW_ERROR;
+  }
+
   hr = bitmap->LockBuffer (BitmapBufferAccessMode::BitmapBufferAccessMode_Read,
       &bitmap_buffer);
   if (!gst_mf_result (hr)) {
@@ -544,6 +612,10 @@ gst_mf_capture_winrt_fill (GstMFSourceObject * object, GstBuffer * buffer)
 
   gst_video_frame_unmap (&frame);
 
+  GST_BUFFER_PTS (buffer) = timestamp;
+  GST_BUFFER_DTS (buffer) = GST_CLOCK_TIME_NONE;
+  GST_BUFFER_DURATION (buffer) = duration;
+
   return ret;
 }
 
@@ -630,6 +702,19 @@ gst_mf_capture_winrt_set_caps (GstMFSourceObject * object, GstCaps * caps)
   return TRUE;
 }
 
+static void
+gst_mf_capture_winrt_frame_clear (GstMFCaptureWinRTFrame * winrt_frame)
+{
+  if (!winrt_frame)
+    return;
+
+  if (winrt_frame->frame)
+    winrt_frame->frame->Release ();
+
+  winrt_frame->frame = nullptr;
+  winrt_frame->clock_time = GST_CLOCK_TIME_NONE;
+}
+
 GstMFSourceObject *
 gst_mf_capture_winrt_new (GstMFSourceType type, gint device_index,
     const gchar * device_name, const gchar * device_path, gpointer dispatcher)

@@ -110,6 +110,8 @@ gst_mf_source_object_init (GstMFSourceObject * self)
 {
   self->device_index = DEFAULT_DEVICE_INDEX;
   self->source_type = DEFAULT_SOURCE_TYPE;
+
+  g_weak_ref_init (&self->client, NULL);
 }
 
 static void
@@ -120,6 +122,8 @@ gst_mf_source_object_finalize (GObject * object)
   g_free (self->device_path);
   g_free (self->device_name);
 
+  g_weak_ref_clear (&self->client);
+
   G_OBJECT_CLASS (parent_class)->finalize (object);
 }
 
@@ -274,6 +278,45 @@ gst_mf_source_object_get_caps (GstMFSourceObject * object)
   return klass->get_caps (object);
 }
 
+gboolean
+gst_mf_source_object_set_client (GstMFSourceObject * object,
+    GstElement * client)
+{
+  g_return_val_if_fail (GST_IS_MF_SOURCE_OBJECT (object), FALSE);
+
+  g_weak_ref_set (&object->client, client);
+
+  return TRUE;
+}
+
+GstClockTime
+gst_mf_source_object_get_running_time (GstMFSourceObject * object)
+{
+  GstElement *client = NULL;
+  GstClockTime timestamp = GST_CLOCK_TIME_NONE;
+
+  g_return_val_if_fail (GST_IS_MF_SOURCE_OBJECT (object), GST_CLOCK_TIME_NONE);
+
+  client = (GstElement *) g_weak_ref_get (&object->client);
+  if (client) {
+    GstClockTime basetime = client->base_time;
+    GstClock *clock;
+
+    clock = gst_element_get_clock (client);
+    if (clock) {
+      GstClockTime now;
+
+      now = gst_clock_get_time (clock);
+      timestamp = now - basetime;
+      gst_object_unref (clock);
+    }
+
+    gst_object_unref (client);
+  }
+
+  return timestamp;
+}
+
 static gboolean
 gst_mf_source_object_use_winrt_api (void)
 {

@@ -54,6 +54,8 @@ struct _GstMFSourceObject
   gchar *device_path;
   gchar *device_name;
   gint device_index;
+
+  GWeakRef client;
 };
 
 struct _GstMFSourceObjectClass
@@ -102,6 +104,11 @@ GstCaps * gst_mf_source_object_get_caps (GstMFSourceObject * object);
 gboolean gst_mf_source_object_set_caps (GstMFSourceObject * object,
     GstCaps * caps);
 
+gboolean gst_mf_source_object_set_client (GstMFSourceObject * object,
+    GstElement * element);
+
+GstClockTime gst_mf_source_object_get_running_time (GstMFSourceObject * object);
+
 /* A factory method for subclass impl. selection */
 GstMFSourceObject * gst_mf_source_object_new (GstMFSourceType type,
     gint device_index,

@@ -22,6 +22,7 @@
 #include "config.h"
 #endif
 
+#include <gst/base/base.h>
 #include <gst/video/video.h>
 #include "gstmfsourcereader.h"
 #include <string.h>
@@ -71,7 +72,7 @@ struct _GstMFSourceReader
   GMainLoop *loop;
 
   /* protected by lock */
-  GQueue *queue;
+  GstQueueArray *queue;
 
   IMFMediaSource *source;
   IMFSourceReader *reader;
@@ -84,6 +85,12 @@ struct _GstMFSourceReader
   gboolean flushing;
 };
 
+typedef struct _GstMFSourceReaderSample
+{
+  IMFSample *sample;
+  GstClockTime clock_time;
+} GstMFSourceReaderSample;
+
 static void gst_mf_source_reader_constructed (GObject * object);
 static void gst_mf_source_reader_finalize (GObject * object);
 
@@ -98,6 +105,8 @@ static gboolean gst_mf_source_reader_unlock_stop (GstMFSourceObject * object);
 static GstCaps * gst_mf_source_reader_get_caps (GstMFSourceObject * object);
 static gboolean gst_mf_source_reader_set_caps (GstMFSourceObject * object,
     GstCaps * caps);
+static void
+gst_mf_source_reader_sample_clear (GstMFSourceReaderSample * reader_sample);
 
 static gboolean gst_mf_source_reader_open (GstMFSourceReader * object,
     IMFActivate * activate);
@@ -134,7 +143,10 @@ gst_mf_source_reader_class_init (GstMFSourceReaderClass * klass)
 static void
 gst_mf_source_reader_init (GstMFSourceReader * self)
 {
-  self->queue = g_queue_new ();
+  self->queue =
+      gst_queue_array_new_for_struct (sizeof (GstMFSourceReaderSample), 2);
+  gst_queue_array_set_clear_func (self->queue,
+      (GDestroyNotify) gst_mf_source_reader_sample_clear);
   g_mutex_init (&self->lock);
   g_cond_init (&self->cond);
 }
@@ -343,7 +355,7 @@ gst_mf_source_reader_finalize (GObject * object)
   g_main_loop_unref (self->loop);
   g_main_context_unref (self->context);
 
-  g_queue_free (self->queue);
+  gst_queue_array_free (self->queue);
   gst_clear_caps (&self->supported_caps);
   g_mutex_clear (&self->lock);
   g_cond_clear (&self->cond);
@@ -384,15 +396,23 @@ gst_mf_source_reader_start (GstMFSourceObject * object)
   return TRUE;
 }
 
+static GstMFSourceReaderSample *
+gst_mf_source_reader_sample_new (IMFSample * sample, GstClockTime timestamp)
+{
+  GstMFSourceReaderSample *reader_sample = g_new0 (GstMFSourceReaderSample, 1);
+
+  reader_sample->sample = sample;
+  reader_sample->clock_time = timestamp;
+
+  return reader_sample;
+}
+
 static gboolean
 gst_mf_source_reader_stop (GstMFSourceObject * object)
 {
   GstMFSourceReader *self = GST_MF_SOURCE_READER (object);
 
-  while (!g_queue_is_empty (self->queue)) {
-    IMFMediaBuffer *buffer = (IMFMediaBuffer *) g_queue_pop_head (self->queue);
-    buffer->Release ();
-  }
+  gst_queue_array_clear (self->queue);
 
   return TRUE;
 }
@@ -401,47 +421,55 @@
 gst_mf_source_reader_read_sample (GstMFSourceReader * self)
 {
   HRESULT hr;
-  DWORD count = 0, i;
   DWORD stream_flags = 0;
   GstMFStreamMediaType *type = self->cur_type;
-  ComPtr<IMFSample> sample;
+  IMFSample *sample = nullptr;
+  GstMFSourceReaderSample reader_sample;
 
   hr = self->reader->ReadSample (type->stream_index, 0, NULL, &stream_flags,
       NULL, &sample);
 
-  if (!gst_mf_result (hr))
+  if (!gst_mf_result (hr)) {
+    GST_ERROR_OBJECT (self, "Failed to read sample");
     return GST_FLOW_ERROR;
+  }
 
-  if ((stream_flags & MF_SOURCE_READERF_ERROR) == MF_SOURCE_READERF_ERROR)
-    return GST_FLOW_ERROR;
-
-  if (!sample)
-    return GST_FLOW_OK;
-
-  hr = sample->GetBufferCount (&count);
-  if (!gst_mf_result (hr) || !count)
-    return GST_FLOW_OK;
-
-  for (i = 0; i < count; i++) {
-    IMFMediaBuffer *buffer = NULL;
-
-    hr = sample->GetBufferByIndex (i, &buffer);
-    if (!gst_mf_result (hr) || !buffer)
-      continue;
-
-    g_queue_push_tail (self->queue, buffer);
+  if ((stream_flags & MF_SOURCE_READERF_ERROR) == MF_SOURCE_READERF_ERROR) {
+    GST_ERROR_OBJECT (self, "Error while reading sample, sample flags 0x%x",
+        stream_flags);
+    return GST_FLOW_ERROR;
   }
 
+  if (!sample) {
+    GST_WARNING_OBJECT (self, "Empty sample");
+    return GST_FLOW_OK;
+  }
+
+  reader_sample.sample = sample;
+  reader_sample.clock_time =
+      gst_mf_source_object_get_running_time (GST_MF_SOURCE_OBJECT (self));
+
+  gst_queue_array_push_tail_struct (self->queue, &reader_sample);
+
   return GST_FLOW_OK;
 }
 
 static GstFlowReturn
 gst_mf_source_reader_get_media_buffer (GstMFSourceReader * self,
-    IMFMediaBuffer ** media_buffer)
+    IMFMediaBuffer ** buffer, GstClockTime * timestamp, GstClockTime * duration)
 {
   GstFlowReturn ret = GST_FLOW_OK;
+  IMFSample *sample = nullptr;
+  HRESULT hr;
+  DWORD count = 0;
+  LONGLONG mf_timestamp;
+  GstMFSourceReaderSample *reader_sample = nullptr;
 
-  while (g_queue_is_empty (self->queue)) {
+  *buffer = nullptr;
+  *timestamp = GST_CLOCK_TIME_NONE;
+  *duration = GST_CLOCK_TIME_NONE;
+
+  while (gst_queue_array_is_empty (self->queue)) {
     ret = gst_mf_source_reader_read_sample (self);
     if (ret != GST_FLOW_OK)
       return ret;
@@ -454,7 +482,37 @@ gst_mf_source_reader_get_media_buffer (GstMFSourceReader * self,
     g_mutex_unlock (&self->lock);
   }
 
-  *media_buffer = (IMFMediaBuffer *) g_queue_pop_head (self->queue);
+  reader_sample =
+      (GstMFSourceReaderSample *) gst_queue_array_pop_head_struct (self->queue);
+  sample = reader_sample->sample;
+  g_assert (sample);
+
+  hr = sample->GetBufferCount (&count);
+  if (!gst_mf_result (hr) || count == 0) {
+    GST_WARNING_OBJECT (self, "Empty IMFSample, read again");
+    goto done;
+  }
+
+  /* XXX: read the first buffer and ignore the others for now */
+  hr = sample->GetBufferByIndex (0, buffer);
+  if (!gst_mf_result (hr)) {
+    GST_WARNING_OBJECT (self, "Couldn't get IMFMediaBuffer from sample");
+    goto done;
+  }
+
+  hr = sample->GetSampleDuration (&mf_timestamp);
+  if (!gst_mf_result (hr)) {
+    GST_WARNING_OBJECT (self, "Couldn't get sample duration");
+    *duration = GST_CLOCK_TIME_NONE;
+  } else {
+    /* Media Foundation uses 100 nano seconds unit */
+    *duration = mf_timestamp * 100;
+  }
+
+  *timestamp = reader_sample->clock_time;
+
+done:
+  gst_mf_source_reader_sample_clear (reader_sample);
 
   return GST_FLOW_OK;
 }
@@ -469,8 +527,14 @@ gst_mf_source_reader_fill (GstMFSourceObject * object, GstBuffer * buffer)
   BYTE *data;
   gint i, j;
   HRESULT hr;
+  GstClockTime timestamp = GST_CLOCK_TIME_NONE;
+  GstClockTime duration = GST_CLOCK_TIME_NONE;
+
+  do {
+    ret = gst_mf_source_reader_get_media_buffer (self,
+        media_buffer.ReleaseAndGetAddressOf (), &timestamp, &duration);
+  } while (ret == GST_FLOW_OK && !media_buffer);
 
-  ret = gst_mf_source_reader_get_media_buffer (self, &media_buffer);
   if (ret != GST_FLOW_OK)
     return ret;
 
@@ -509,6 +573,10 @@ gst_mf_source_reader_fill (GstMFSourceObject * object, GstBuffer * buffer)
   gst_video_frame_unmap (&frame);
   media_buffer->Unlock ();
 
+  GST_BUFFER_PTS (buffer) = timestamp;
+  GST_BUFFER_DTS (buffer) = GST_CLOCK_TIME_NONE;
+  GST_BUFFER_DURATION (buffer) = duration;
+
   return GST_FLOW_OK;
 }
 
@@ -523,8 +591,14 @@ gst_mf_source_reader_create (GstMFSourceObject * object, GstBuffer ** buffer)
   DWORD len = 0;
   GstBuffer *buf;
   GstMapInfo info;
+  GstClockTime timestamp = GST_CLOCK_TIME_NONE;
+  GstClockTime duration = GST_CLOCK_TIME_NONE;
+
+  do {
+    ret = gst_mf_source_reader_get_media_buffer (self,
+        media_buffer.ReleaseAndGetAddressOf (), &timestamp, &duration);
+  } while (ret == GST_FLOW_OK && !media_buffer);
 
-  ret = gst_mf_source_reader_get_media_buffer (self, &media_buffer);
   if (ret != GST_FLOW_OK)
     return ret;
 
@@ -547,6 +621,11 @@ gst_mf_source_reader_create (GstMFSourceObject * object, GstBuffer ** buffer)
 
   media_buffer->Unlock ();
 
+  GST_BUFFER_PTS (buffer) = timestamp;
+  /* Set DTS since this is compressed format */
+  GST_BUFFER_DTS (buffer) = timestamp;
+  GST_BUFFER_DURATION (buffer) = duration;
+
   *buffer = buf;
 
   return GST_FLOW_OK;
@@ -811,6 +890,19 @@ gst_mf_device_activate_free (GstMFDeviceActivate * activate)
   g_free (activate);
 }
 
+static void
+gst_mf_source_reader_sample_clear (GstMFSourceReaderSample * reader_sample)
+{
+  if (!reader_sample)
+    return;
+
+  if (reader_sample->sample)
+    reader_sample->sample->Release ();
+
+  reader_sample->sample = nullptr;
+  reader_sample->clock_time = GST_CLOCK_TIME_NONE;
+}
+
 GstMFSourceObject *
 gst_mf_source_reader_new (GstMFSourceType type, gint device_index,
     const gchar * device_name, const gchar * device_path)

@@ -73,8 +73,8 @@ struct _GstMFVideoSrc
   gboolean started;
   GstVideoInfo info;
 
-  GstClockTime first_pts;
   guint64 n_frames;
+  GstClockTime latency;
 
   /* properties */
   gchar *device_path;
@@ -109,6 +109,7 @@ static GstCaps *gst_mf_video_src_get_caps (GstBaseSrc * src, GstCaps * filter);
 static GstCaps *gst_mf_video_src_fixate (GstBaseSrc * src, GstCaps * caps);
 static gboolean gst_mf_video_src_unlock (GstBaseSrc * src);
 static gboolean gst_mf_video_src_unlock_stop (GstBaseSrc * src);
+static gboolean gst_mf_video_src_query (GstBaseSrc * src, GstQuery * query);
 
 static GstFlowReturn gst_mf_video_src_create (GstPushSrc * pushsrc,
     GstBuffer ** buffer);
@@ -178,6 +179,7 @@ gst_mf_video_src_class_init (GstMFVideoSrcClass * klass)
   basesrc_class->fixate = GST_DEBUG_FUNCPTR (gst_mf_video_src_fixate);
   basesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_mf_video_src_unlock);
   basesrc_class->unlock_stop = GST_DEBUG_FUNCPTR (gst_mf_video_src_unlock_stop);
+  basesrc_class->query = GST_DEBUG_FUNCPTR (gst_mf_video_src_query);
 
   pushsrc_class->create = GST_DEBUG_FUNCPTR (gst_mf_video_src_create);
 
@@ -190,7 +192,6 @@ gst_mf_video_src_init (GstMFVideoSrc * self)
 {
   gst_base_src_set_format (GST_BASE_SRC (self), GST_FORMAT_TIME);
   gst_base_src_set_live (GST_BASE_SRC (self), TRUE);
-  gst_base_src_set_do_timestamp (GST_BASE_SRC (self), TRUE);
 
   self->device_index = DEFAULT_DEVICE_INDEX;
 }
@@ -267,10 +268,17 @@ gst_mf_video_src_start (GstBaseSrc * src)
   self->source = gst_mf_source_object_new (GST_MF_SOURCE_TYPE_VIDEO,
       self->device_index, self->device_name, self->device_path, NULL);
 
-  self->first_pts = GST_CLOCK_TIME_NONE;
   self->n_frames = 0;
+  self->latency = 0;
 
-  return ! !self->source;
+  if (!self->source) {
+    GST_ERROR_OBJECT (self, "Couldn't create capture object");
+    return FALSE;
+  }
+
+  gst_mf_source_object_set_client (self->source, GST_ELEMENT (self));
+
+  return TRUE;
 }
 
 static gboolean
@@ -383,12 +391,35 @@ gst_mf_video_src_unlock_stop (GstBaseSrc * src)
   return TRUE;
 }
 
+static gboolean
+gst_mf_video_src_query (GstBaseSrc * src, GstQuery * query)
+{
+  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_LATENCY:
+      if (self->started) {
+        gst_query_set_latency (query, TRUE, 0, self->latency);
+
+        return TRUE;
+      }
+      break;
+    default:
+      break;
+  }
+
+  return GST_BASE_SRC_CLASS (parent_class)->query (src, query);
+}
+
 static GstFlowReturn
 gst_mf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer)
 {
   GstMFVideoSrc *self = GST_MF_VIDEO_SRC (pushsrc);
   GstFlowReturn ret = GST_FLOW_OK;
   GstBuffer *buf = NULL;
+  GstClock *clock;
+  GstClockTime running_time = GST_CLOCK_TIME_NONE;
+  GstClockTimeDiff diff;
 
   if (!self->started) {
     if (!gst_mf_source_object_start (self->source)) {
@@ -419,6 +450,28 @@ gst_mf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer)
   GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET (buf) + 1;
   self->n_frames++;
 
+  GST_LOG_OBJECT (self,
+      "Captured buffer timestamp %" GST_TIME_FORMAT ", duration %"
+      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
+      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
+
+  /* Update latency */
+  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
+  if (clock) {
+    GstClockTime now;
+
+    now = gst_clock_get_time (clock);
+    running_time = now - GST_ELEMENT_CAST (self)->base_time;
+    gst_object_unref (clock);
+  }
+
+  diff = GST_CLOCK_DIFF (GST_BUFFER_PTS (buf), running_time);
+  if (diff > self->latency) {
+    self->latency = (GstClockTime) diff;
+    GST_DEBUG_OBJECT (self, "Updated latency value %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (self->latency));
+  }
+
   *buffer = buf;
 
   return GST_FLOW_OK;

@@ -1047,19 +1047,11 @@ MediaCaptureWrapper::onFrameArrived(IMediaFrameReader *reader,
   if (!frame_ref)
     return S_OK;
 
-  hr = frame_ref->get_VideoMediaFrame (&video_frame);
-  if (!gst_mf_result (hr))
-    return hr;
-
-  hr = video_frame->get_SoftwareBitmap (&bitmap);
-  if (!gst_mf_result (hr) || !bitmap)
-    return hr;
-
   /* nothing to do if no callback was installed */
   if (!user_cb_.frame_arrived)
     return S_OK;
 
-  return user_cb_.frame_arrived (bitmap.Get(), user_data_);
+  return user_cb_.frame_arrived (frame_ref.Get(), user_data_);
 }
 
 HRESULT

@@ -112,7 +112,8 @@ public:
 
 typedef struct
 {
-  HRESULT (*frame_arrived) (ISoftwareBitmap * bitmap, void * user_data);
+  HRESULT (*frame_arrived) (IMediaFrameReference * frame,
+      void * user_data);
   HRESULT (*failed) (const std::string &error,
       UINT32 error_code,
       void * user_data);