applemedia: port qtkitvideosrc

Author: Alessandro Decina
Date:   2012-02-28 08:13:04 +01:00
Parent: 3cf47f7a98
Commit: 7a4a3833c3

8 changed files with 127 additions and 161 deletions

configure.ac

@@ -313,7 +313,7 @@ GST_PLUGINS_NONPORTED=" aiff asfmux \
apexsink bz2 cdaudio celt cog curl dc1394 dirac directfb resindvd \
gsettings jp2k ladspa mimic \
musepack musicbrainz nas neon ofa openal opencv rsvg sdl smooth sndfile soundtouch spandsp timidity \
wildmidi xvid apple_media lv2 teletextdec opus dvb"
wildmidi xvid lv2 teletextdec opus dvb"
AC_SUBST(GST_PLUGINS_NONPORTED)
dnl these are all the gst plug-ins, compilable without additional libs

sys/applemedia/Makefile.am

@@ -2,10 +2,6 @@ plugin_LTLIBRARIES = libgstapplemedia.la
libgstapplemedia_la_SOURCES = \
plugin.m \
vth264decbin.c \
vth264encbin.c \
vtenc.c \
vtdec.c \
vtutil.c \
bufferfactory.m \
corevideobuffer.c \
@@ -82,8 +78,6 @@ else
libgstapplemedia_la_SOURCES += \
qtkitvideosrc.m \
miovideosrc.c \
miovideodevice.c \
mioapi.c
libgstapplemedia_la_LDFLAGS += \

sys/applemedia/coremediabuffer.c

@@ -19,29 +19,31 @@
#include "coremediabuffer.h"
G_DEFINE_TYPE (GstCoreMediaBuffer, gst_core_media_buffer, GST_TYPE_BUFFER);
static void
gst_core_media_buffer_init (GstCoreMediaBuffer * self)
gst_core_media_meta_free (GstCoreMediaMeta * meta, GstBuffer * buf)
{
GST_BUFFER_FLAG_SET (self, GST_BUFFER_FLAG_READONLY);
}
static void
gst_core_media_buffer_finalize (GstMiniObject * mini_object)
{
GstCoreMediaBuffer *self = GST_CORE_MEDIA_BUFFER_CAST (mini_object);
if (self->image_buf != NULL) {
GstCVApi *cv = self->ctx->cv;
cv->CVPixelBufferUnlockBaseAddress (self->image_buf,
if (meta->image_buf != NULL) {
GstCVApi *cv = meta->ctx->cv;
cv->CVPixelBufferUnlockBaseAddress (meta->image_buf,
kCVPixelBufferLock_ReadOnly);
}
self->ctx->cm->FigSampleBufferRelease (self->sample_buf);
g_object_unref (self->ctx);
meta->ctx->cm->FigSampleBufferRelease (meta->sample_buf);
g_object_unref (meta->ctx);
}
GST_MINI_OBJECT_CLASS (gst_core_media_buffer_parent_class)->finalize
(mini_object);
static const GstMetaInfo *
gst_core_media_meta_get_info (void)
{
static const GstMetaInfo *core_media_meta_info = NULL;
if (core_media_meta_info == NULL) {
core_media_meta_info = gst_meta_register ("GstCoreMediaeMeta",
"GstCoreMediaMeta", sizeof (GstCoreMediaMeta),
(GstMetaInitFunction) NULL,
(GstMetaFreeFunction) gst_core_media_meta_free,
(GstMetaTransformFunction) NULL);
}
return core_media_meta_info;
}
GstBuffer *
@@ -55,7 +57,8 @@ gst_core_media_buffer_new (GstCoreMediaCtx * ctx, CMSampleBufferRef sample_buf)
Byte *data = NULL;
UInt32 size;
OSStatus status;
GstCoreMediaBuffer *buf;
GstBuffer *buf;
GstCoreMediaMeta *meta;
image_buf = cm->CMSampleBufferGetImageBuffer (sample_buf);
pixel_buf = NULL;
@@ -95,33 +98,32 @@ gst_core_media_buffer_new (GstCoreMediaCtx * ctx, CMSampleBufferRef sample_buf)
goto error;
}
buf =
GST_CORE_MEDIA_BUFFER (gst_mini_object_new (GST_TYPE_CORE_MEDIA_BUFFER));
buf->ctx = g_object_ref (ctx);
buf->sample_buf = cm->FigSampleBufferRetain (sample_buf);
buf->image_buf = image_buf;
buf->pixel_buf = pixel_buf;
buf->block_buf = block_buf;
buf = gst_buffer_new ();
GST_BUFFER_DATA (buf) = data;
GST_BUFFER_SIZE (buf) = size;
meta = (GstCoreMediaMeta *) gst_buffer_add_meta (buf,
gst_core_media_meta_get_info (), NULL);
meta->ctx = g_object_ref (ctx);
meta->sample_buf = cm->FigSampleBufferRetain (sample_buf);
meta->image_buf = image_buf;
meta->pixel_buf = pixel_buf;
meta->block_buf = block_buf;
return GST_BUFFER_CAST (buf);
gst_buffer_take_memory (buf, -1,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data,
size, 0, size, NULL, NULL));
return buf;
error:
return NULL;
}
CVPixelBufferRef
gst_core_media_buffer_get_pixel_buffer (GstCoreMediaBuffer * buf)
gst_core_media_buffer_get_pixel_buffer (GstBuffer * buf)
{
return buf->ctx->cv->CVPixelBufferRetain (buf->pixel_buf);
}
GstCoreMediaMeta *meta = (GstCoreMediaMeta *) gst_buffer_get_meta (buf,
gst_core_media_meta_get_info ());
g_return_val_if_fail (meta != NULL, NULL);
static void
gst_core_media_buffer_class_init (GstCoreMediaBufferClass * klass)
{
GstMiniObjectClass *miniobject_class = GST_MINI_OBJECT_CLASS (klass);
miniobject_class->finalize = gst_core_media_buffer_finalize;
return meta->ctx->cv->CVPixelBufferRetain (meta->pixel_buf);
}
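
A note on the pattern above: in 0.10 GstCoreMediaBuffer was a GstBuffer subclass with a finalize override; after the port the buffer is a plain GstBuffer that wraps the CoreMedia data with gst_memory_new_wrapped() and carries the Core Media objects in a registered GstMeta, whose free function releases them. A minimal, generic sketch of that pattern, using hypothetical MyMeta/my_* names and the same calls as the hunk above (the 0.11 development API; some of these calls changed again before 1.0):

#include <gst/gst.h>

/* Generic sketch with hypothetical MyMeta/my_* names; the GStreamer calls
 * are the ones used in the hunk above (0.11 development API). */
typedef struct
{
  GstMeta meta;
  gpointer handle;              /* backing platform object to release */
} MyMeta;

static void
my_meta_free (MyMeta * meta, GstBuffer * buf)
{
  /* release/unlock whatever keeps meta->handle and the wrapped data alive */
}

static const GstMetaInfo *
my_meta_get_info (void)
{
  static const GstMetaInfo *info = NULL;

  if (info == NULL) {
    info = gst_meta_register ("MyMeta", "MyMeta", sizeof (MyMeta),
        (GstMetaInitFunction) NULL,
        (GstMetaFreeFunction) my_meta_free,
        (GstMetaTransformFunction) NULL);
  }
  return info;
}

static GstBuffer *
my_buffer_new_wrapped (gpointer handle, guint8 * data, gsize size)
{
  GstBuffer *buf = gst_buffer_new ();
  MyMeta *meta;

  meta = (MyMeta *) gst_buffer_add_meta (buf, my_meta_get_info (), NULL);
  meta->handle = handle;

  /* the buffer borrows the data; the meta's free function keeps the owner
   * alive until the buffer is destroyed */
  gst_buffer_take_memory (buf, -1,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data,
          size, 0, size, NULL, NULL));

  return buf;
}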

sys/applemedia/coremediabuffer.h

@@ -26,38 +26,22 @@
G_BEGIN_DECLS
#define GST_TYPE_CORE_MEDIA_BUFFER (gst_core_media_buffer_get_type ())
#define GST_IS_CORE_MEDIA_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), \
GST_TYPE_CORE_MEDIA_BUFFER))
#define GST_CORE_MEDIA_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), \
GST_TYPE_CORE_MEDIA_BUFFER, GstCoreMediaBuffer))
#define GST_CORE_MEDIA_BUFFER_CAST(obj) ((GstCoreMediaBuffer *) (obj))
typedef struct _GstCoreMediaBuffer GstCoreMediaBuffer;
typedef struct _GstCoreMediaBufferClass GstCoreMediaBufferClass;
struct _GstCoreMediaBuffer
typedef struct _GstCoreMediaMeta
{
GstBuffer buffer;
GstMeta meta;
GstCoreMediaCtx * ctx;
CMSampleBufferRef sample_buf;
CVImageBufferRef image_buf;
CVPixelBufferRef pixel_buf;
CMBlockBufferRef block_buf;
};
} GstCoreMediaMeta;
struct _GstCoreMediaBufferClass
{
GstBufferClass parent_class;
};
GType gst_core_media_buffer_get_type (void) G_GNUC_CONST;
GstBuffer * gst_core_media_buffer_new (GstCoreMediaCtx * ctx,
CMSampleBufferRef sample_buf);
CVPixelBufferRef gst_core_media_buffer_get_pixel_buffer
(GstCoreMediaBuffer * buf);
(GstBuffer * buf);
G_END_DECLS

sys/applemedia/corevideobuffer.c

@@ -19,31 +19,33 @@
#include "corevideobuffer.h"
G_DEFINE_TYPE (GstCoreVideoBuffer, gst_core_video_buffer, GST_TYPE_BUFFER);
static void
gst_core_video_buffer_init (GstCoreVideoBuffer * self)
gst_core_video_meta_free (GstCoreVideoMeta * meta, GstBuffer * buf)
{
GST_BUFFER_FLAG_SET (self, GST_BUFFER_FLAG_READONLY);
}
GstCVApi *cv = meta->ctx->cv;
static void
gst_core_video_buffer_finalize (GstMiniObject * mini_object)
{
GstCoreVideoBuffer *self = GST_CORE_VIDEO_BUFFER_CAST (mini_object);
GstCVApi *cv = self->ctx->cv;
if (self->pixbuf != NULL) {
cv->CVPixelBufferUnlockBaseAddress (self->pixbuf,
if (meta->pixbuf != NULL) {
cv->CVPixelBufferUnlockBaseAddress (meta->pixbuf,
kCVPixelBufferLock_ReadOnly);
}
cv->CVBufferRelease (self->cvbuf);
cv->CVBufferRelease (meta->cvbuf);
g_object_unref (meta->ctx);
}
g_object_unref (self->ctx);
static const GstMetaInfo *
gst_core_video_meta_get_info (void)
{
static const GstMetaInfo *core_video_meta_info = NULL;
GST_MINI_OBJECT_CLASS (gst_core_video_buffer_parent_class)->finalize
(mini_object);
if (core_video_meta_info == NULL) {
core_video_meta_info = gst_meta_register ("GstCoreVideoeMeta",
"GstCoreVideoMeta", sizeof (GstCoreVideoMeta),
(GstMetaInitFunction) NULL,
(GstMetaFreeFunction) gst_core_video_meta_free,
(GstMetaTransformFunction) NULL);
}
return core_video_meta_info;
}
GstBuffer *
@@ -53,7 +55,8 @@ gst_core_video_buffer_new (GstCoreMediaCtx * ctx, CVBufferRef cvbuf)
void *data;
size_t size;
CVPixelBufferRef pixbuf = NULL;
GstCoreVideoBuffer *buf;
GstBuffer *buf;
GstCoreVideoMeta *meta;
if (CFGetTypeID (cvbuf) == cv->CVPixelBufferGetTypeID ()) {
pixbuf = (CVPixelBufferRef) cvbuf;
@@ -70,25 +73,18 @@ gst_core_video_buffer_new (GstCoreMediaCtx * ctx, CVBufferRef cvbuf)
goto error;
}
buf = GST_CORE_VIDEO_BUFFER_CAST (gst_mini_object_new
(GST_TYPE_CORE_VIDEO_BUFFER));
buf->ctx = g_object_ref (ctx);
buf->cvbuf = cv->CVBufferRetain (cvbuf);
buf->pixbuf = pixbuf;
buf = gst_buffer_new ();
meta = (GstCoreVideoMeta *) gst_buffer_add_meta (buf,
gst_core_video_meta_get_info (), NULL);
meta->ctx = g_object_ref (ctx);
meta->cvbuf = cv->CVBufferRetain (cvbuf);
meta->pixbuf = pixbuf;
gst_buffer_take_memory (buf, -1,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data,
size, 0, size, NULL, NULL));
GST_BUFFER_DATA (buf) = data;
GST_BUFFER_SIZE (buf) = size;
return GST_BUFFER_CAST (buf);
return buf;
error:
return NULL;
}
static void
gst_core_video_buffer_class_init (GstCoreVideoBufferClass * klass)
{
GstMiniObjectClass *miniobject_class = GST_MINI_OBJECT_CLASS (klass);
miniobject_class->finalize = gst_core_video_buffer_finalize;
}
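
corevideobuffer.c gets the same treatment: the GstCoreVideoBuffer subclass becomes a GstCoreVideoMeta attached to an ordinary buffer. Code that previously downcast the buffer now looks the meta up instead, as the reworked gst_core_media_buffer_get_pixel_buffer() above does. A consumer-side sketch, reusing the hypothetical MyMeta names from the earlier example and gst_buffer_get_meta() as it is called in this commit:

/* Hypothetical consumer side of the same pattern, with gst_buffer_get_meta()
 * called the way this commit calls it. */
static gpointer
my_buffer_get_handle (GstBuffer * buf)
{
  MyMeta *meta = (MyMeta *) gst_buffer_get_meta (buf, my_meta_get_info ());

  g_return_val_if_fail (meta != NULL, NULL);

  return meta->handle;
}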

sys/applemedia/corevideobuffer.h

@@ -26,32 +26,15 @@
G_BEGIN_DECLS
#define GST_TYPE_CORE_VIDEO_BUFFER (gst_core_video_buffer_get_type ())
#define GST_IS_CORE_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), \
GST_TYPE_CORE_VIDEO_BUFFER))
#define GST_CORE_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), \
GST_TYPE_CORE_VIDEO_BUFFER, GstCoreVideoBuffer))
#define GST_CORE_VIDEO_BUFFER_CAST(obj) ((GstCoreVideoBuffer *) (obj))
typedef struct _GstCoreVideoBuffer GstCoreVideoBuffer;
typedef struct _GstCoreVideoBufferClass GstCoreVideoBufferClass;
struct _GstCoreVideoBuffer
typedef struct _GstCoreVideoMeta
{
GstBuffer buffer;
GstMeta meta;
GstCoreMediaCtx *ctx;
CVBufferRef cvbuf;
CVPixelBufferRef pixbuf;
};
} GstCoreVideoMeta;
struct _GstCoreVideoBufferClass
{
GstBufferClass parent_class;
};
GType gst_core_video_buffer_get_type (void) G_GNUC_CONST;
GstBuffer * gst_core_video_buffer_new (GstCoreMediaCtx * ctx,
CVBufferRef cvbuf);

sys/applemedia/plugin.m

@@ -60,10 +60,13 @@ plugin_init (GstPlugin * plugin)
res = gst_element_register (plugin, "qtkitvideosrc", GST_RANK_PRIMARY,
GST_TYPE_QTKIT_VIDEO_SRC);
#if 0
res &= gst_element_register (plugin, "miovideosrc", GST_RANK_NONE,
GST_TYPE_MIO_VIDEO_SRC);
#endif
#endif
#if 0
res &= gst_element_register (plugin, "vth264decbin", GST_RANK_NONE,
GST_TYPE_VT_H264_DEC_BIN);
res &= gst_element_register (plugin, "vth264encbin", GST_RANK_NONE,
@@ -71,6 +74,7 @@ plugin_init (GstPlugin * plugin)
gst_vtenc_register_elements (plugin);
gst_vtdec_register_elements (plugin);
#endif
return res;
}

sys/applemedia/qtkitvideosrc.m

@@ -38,36 +38,36 @@ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (
"video/x-raw-yuv, "
"format = (fourcc) " DEVICE_YUV_FOURCC ", "
"video/x-raw, "
"format =" DEVICE_YUV_FOURCC ", "
"width = (int) 640, "
"height = (int) 480, "
"framerate = [0/1, 100/1], "
"pixel-aspect-ratio = (fraction) 1/1"
"; "
"video/x-raw-yuv, "
"format = (fourcc) " DEVICE_YUV_FOURCC ", "
"video/x-raw, "
"format =" DEVICE_YUV_FOURCC ", "
"width = (int) 160, "
"height = (int) 120, "
"framerate = [0/1, 100/1], "
"pixel-aspect-ratio = (fraction) 1/1"
"; "
"video/x-raw-yuv, "
"format = (fourcc) " DEVICE_YUV_FOURCC ", "
"video/x-raw, "
"format =" DEVICE_YUV_FOURCC ", "
"width = (int) 176, "
"height = (int) 144, "
"framerate = [0/1, 100/1], "
"pixel-aspect-ratio = (fraction) 12/11"
"; "
"video/x-raw-yuv, "
"format = (fourcc) " DEVICE_YUV_FOURCC ", "
"video/x-raw, "
"format =" DEVICE_YUV_FOURCC ", "
"width = (int) 320, "
"height = (int) 240, "
"framerate = [0/1, 100/1], "
"pixel-aspect-ratio = (fraction) 1/1"
"; "
"video/x-raw-yuv, "
"format = (fourcc) " DEVICE_YUV_FOURCC ", "
"video/x-raw, "
"format =" DEVICE_YUV_FOURCC ", "
"width = (int) 352, "
"height = (int) 288, "
"framerate = [0/1, 100/1], "
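
The hunk above switches the pad templates from the 0.10 video/x-raw-yuv media type with a fourcc format field to the 0.11-style video/x-raw type, where the format is given as a plain string; DEVICE_YUV_FOURCC is a macro defined elsewhere in this file and its value is not shown here. Purely as an illustration, one of the modes written out in the new style might look like this, with "UYVY" standing in for whatever the macro expands to:

#include <gst/gst.h>

/* Illustrative only: one mode in the new caps style; "UYVY" is a placeholder
 * for the value of DEVICE_YUV_FOURCC. */
static GstStaticPadTemplate example_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (
        "video/x-raw, "
        "format = UYVY, "
        "width = (int) 640, "
        "height = (int) 480, "
        "framerate = [0/1, 100/1], "
        "pixel-aspect-ratio = (fraction) 1/1"));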
@@ -81,8 +81,7 @@ typedef enum _QueueState {
HAS_FRAME_OR_STOP_REQUEST,
} QueueState;
GST_BOILERPLATE (GstQTKitVideoSrc, gst_qtkit_video_src, GstPushSrc,
GST_TYPE_PUSH_SRC);
G_DEFINE_TYPE (GstQTKitVideoSrc, gst_qtkit_video_src, GST_TYPE_PUSH_SRC);
@interface GstQTKitVideoSrcImpl : NSObject {
GstElement *element;
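
This hunk replaces the 0.10 GST_BOILERPLATE macro with plain G_DEFINE_TYPE. Besides dropping the base_init step, this means the parent class is reached through the generated gst_qtkit_video_src_parent_class symbol rather than a file-scope parent_class variable, which is why the query and change_state chain-ups below are updated as well. A minimal sketch of the new shape, using a hypothetical GstFooSrc:

#include <gst/base/gstpushsrc.h>

typedef struct _GstFooSrc
{
  GstPushSrc parent;
} GstFooSrc;

typedef struct _GstFooSrcClass
{
  GstPushSrcClass parent_class;
} GstFooSrcClass;

/* 0.10: GST_BOILERPLATE (GstFooSrc, gst_foo_src, GstPushSrc, GST_TYPE_PUSH_SRC)
 * also declared a file-scope parent_class variable and a base_init hook. */
G_DEFINE_TYPE (GstFooSrc, gst_foo_src, GST_TYPE_PUSH_SRC);

static GstStateChangeReturn
gst_foo_src_change_state (GstElement * element, GstStateChange transition)
{
  /* chain up through the symbol generated by G_DEFINE_TYPE */
  return GST_ELEMENT_CLASS (gst_foo_src_parent_class)->change_state (element,
      transition);
}

static void
gst_foo_src_class_init (GstFooSrcClass * klass)
{
  GST_ELEMENT_CLASS (klass)->change_state = gst_foo_src_change_state;
}

static void
gst_foo_src_init (GstFooSrc * src)
{
  /* note: no separate GstFooSrcClass argument in the G_DEFINE_TYPE form */
}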
@@ -239,7 +238,6 @@ openFailed:
GstStructure *s;
NSDictionary *outputAttrs;
BOOL success;
NSRunLoop *mainRunLoop;
NSTimeInterval interval;
g_assert (device != nil);
@@ -293,21 +291,13 @@ openFailed:
[output setDelegate:self];
[session startRunning];
mainRunLoop = [NSRunLoop mainRunLoop];
if ([mainRunLoop currentMode] == nil) {
/* QTCaptureSession::addInput and QTCaptureSession::addOutput call
* NSObject::performSelectorOnMainThread internally. If the mainRunLoop is
* not running we need to run it for a while for those methods to complete
*/
GST_INFO ("mainRunLoop not running");
[[NSRunLoop mainRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:1]];
}
return YES;
}
- (BOOL)start
{
NSRunLoop *mainRunLoop;
queueLock = [[NSConditionLock alloc] initWithCondition:NO_FRAMES];
queue = [[NSMutableArray alloc] initWithCapacity:FRAME_QUEUE_SIZE];
stopRequest = NO;
@@ -318,6 +308,21 @@ openFailed:
fps_d = 1;
duration = GST_CLOCK_TIME_NONE;
/* this will trigger negotiation and open the device in setCaps */
gst_base_src_start_complete (baseSrc, GST_FLOW_OK);
mainRunLoop = [NSRunLoop mainRunLoop];
if ([mainRunLoop currentMode] == nil) {
/* QTCaptureSession::addInput and QTCaptureSession::addOutput, called from
* setCaps, call NSObject::performSelectorOnMainThread internally. If the
* mainRunLoop is not running we need to run it for a while for those
* methods to complete.
*/
GST_INFO ("mainRunLoop not running");
[[NSRunLoop mainRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:1]];
}
return YES;
}
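
The start path is also reorganized: the run-loop pumping moves from setCaps into start, and the element is made async so that start can report completion explicitly while the real device setup happens later during caps negotiation. Roughly, in terms of the base-class calls used above and the hypothetical GstFooSrc from the earlier sketch:

/* Sketch only: the corresponding init/start pair for the hypothetical
 * GstFooSrc, using the same basesrc calls as this hunk. */
static void
gst_foo_src_init (GstFooSrc * src)
{
  /* make basesrc wait for an explicit start-complete notification */
  gst_base_src_set_async (GST_BASE_SRC (src), TRUE);
}

static gboolean
gst_foo_src_start (GstBaseSrc * basesrc)
{
  /* set up queues and state here, then unblock the pending state change;
   * negotiation (and the actual device setup) follows in the setcaps
   * handler */
  gst_base_src_start_complete (basesrc, GST_FLOW_OK);

  /* start() is now free to do extra work, such as spinning the main run
   * loop the way the Objective-C code above does */
  return TRUE;
}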
@@ -351,7 +356,7 @@ openFailed:
gst_query_set_latency (query, TRUE, min_latency, max_latency);
}
} else {
result = GST_BASE_SRC_CLASS (parent_class)->query (baseSrc, query);
result = GST_BASE_SRC_CLASS (gst_qtkit_video_src_parent_class)->query (baseSrc, query);
}
return result;
@@ -396,7 +401,7 @@ openFailed:
return GST_STATE_CHANGE_FAILURE;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
ret = GST_ELEMENT_CLASS (gst_qtkit_video_src_parent_class)->change_state (element, transition);
if (transition == GST_STATE_CHANGE_READY_TO_NULL)
[self closeDevice];
@@ -514,20 +519,6 @@ static GstFlowReturn gst_qtkit_video_src_create (GstPushSrc * pushsrc,
GstBuffer ** buf);
static void gst_qtkit_video_src_fixate (GstBaseSrc * basesrc, GstCaps * caps);
static void
gst_qtkit_video_src_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"Video Source (QTKit)", "Source/Video",
"Reads frames from a Mac OS X QTKit device",
"Ole André Vadla Ravnås <oravnas@cisco.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_template));
}
static void
gst_qtkit_video_src_class_init (GstQTKitVideoSrcClass * klass)
{
@@ -536,6 +527,14 @@ gst_qtkit_video_src_class_init (GstQTKitVideoSrcClass * klass)
GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
gst_element_class_set_details_simple (gstelement_class,
"Video Source (QTKit)", "Source/Video",
"Reads frames from a Mac OS X QTKit device",
"Ole André Vadla Ravnås <oravnas@cisco.com>");
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&src_template));
gobject_class->finalize = gst_qtkit_video_src_finalize;
gobject_class->get_property = gst_qtkit_video_src_get_property;
gobject_class->set_property = gst_qtkit_video_src_set_property;
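
With base_init gone, the element details and the pad template registration move into class_init, as this hunk shows. In the hypothetical GstFooSrc terms used earlier, and assuming a src_template like the one defined at the top of this file, the consolidated class_init looks roughly like:

/* Sketch: consolidated class_init; all names and strings are placeholders. */
static void
gst_foo_src_class_init (GstFooSrcClass * klass)
{
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);

  gst_element_class_set_details_simple (gstelement_class,
      "Foo Source", "Source/Video",
      "Example video source", "Nobody <nobody@example.org>");
  gst_element_class_add_pad_template (gstelement_class,
      gst_static_pad_template_get (&src_template));
}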
@@ -570,11 +569,15 @@ gst_qtkit_video_src_class_init (GstQTKitVideoSrcClass * klass)
[pool release]
static void
gst_qtkit_video_src_init (GstQTKitVideoSrc * src, GstQTKitVideoSrcClass * gclass)
gst_qtkit_video_src_init (GstQTKitVideoSrc * src)
{
OBJC_CALLOUT_BEGIN ();
src->impl = [[GstQTKitVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
OBJC_CALLOUT_END ();
/* pretend to be async so we can spin the mainRunLoop from the main thread if
* needed (see ::start) */
gst_base_src_set_async (GST_BASE_SRC (src), TRUE);
}
static void
@@ -584,7 +587,7 @@ gst_qtkit_video_src_finalize (GObject * obj)
[GST_QTKIT_VIDEO_SRC_IMPL (obj) release];
OBJC_CALLOUT_END ();
G_OBJECT_CLASS (parent_class)->finalize (obj);
G_OBJECT_CLASS (gst_qtkit_video_src_parent_class)->finalize (obj);
}
static void