applemedia: New plugin for Apple multimedia APIs

Provides the following elements:

qtkitvideosrc: OS X video source relying on the QTKit API. It comes with
hard-coded caps, as the API does not provide any way to query the formats
supported by the hardware. It hasn't been tested much, but seems to work.

miovideosrc: OS X video source that uses the undocumented/private
CoreMediaIOServices API, the same API used by iChat. It is present on the
latest version of Leopard and on all versions of Snow Leopard, and has been
tested extensively with built-in cameras and TANDBERG's PrecisionHD USB
camera.
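
As a quick illustration, here is a minimal, untested sketch of previewing
one of the new OS X sources from C. It assumes a GStreamer 0.10 setup where
ffmpegcolorspace and osxvideosink (from other plugins) are available; swap
qtkitvideosrc for miovideosrc as appropriate, and expect to tweak caps for
your hardware.

#include <gst/gst.h>

int
main (int argc, char ** argv)
{
  GstElement *pipeline;
  GError *error = NULL;

  gst_init (&argc, &argv);

  /* Illustrative preview pipeline; ffmpegcolorspace and osxvideosink come
   * from other plugins and are assumed to be installed. */
  pipeline = gst_parse_launch (
      "qtkitvideosrc ! ffmpegcolorspace ! osxvideosink", &error);
  if (pipeline == NULL) {
    g_printerr ("failed to build pipeline: %s\n", error->message);
    g_clear_error (&error);
    return 1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (g_main_loop_new (NULL, FALSE));

  return 0;
}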

vtenc, vtdec: Generic codec wrappers built on the undocumented/private
VideoToolbox API on OS X and iOS. The list of codecs is currently hard-coded
to H.264 for vtenc, and to H.264 + JPEG for vtdec. The lists can easily be
expanded by adding new entries, but that hasn't been done yet. Probing should
probably also be implemented, as the available codecs depend on the OS and
its version, and there doesn't seem to be any way to enumerate them.

vth264decbin, vth264encbin: Wrapper bins to make it easier to use
vtdec_h264/vtenc_h264 in live scenarios.
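
For the encoding side, here is an untested sketch that wires vtenc_h264 by
hand. It assumes the encoder accepts a raw YUV format that ffmpegcolorspace
can produce; the exact caps are not spelled out by this commit message.

#include <gst/gst.h>

/* Build videotestsrc -> ffmpegcolorspace -> vtenc_h264 -> fakesink.
 * Call after gst_init (); returns NULL if an element is missing or
 * linking fails. */
static GstElement *
build_vtenc_pipeline (void)
{
  GstElement *pipeline, *src, *convert, *enc, *sink;

  pipeline = gst_pipeline_new ("vtenc-test");
  src = gst_element_factory_make ("videotestsrc", NULL);
  convert = gst_element_factory_make ("ffmpegcolorspace", NULL);
  enc = gst_element_factory_make ("vtenc_h264", NULL);
  sink = gst_element_factory_make ("fakesink", NULL);

  if (src == NULL || convert == NULL || enc == NULL || sink == NULL)
    return NULL;

  gst_bin_add_many (GST_BIN (pipeline), src, convert, enc, sink, NULL);
  if (!gst_element_link_many (src, convert, enc, sink, NULL))
    return NULL;

  return pipeline;
}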

iphonecamerasrc: iPhone camera source relying on the undocumented/private
Celestial API. Tested on iOS 3.1 running on an iPhone 3GS. Stops working
after a few minutes, presumably because of a resource leak. Needs some
love.

Note that the iOS parts haven't yet been ported to iOS 4.x.
Author: Ole André Vadla Ravnås, 2010-10-27 18:30:11 +01:00
Committed by: Ole André Vadla Ravnås
parent 13431420eb
commit 0e4e351b83
43 changed files with 8675 additions and 2 deletions

@@ -459,6 +459,31 @@ int main ()
AC_SUBST(HAVE_DIRECTDRAW)
])
dnl *** AppleMedia (OS X and iOS) ***
translit(dnm, m, l) AM_CONDITIONAL(USE_APPLE_MEDIA, true)
HAVE_APPLE_MEDIA="no"
dnl SystemConfiguration.h is common to OS X and iOS, so we check for that
AG_GST_CHECK_FEATURE(APPLE_MEDIA, [Apple video], applemedia, [
AC_CHECK_HEADER(SystemConfiguration/SystemConfiguration.h, HAVE_APPLE_MEDIA="yes", HAVE_APPLE_MEDIA="no")
])
dnl in case header SystemConfiguration/SystemConfiguration.h is found on other platforms
case "$host" in
*-*darwin*)
;;
*)
HAVE_APPLE_MEDIA="no"
;;
esac
HAVE_IOS="no"
if test "x$HAVE_APPLE_MEDIA" = "xyes"; then
AC_CHECK_HEADER(MobileCoreServices/MobileCoreServices.h, HAVE_IOS="yes", HAVE_IOS="no", [-])
fi
AM_CONDITIONAL(HAVE_IOS, test "x$HAVE_IOS" = "xyes")
if test "x$HAVE_IOS" = "xyes"; then
AC_DEFINE(HAVE_IOS, 1, [Define if building for Apple iOS])
fi
dnl *** OS X videosrc ***
translit(dnm, m, l) AM_CONDITIONAL(USE_OSX_VIDEO, true)
HAVE_OSX_VIDEO="no"
@@ -1737,6 +1762,7 @@ sys/Makefile
sys/dshowdecwrapper/Makefile
sys/acmenc/Makefile
sys/acmmp3dec/Makefile
sys/applemedia/Makefile
sys/directdraw/Makefile
sys/directsound/Makefile
sys/dshowsrcwrapper/Makefile

@@ -46,6 +46,12 @@ else
DVB_DIR=
endif
if USE_APPLE_MEDIA
APPLE_MEDIA_DIR=applemedia
else
APPLE_MEDIA_DIR=
endif
if USE_OSX_VIDEO
OSX_VIDEO_DIR=osxvideo
else
@@ -82,9 +88,9 @@ else
SHM_DIR=
endif
SUBDIRS = $(ACM_DIR) $(DIRECTDRAW_DIR) $(DIRECTSOUND_DIR) $(DVB_DIR) $(FBDEV_DIR) $(OSX_VIDEO_DIR) $(QT_DIR) $(SHM_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR)
SUBDIRS = $(ACM_DIR) $(APPLE_MEDIA_DIR) $(DIRECTDRAW_DIR) $(DIRECTSOUND_DIR) $(DVB_DIR) $(FBDEV_DIR) $(OSX_VIDEO_DIR) $(QT_DIR) $(SHM_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR)
DIST_SUBDIRS = acmenc acmmp3dec directdraw directsound dvb fbdev dshowdecwrapper dshowsrcwrapper dshowvideosink \
DIST_SUBDIRS = acmenc acmmp3dec applemedia directdraw directsound dvb fbdev dshowdecwrapper dshowsrcwrapper dshowvideosink \
osxvideo qtwrapper shm vcd vdpau wasapi wininet winks winscreencap
include $(top_srcdir)/common/parallel-subdirs.mak

@@ -0,0 +1,85 @@
plugin_LTLIBRARIES = libgstapplemedia.la
libgstapplemedia_la_SOURCES = \
plugin.m \
vth264decbin.c \
vth264encbin.c \
vtenc.c \
vtdec.c \
vtutil.c \
corevideobuffer.c \
coremediabuffer.c \
coremediactx.c \
cvapi.c \
cmapi.c \
vtapi.c \
dynapi.c
libgstapplemedia_la_CFLAGS = \
$(GST_CFLAGS) \
$(GST_BASE_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS)
libgstapplemedia_la_OBJCFLAGS = \
$(GST_CFLAGS) \
$(GST_BASE_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS)
libgstapplemedia_la_LIBADD = \
$(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) \
-lgstinterfaces-$(GST_MAJORMINOR) \
-lgstvideo-$(GST_MAJORMINOR)
libgstapplemedia_la_LIBTOOLFLAGS = --tag=disable-static
libgstapplemedia_la_LDFLAGS = \
$(GST_PLUGIN_LDFLAGS) \
$(GST_BASE_LDFLAGS) \
$(GST_PLUGINS_BASE_LDFLAGS) \
-Wl,-framework -Wl,CoreFoundation
noinst_HEADERS = \
qtkitvideosrc.h \
miovideosrc.h \
miovideodevice.h \
iphonecamerasrc.h \
vth264decbin.h \
vth264encbin.h \
vtenc.h \
vtdec.h \
vtutil.h \
corevideobuffer.h \
coremediabuffer.h \
coremediactx.h \
cvapi.h \
cmapi.h \
mioapi.h \
mtapi.h \
celapi.h \
vtapi.h \
dynapi.h \
dynapi-internal.h
if HAVE_IOS
libgstapplemedia_la_SOURCES += \
iphonecamerasrc.c \
mtapi.c \
celapi.c
else
libgstapplemedia_la_SOURCES += \
qtkitvideosrc.m \
miovideosrc.c \
miovideodevice.c \
mioapi.c
libgstapplemedia_la_LDFLAGS += \
-Wl,-framework -Wl,Cocoa \
-Wl,-framework -Wl,CoreVideo \
-Wl,-framework -Wl,QTKit
endif

sys/applemedia/celapi.c
@@ -0,0 +1,57 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "celapi.h"
#include "dynapi-internal.h"
#define CELESTIAL_FRAMEWORK_PATH "/System/Library/PrivateFrameworks/" \
"Celestial.framework/Celestial"
G_DEFINE_TYPE (GstCelApi, gst_cel_api, GST_TYPE_DYN_API);
static void
gst_cel_api_init (GstCelApi * self)
{
}
static void
gst_cel_api_class_init (GstCelApiClass * klass)
{
}
#define SYM_SPEC(name) GST_DYN_SYM_SPEC (GstCelApi, name)
GstCelApi *
gst_cel_api_obtain (GError ** error)
{
static const GstDynSymSpec symbols[] = {
SYM_SPEC (FigCreateCaptureDevicesAndStreamsForPreset),
SYM_SPEC (kFigRecorderCapturePreset_AudioRecording),
SYM_SPEC (kFigRecorderCapturePreset_VideoRecording),
SYM_SPEC (kFigRecorderCapturePreset_AudioVideoRecording),
SYM_SPEC (kFigRecorderCapturePreset_PhotoCapture),
{NULL, 0},
};
return _gst_dyn_api_new (gst_cel_api_get_type (), CELESTIAL_FRAMEWORK_PATH,
symbols, error);
}

sys/applemedia/celapi.h
@@ -0,0 +1,59 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_CEL_API_H__
#define __GST_CEL_API_H__
#include "mtapi.h"
G_BEGIN_DECLS
typedef struct _GstCelApi GstCelApi;
typedef struct _GstCelApiClass GstCelApiClass;
struct _GstCelApi
{
GstDynApi parent;
FigStatus (* FigCreateCaptureDevicesAndStreamsForPreset)
(CFAllocatorRef allocator, CFStringRef capturePreset,
CFDictionaryRef audioOptions,
FigCaptureDeviceRef * outVideoDevice,
FigCaptureStreamRef * outVideoStream,
FigCaptureDeviceRef * outAudioDevice,
FigCaptureStreamRef * outAudioStream);
CFStringRef * kFigRecorderCapturePreset_AudioRecording;
CFStringRef * kFigRecorderCapturePreset_VideoRecording;
CFStringRef * kFigRecorderCapturePreset_AudioVideoRecording;
CFStringRef * kFigRecorderCapturePreset_PhotoCapture;
};
struct _GstCelApiClass
{
GstDynApiClass parent_class;
};
GType gst_cel_api_get_type (void);
GstCelApi * gst_cel_api_obtain (GError ** error);
G_END_DECLS
#endif

sys/applemedia/cmapi.c
@@ -0,0 +1,95 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "cmapi.h"
#include "dynapi-internal.h"
#define CM_FRAMEWORK_PATH "/System/Library/PrivateFrameworks/" \
"CoreMedia.framework/CoreMedia"
G_DEFINE_TYPE (GstCMApi, gst_cm_api, GST_TYPE_DYN_API);
static void
gst_cm_api_init (GstCMApi * self)
{
}
static void
gst_cm_api_class_init (GstCMApiClass * klass)
{
}
#define SYM_SPEC(name) GST_DYN_SYM_SPEC (GstCMApi, name)
GstCMApi *
gst_cm_api_obtain (GError ** error)
{
static const GstDynSymSpec symbols[] = {
SYM_SPEC (FigBaseObjectGetVTable),
SYM_SPEC (FigGetAttachment),
SYM_SPEC (FigFormatDescriptionRelease),
SYM_SPEC (FigFormatDescriptionRetain),
SYM_SPEC (FigFormatDescriptionEqual),
SYM_SPEC (FigFormatDescriptionGetExtension),
SYM_SPEC (FigFormatDescriptionGetMediaType),
SYM_SPEC (FigFormatDescriptionGetMediaSubType),
SYM_SPEC (FigVideoFormatDescriptionCreate),
SYM_SPEC
(FigVideoFormatDescriptionCreateWithSampleDescriptionExtensionAtom),
SYM_SPEC (FigVideoFormatDescriptionGetDimensions),
SYM_SPEC (FigTimeMake),
SYM_SPEC (FigSampleBufferCreate),
SYM_SPEC (FigSampleBufferDataIsReady),
SYM_SPEC (FigSampleBufferGetDataBuffer),
SYM_SPEC (FigSampleBufferGetFormatDescription),
SYM_SPEC (FigSampleBufferGetImageBuffer),
SYM_SPEC (FigSampleBufferGetNumSamples),
SYM_SPEC (FigSampleBufferGetSampleAttachmentsArray),
SYM_SPEC (FigSampleBufferGetSampleSize),
SYM_SPEC (FigSampleBufferRelease),
SYM_SPEC (FigSampleBufferRetain),
SYM_SPEC (FigBlockBufferCreateWithMemoryBlock),
SYM_SPEC (FigBlockBufferGetDataLength),
SYM_SPEC (FigBlockBufferGetDataPointer),
SYM_SPEC (FigBlockBufferRelease),
SYM_SPEC (FigBlockBufferRetain),
SYM_SPEC (FigBufferQueueDequeueAndRetain),
SYM_SPEC (FigBufferQueueGetBufferCount),
SYM_SPEC (FigBufferQueueIsEmpty),
SYM_SPEC (FigBufferQueueRelease),
SYM_SPEC (FigBufferQueueSetValidationCallback),
SYM_SPEC (kFigFormatDescriptionExtension_SampleDescriptionExtensionAtoms),
SYM_SPEC (kFigSampleAttachmentKey_DependsOnOthers),
SYM_SPEC (kFigTimeInvalid),
{NULL, 0},
};
return _gst_dyn_api_new (gst_cm_api_get_type (), CM_FRAMEWORK_PATH, symbols,
error);
}

sys/applemedia/cmapi.h
@@ -0,0 +1,193 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_CM_API_H__
#define __GST_CM_API_H__
#include "dynapi.h"
#include <CoreFoundation/CoreFoundation.h>
#include "cvapi.h"
G_BEGIN_DECLS
typedef struct _GstCMApi GstCMApi;
typedef struct _GstCMApiClass GstCMApiClass;
typedef enum _FigStatus FigStatus;
typedef CFTypeRef FigBaseObjectRef;
typedef struct _FigBaseVTable FigBaseVTable;
typedef struct _FigBaseIface FigBaseIface;
typedef struct _FigFormatDescription FigFormatDescription;
typedef struct _FigVideoDimensions FigVideoDimensions;
typedef struct _FigTime FigTime;
typedef CFTypeRef FigBufferQueueRef;
typedef struct _FigSampleBuffer FigSampleBuffer;
typedef struct _FigDataBuffer FigDataBuffer;
typedef struct _FigBlockBuffer FigBlockBuffer;
typedef Boolean (* FigBufferQueueValidateFunc) (FigBufferQueueRef queue,
FigSampleBuffer *buf, void *refCon);
enum _FigStatus
{
kFigSuccess = 0,
kFigResourceBusy = -12780
};
enum _FigMediaType
{
kFigMediaTypeVideo = 'vide'
};
enum _FigCodecType
{
kComponentVideoUnsigned = 'yuvs',
kFigVideoCodecType_JPEG_OpenDML = 'dmb1',
kYUV420vCodecType = '420v'
};
struct _FigBaseVTable
{
gsize unk;
FigBaseIface * base;
void * derived;
};
struct _FigBaseIface
{
gsize unk1;
gsize unk2;
gsize unk3;
FigStatus (* Invalidate) (FigBaseObjectRef obj);
FigStatus (* Finalize) (FigBaseObjectRef obj);
gpointer unk4;
FigStatus (* CopyProperty) (FigBaseObjectRef obj, CFTypeRef key, void *unk,
CFTypeRef * value);
FigStatus (* SetProperty) (FigBaseObjectRef obj, CFTypeRef key,
CFTypeRef value);
};
struct _FigVideoDimensions
{
UInt32 width;
UInt32 height;
};
struct _FigTime
{
UInt8 data[24];
};
struct _GstCMApi
{
GstDynApi parent;
FigBaseVTable * (* FigBaseObjectGetVTable) (FigBaseObjectRef obj);
void * (* FigGetAttachment) (void * obj, CFStringRef attachmentKey,
UInt32 * foundWherePtr);
void (* FigFormatDescriptionRelease) (FigFormatDescription * desc);
FigFormatDescription * (* FigFormatDescriptionRetain) (
FigFormatDescription * desc);
Boolean (* FigFormatDescriptionEqual) (FigFormatDescription * desc1,
FigFormatDescription * desc2);
CFTypeRef (* FigFormatDescriptionGetExtension) (
const FigFormatDescription * desc, CFStringRef extensionKey);
UInt32 (* FigFormatDescriptionGetMediaType) (
const FigFormatDescription * desc);
UInt32 (* FigFormatDescriptionGetMediaSubType) (
const FigFormatDescription * desc);
FigStatus (* FigVideoFormatDescriptionCreate) (
CFAllocatorRef allocator, UInt32 formatId, UInt32 width, UInt32 height,
CFDictionaryRef extensions, FigFormatDescription ** desc);
FigStatus (* FigVideoFormatDescriptionCreateWithSampleDescriptionExtensionAtom)
(CFAllocatorRef allocator, UInt32 formatId, UInt32 width, UInt32 height,
UInt32 atomId, const UInt8 * data, CFIndex len,
FigFormatDescription ** formatDesc);
FigVideoDimensions (* FigVideoFormatDescriptionGetDimensions) (
const FigFormatDescription * desc);
FigTime (* FigTimeMake) (UInt64 numerator, UInt32 denominator);
FigStatus (* FigSampleBufferCreate) (CFAllocatorRef allocator,
FigBlockBuffer * blockBuf, Boolean unkBool, UInt32 unkDW1, UInt32 unkDW2,
FigFormatDescription * fmtDesc, UInt32 unkCountA, UInt32 unkCountB,
const void * unkTimeData, UInt32 unkCountC, const void * unkDWordData,
FigSampleBuffer ** sampleBuffer);
Boolean (* FigSampleBufferDataIsReady) (
const FigSampleBuffer * buf);
FigBlockBuffer * (* FigSampleBufferGetDataBuffer) (
const FigSampleBuffer * buf);
FigFormatDescription * (* FigSampleBufferGetFormatDescription) (
const FigSampleBuffer * buf);
CVImageBufferRef (* FigSampleBufferGetImageBuffer) (
const FigSampleBuffer * buf);
SInt32 (* FigSampleBufferGetNumSamples) (
const FigSampleBuffer * buf);
CFArrayRef (* FigSampleBufferGetSampleAttachmentsArray) (
const FigSampleBuffer * buf, SInt32 sampleIndex);
SInt32 (* FigSampleBufferGetSampleSize) (
const FigSampleBuffer * buf, SInt32 sampleIndex);
void (* FigSampleBufferRelease) (FigSampleBuffer * buf);
FigSampleBuffer * (* FigSampleBufferRetain) (FigSampleBuffer * buf);
FigStatus (* FigBlockBufferCreateWithMemoryBlock)
(CFAllocatorRef allocator, Byte * data, UInt32 size,
CFAllocatorRef dataAllocator, void *unk1, UInt32 sizeA, UInt32 sizeB,
Boolean unkBool, FigBlockBuffer ** blockBuffer);
SInt32 (* FigBlockBufferGetDataLength) (const FigBlockBuffer * buf);
FigStatus (* FigBlockBufferGetDataPointer) (
const FigBlockBuffer * buf, UInt32 unk1, UInt32 unk2, UInt32 unk3,
Byte ** dataPtr);
void (* FigBlockBufferRelease) (FigBlockBuffer * buf);
FigBlockBuffer * (* FigBlockBufferRetain) (FigBlockBuffer * buf);
FigSampleBuffer * (* FigBufferQueueDequeueAndRetain)
(FigBufferQueueRef queue);
CFIndex (* FigBufferQueueGetBufferCount) (FigBufferQueueRef queue);
Boolean (* FigBufferQueueIsEmpty) (FigBufferQueueRef queue);
void (* FigBufferQueueRelease) (FigBufferQueueRef queue);
FigStatus (* FigBufferQueueSetValidationCallback)
(FigBufferQueueRef queue, FigBufferQueueValidateFunc func, void *refCon);
CFStringRef * kFigFormatDescriptionExtension_SampleDescriptionExtensionAtoms;
CFStringRef * kFigSampleAttachmentKey_DependsOnOthers;
FigTime * kFigTimeInvalid;
};
struct _GstCMApiClass
{
GstDynApiClass parent_class;
};
GType gst_cm_api_get_type (void);
GstCMApi * gst_cm_api_obtain (GError ** error);
G_END_DECLS
#endif

@@ -0,0 +1,124 @@
/*
* Copyright (C) 2009 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "coremediabuffer.h"
G_DEFINE_TYPE (GstCoreMediaBuffer, gst_core_media_buffer, GST_TYPE_BUFFER);
static void
gst_core_media_buffer_init (GstCoreMediaBuffer * self)
{
GST_BUFFER_FLAG_SET (self, GST_BUFFER_FLAG_READONLY);
}
static void
gst_core_media_buffer_finalize (GstMiniObject * mini_object)
{
GstCoreMediaBuffer *self = GST_CORE_MEDIA_BUFFER_CAST (mini_object);
if (self->image_buf != NULL) {
GstCVApi *cv = self->ctx->cv;
cv->CVPixelBufferUnlockBaseAddress (self->image_buf, 0);
}
self->ctx->cm->FigSampleBufferRelease (self->sample_buf);
g_object_unref (self->ctx);
GST_MINI_OBJECT_CLASS (gst_core_media_buffer_parent_class)->finalize
(mini_object);
}
GstBuffer *
gst_core_media_buffer_new (GstCoreMediaCtx * ctx, FigSampleBuffer * sample_buf)
{
GstCVApi *cv = ctx->cv;
GstCMApi *cm = ctx->cm;
CVImageBufferRef image_buf;
CVPixelBufferRef pixel_buf;
FigBlockBuffer *block_buf;
Byte *data = NULL;
UInt32 size;
FigStatus status;
GstCoreMediaBuffer *buf;
image_buf = cm->FigSampleBufferGetImageBuffer (sample_buf);
pixel_buf = NULL;
block_buf = cm->FigSampleBufferGetDataBuffer (sample_buf);
if (image_buf != NULL &&
CFGetTypeID (image_buf) == cv->CVPixelBufferGetTypeID ()) {
pixel_buf = (CVPixelBufferRef) image_buf;
if (cv->CVPixelBufferLockBaseAddress (pixel_buf, 0) != kCVReturnSuccess)
goto error;
if (cv->CVPixelBufferIsPlanar (pixel_buf)) {
gint plane_count, plane_idx;
data = cv->CVPixelBufferGetBaseAddressOfPlane (pixel_buf, 0);
size = 0;
plane_count = cv->CVPixelBufferGetPlaneCount (pixel_buf);
for (plane_idx = 0; plane_idx != plane_count; plane_idx++) {
size += cv->CVPixelBufferGetBytesPerRowOfPlane (pixel_buf, plane_idx) *
cv->CVPixelBufferGetHeightOfPlane (pixel_buf, plane_idx);
}
} else {
data = cv->CVPixelBufferGetBaseAddress (pixel_buf);
size = cv->CVPixelBufferGetBytesPerRow (pixel_buf) *
cv->CVPixelBufferGetHeight (pixel_buf);
}
} else if (block_buf != NULL) {
status = cm->FigBlockBufferGetDataPointer (block_buf, 0, 0, 0, &data);
if (status != kFigSuccess)
goto error;
size = cm->FigBlockBufferGetDataLength (block_buf);
} else {
goto error;
}
buf =
GST_CORE_MEDIA_BUFFER (gst_mini_object_new (GST_TYPE_CORE_MEDIA_BUFFER));
buf->ctx = g_object_ref (ctx);
buf->sample_buf = cm->FigSampleBufferRetain (sample_buf);
buf->image_buf = image_buf;
buf->pixel_buf = pixel_buf;
buf->block_buf = block_buf;
GST_BUFFER_DATA (buf) = data;
GST_BUFFER_SIZE (buf) = size;
return GST_BUFFER_CAST (buf);
error:
return NULL;
}
CVPixelBufferRef
gst_core_media_buffer_get_pixel_buffer (GstCoreMediaBuffer * buf)
{
return buf->ctx->cv->CVPixelBufferRetain (buf->pixel_buf);
}
static void
gst_core_media_buffer_class_init (GstCoreMediaBufferClass * klass)
{
GstMiniObjectClass *miniobject_class = GST_MINI_OBJECT_CLASS (klass);
miniobject_class->finalize = gst_core_media_buffer_finalize;
}

@@ -0,0 +1,64 @@
/*
* Copyright (C) 2009 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_CORE_MEDIA_BUFFER_H__
#define __GST_CORE_MEDIA_BUFFER_H__
#include <gst/gst.h>
#include "coremediactx.h"
G_BEGIN_DECLS
#define GST_TYPE_CORE_MEDIA_BUFFER (gst_core_media_buffer_get_type ())
#define GST_IS_CORE_MEDIA_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), \
GST_TYPE_CORE_MEDIA_BUFFER))
#define GST_CORE_MEDIA_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), \
GST_TYPE_CORE_MEDIA_BUFFER, GstCoreMediaBuffer))
#define GST_CORE_MEDIA_BUFFER_CAST(obj) ((GstCoreMediaBuffer *) (obj))
typedef struct _GstCoreMediaBuffer GstCoreMediaBuffer;
typedef struct _GstCoreMediaBufferClass GstCoreMediaBufferClass;
struct _GstCoreMediaBuffer
{
GstBuffer buffer;
GstCoreMediaCtx * ctx;
FigSampleBuffer * sample_buf;
CVImageBufferRef image_buf;
CVPixelBufferRef pixel_buf;
FigBlockBuffer * block_buf;
};
struct _GstCoreMediaBufferClass
{
GstBufferClass parent_class;
};
GType gst_core_media_buffer_get_type (void) G_GNUC_CONST;
GstBuffer * gst_core_media_buffer_new (GstCoreMediaCtx * ctx,
FigSampleBuffer * sample_buf);
CVPixelBufferRef gst_core_media_buffer_get_pixel_buffer
(GstCoreMediaBuffer * buf);
G_END_DECLS
#endif /* __GST_CORE_MEDIA_BUFFER_H__ */

@@ -0,0 +1,135 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "coremediactx.h"
#include <gst/gst.h>
typedef struct _GstApiProvider GstApiProvider;
typedef gpointer (*GstApiProviderObtainFunc) (GError ** error);
struct _GstApiProvider
{
GstCoreMediaApi api;
GstApiProviderObtainFunc obtain;
guint offset;
};
#define API_PROVIDER(AN, a_n) \
{ GST_API_##AN, (GstApiProviderObtainFunc) gst_##a_n##_api_obtain, \
G_STRUCT_OFFSET (GstCoreMediaCtx, a_n) }
static const GstApiProvider api_provider[] = {
API_PROVIDER (CORE_VIDEO, cv),
API_PROVIDER (CORE_MEDIA, cm),
API_PROVIDER (VIDEO_TOOLBOX, vt),
#ifdef HAVE_IOS
API_PROVIDER (MEDIA_TOOLBOX, mt),
API_PROVIDER (CELESTIAL, cel)
#else
API_PROVIDER (MIO, mio),
#endif
};
G_DEFINE_TYPE (GstCoreMediaCtx, gst_core_media_ctx, G_TYPE_OBJECT);
static void
gst_core_media_ctx_init (GstCoreMediaCtx * self)
{
}
static void
gst_core_media_ctx_dispose (GObject * object)
{
GstCoreMediaCtx *self = GST_CORE_MEDIA_CTX_CAST (object);
guint i;
for (i = 0; i != G_N_ELEMENTS (api_provider); i++) {
const GstApiProvider *ap = &api_provider[i];
gpointer *api_ptr = (gpointer *) ((guint8 *) self + ap->offset);
if (*api_ptr != NULL) {
g_object_unref (*api_ptr);
*api_ptr = NULL;
}
}
G_OBJECT_CLASS (gst_core_media_ctx_parent_class)->dispose (object);
}
static void
gst_core_media_ctx_class_init (GstCoreMediaCtxClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
gobject_class->dispose = gst_core_media_ctx_dispose;
}
GstCoreMediaCtx *
gst_core_media_ctx_new (GstCoreMediaApi required_apis, GError ** error)
{
GstCoreMediaCtx *ctx;
GArray *error_messages;
guint i;
ctx = g_object_new (GST_TYPE_CORE_MEDIA_CTX, NULL);
error_messages = g_array_new (TRUE, FALSE, sizeof (gchar *));
for (i = 0; i != G_N_ELEMENTS (api_provider); i++) {
const GstApiProvider *ap = &api_provider[i];
if ((required_apis & ap->api) != 0) {
gpointer *api_ptr = (gpointer *) ((guint8 *) ctx + ap->offset);
GError *tmp_error = NULL;
*api_ptr = ap->obtain (&tmp_error);
if (tmp_error != NULL) {
gchar *message_copy = g_strdup (tmp_error->message);
g_array_append_val (error_messages, message_copy);
g_clear_error (&tmp_error);
}
}
}
if (error_messages->len != 0) {
gchar *errors_joined;
errors_joined = g_strjoinv ("\n\t* ", (gchar **) error_messages->data);
g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_FAILED,
"Could not obtain required API%s:%s%s",
(error_messages->len == 1) ? "" : "s",
(error_messages->len == 1) ? " " : "\n\t* ", errors_joined);
g_free (errors_joined);
g_object_unref (ctx);
ctx = NULL;
}
for (i = 0; i != error_messages->len; i++)
g_free (g_array_index (error_messages, gchar *, i));
g_array_free (error_messages, TRUE);
return ctx;
}

@@ -0,0 +1,98 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_CORE_MEDIA_CTX_H__
#define __GST_CORE_MEDIA_CTX_H__
#include "cvapi.h"
#include "cmapi.h"
#include "vtapi.h"
#include "mioapi.h"
#include "mtapi.h"
#include "celapi.h"
#include <glib.h>
G_BEGIN_DECLS
#define GST_TYPE_CORE_MEDIA_CTX \
(gst_core_media_ctx_get_type ())
#define GST_CORE_MEDIA_CTX(obj) \
(G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_CORE_MEDIA_CTX, GstCoreMediaCtx))
#define GST_CORE_MEDIA_CTX_CAST(obj) \
((GstCoreMediaCtx *) (obj))
#define GST_CORE_MEDIA_CTX_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_CORE_MEDIA_CTX, GstCoreMediaCtxClass))
#define GST_IS_CORE_MEDIA_CTX(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_CORE_MEDIA_CTX))
#define GST_IS_CORE_MEDIA_CTX_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_CORE_MEDIA_CTX))
#define GST_DYN_SYM_SPEC(type, name) \
{ G_STRINGIFY (name), G_STRUCT_OFFSET (type, name) }
typedef struct _GstCoreMediaCtx GstCoreMediaCtx;
typedef struct _GstCoreMediaCtxClass GstCoreMediaCtxClass;
typedef enum _GstCoreMediaApi GstCoreMediaApi;
struct _GstCoreMediaCtx
{
GObject parent;
/* Common */
GstCVApi *cv;
GstCMApi *cm;
GstVTApi *vt;
/* OS X */
GstMIOApi *mio;
/* iPhone */
GstMTApi *mt;
GstCelApi *cel;
};
struct _GstCoreMediaCtxClass
{
GObjectClass parent_class;
};
enum _GstCoreMediaApi
{
GST_API_CORE_VIDEO = (1 << 0),
GST_API_CORE_MEDIA = (1 << 1),
GST_API_VIDEO_TOOLBOX = (1 << 2),
GST_API_MIO = (1 << 3),
GST_API_MEDIA_TOOLBOX = (1 << 4),
GST_API_CELESTIAL = (1 << 5)
};
GType gst_core_media_ctx_get_type (void);
GstCoreMediaCtx * gst_core_media_ctx_new (GstCoreMediaApi required_apis,
GError ** error);
G_END_DECLS
#endif

@@ -0,0 +1,90 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "corevideobuffer.h"
G_DEFINE_TYPE (GstCoreVideoBuffer, gst_core_video_buffer, GST_TYPE_BUFFER);
static void
gst_core_video_buffer_init (GstCoreVideoBuffer * self)
{
GST_BUFFER_FLAG_SET (self, GST_BUFFER_FLAG_READONLY);
}
static void
gst_core_video_buffer_finalize (GstMiniObject * mini_object)
{
GstCoreVideoBuffer *self = GST_CORE_VIDEO_BUFFER_CAST (mini_object);
GstCVApi *cv = self->ctx->cv;
if (self->pixbuf != NULL)
cv->CVPixelBufferUnlockBaseAddress (self->pixbuf, 0);
cv->CVBufferRelease (self->cvbuf);
g_object_unref (self->ctx);
GST_MINI_OBJECT_CLASS (gst_core_video_buffer_parent_class)->finalize
(mini_object);
}
GstBuffer *
gst_core_video_buffer_new (GstCoreMediaCtx * ctx, CVBufferRef cvbuf)
{
GstCVApi *cv = ctx->cv;
void *data;
size_t size;
CVPixelBufferRef pixbuf = NULL;
GstCoreVideoBuffer *buf;
if (CFGetTypeID (cvbuf) == cv->CVPixelBufferGetTypeID ()) {
pixbuf = (CVPixelBufferRef) cvbuf;
if (cv->CVPixelBufferLockBaseAddress (pixbuf, 0) != kCVReturnSuccess)
goto error;
data = cv->CVPixelBufferGetBaseAddress (pixbuf);
size = cv->CVPixelBufferGetBytesPerRow (pixbuf) *
cv->CVPixelBufferGetHeight (pixbuf);
} else {
/* TODO: Do we need to handle other buffer types? */
goto error;
}
buf = GST_CORE_VIDEO_BUFFER_CAST (gst_mini_object_new
(GST_TYPE_CORE_VIDEO_BUFFER));
buf->ctx = g_object_ref (ctx);
buf->cvbuf = cv->CVBufferRetain (cvbuf);
buf->pixbuf = pixbuf;
GST_BUFFER_DATA (buf) = data;
GST_BUFFER_SIZE (buf) = size;
return GST_BUFFER_CAST (buf);
error:
return NULL;
}
static void
gst_core_video_buffer_class_init (GstCoreVideoBufferClass * klass)
{
GstMiniObjectClass *miniobject_class = GST_MINI_OBJECT_CLASS (klass);
miniobject_class->finalize = gst_core_video_buffer_finalize;
}

@@ -0,0 +1,60 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_CORE_VIDEO_BUFFER_H__
#define __GST_CORE_VIDEO_BUFFER_H__
#include <gst/gst.h>
#include "coremediactx.h"
G_BEGIN_DECLS
#define GST_TYPE_CORE_VIDEO_BUFFER (gst_core_video_buffer_get_type ())
#define GST_IS_CORE_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), \
GST_TYPE_CORE_VIDEO_BUFFER))
#define GST_CORE_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), \
GST_TYPE_CORE_VIDEO_BUFFER, GstCoreVideoBuffer))
#define GST_CORE_VIDEO_BUFFER_CAST(obj) ((GstCoreVideoBuffer *) (obj))
typedef struct _GstCoreVideoBuffer GstCoreVideoBuffer;
typedef struct _GstCoreVideoBufferClass GstCoreVideoBufferClass;
struct _GstCoreVideoBuffer
{
GstBuffer buffer;
GstCoreMediaCtx *ctx;
CVBufferRef cvbuf;
CVPixelBufferRef pixbuf;
};
struct _GstCoreVideoBufferClass
{
GstBufferClass parent_class;
};
GType gst_core_video_buffer_get_type (void) G_GNUC_CONST;
GstBuffer * gst_core_video_buffer_new (GstCoreMediaCtx * ctx,
CVBufferRef cvbuf);
G_END_DECLS
#endif /* __GST_CORE_VIDEO_BUFFER_H__ */

sys/applemedia/cvapi.c
@@ -0,0 +1,73 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "cvapi.h"
#include "dynapi-internal.h"
#define CV_FRAMEWORK_PATH "/System/Library/Frameworks/CoreVideo.framework/" \
"CoreVideo"
G_DEFINE_TYPE (GstCVApi, gst_cv_api, GST_TYPE_DYN_API);
static void
gst_cv_api_init (GstCVApi * self)
{
}
static void
gst_cv_api_class_init (GstCVApiClass * klass)
{
}
#define SYM_SPEC(name) GST_DYN_SYM_SPEC (GstCVApi, name)
GstCVApi *
gst_cv_api_obtain (GError ** error)
{
static const GstDynSymSpec symbols[] = {
SYM_SPEC (CVBufferRelease),
SYM_SPEC (CVBufferRetain),
SYM_SPEC (CVPixelBufferCreateWithBytes),
SYM_SPEC (CVPixelBufferGetBaseAddress),
SYM_SPEC (CVPixelBufferGetBaseAddressOfPlane),
SYM_SPEC (CVPixelBufferGetBytesPerRow),
SYM_SPEC (CVPixelBufferGetBytesPerRowOfPlane),
SYM_SPEC (CVPixelBufferGetHeight),
SYM_SPEC (CVPixelBufferGetHeightOfPlane),
SYM_SPEC (CVPixelBufferGetPlaneCount),
SYM_SPEC (CVPixelBufferGetTypeID),
SYM_SPEC (CVPixelBufferIsPlanar),
SYM_SPEC (CVPixelBufferLockBaseAddress),
SYM_SPEC (CVPixelBufferRelease),
SYM_SPEC (CVPixelBufferRetain),
SYM_SPEC (CVPixelBufferUnlockBaseAddress),
SYM_SPEC (kCVPixelBufferPixelFormatTypeKey),
SYM_SPEC (kCVPixelBufferWidthKey),
SYM_SPEC (kCVPixelBufferHeightKey),
SYM_SPEC (kCVPixelBufferBytesPerRowAlignmentKey),
{NULL, 0},
};
return _gst_dyn_api_new (gst_cv_api_get_type (), CV_FRAMEWORK_PATH, symbols,
error);
}

sys/applemedia/cvapi.h
@@ -0,0 +1,107 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_CV_API_H__
#define __GST_CV_API_H__
#include "dynapi.h"
#include <CoreFoundation/CoreFoundation.h>
G_BEGIN_DECLS
typedef struct _GstCVApi GstCVApi;
typedef struct _GstCVApiClass GstCVApiClass;
typedef int32_t CVReturn;
typedef uint64_t CVOptionFlags;
typedef struct _CVBuffer * CVBufferRef;
typedef CVBufferRef CVImageBufferRef;
typedef CVImageBufferRef CVPixelBufferRef;
typedef void (* CVPixelBufferReleaseBytesCallback) (void * releaseRefCon,
const void * baseAddress);
enum _CVReturn
{
kCVReturnSuccess = 0
};
enum _CVPixelFormatType
{
kCVPixelFormatType_422YpCbCr8Deprecated = 'yuvs',
kCVPixelFormatType_422YpCbCr8 = '2vuy'
};
struct _GstCVApi
{
GstDynApi parent;
void (* CVBufferRelease) (CVBufferRef buffer);
CVBufferRef (* CVBufferRetain) (CVBufferRef buffer);
CVReturn (* CVPixelBufferCreateWithBytes)
(CFAllocatorRef allocator, size_t width, size_t height,
OSType pixelFormatType, void * baseAddress, size_t bytesPerRow,
CVPixelBufferReleaseBytesCallback releaseCallback,
void * releaseRefCon, CFDictionaryRef pixelBufferAttributes,
CVPixelBufferRef * pixelBufferOut);
void * (* CVPixelBufferGetBaseAddress)
(CVPixelBufferRef pixelBuffer);
void * (* CVPixelBufferGetBaseAddressOfPlane)
(CVPixelBufferRef pixelBuffer, size_t planeIndex);
size_t (* CVPixelBufferGetBytesPerRow)
(CVPixelBufferRef pixelBuffer);
size_t (* CVPixelBufferGetBytesPerRowOfPlane)
(CVPixelBufferRef pixelBuffer, size_t planeIndex);
size_t (* CVPixelBufferGetHeight) (CVPixelBufferRef pixelBuffer);
size_t (* CVPixelBufferGetHeightOfPlane)
(CVPixelBufferRef pixelBuffer, size_t planeIndex);
size_t (* CVPixelBufferGetPlaneCount)
(CVPixelBufferRef pixelBuffer);
CFTypeID (* CVPixelBufferGetTypeID) (void);
Boolean (* CVPixelBufferIsPlanar) (CVPixelBufferRef pixelBuffer);
CVReturn (* CVPixelBufferLockBaseAddress)
(CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags);
void (* CVPixelBufferRelease) (CVPixelBufferRef pixelBuffer);
CVPixelBufferRef (* CVPixelBufferRetain)
(CVPixelBufferRef pixelBuffer);
CVReturn (* CVPixelBufferUnlockBaseAddress)
(CVPixelBufferRef pixelBuffer, CVOptionFlags unlockFlags);
CFStringRef * kCVPixelBufferPixelFormatTypeKey;
CFStringRef * kCVPixelBufferWidthKey;
CFStringRef * kCVPixelBufferHeightKey;
CFStringRef * kCVPixelBufferBytesPerRowAlignmentKey;
};
struct _GstCVApiClass
{
GstDynApiClass parent_class;
};
GType gst_cv_api_get_type (void);
GstCVApi * gst_cv_api_obtain (GError ** error);
G_END_DECLS
#endif

@@ -0,0 +1,41 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_DYN_API_INTERNAL_H__
#define __GST_DYN_API_INTERNAL_H__
#include <glib-object.h>
G_BEGIN_DECLS
typedef struct _GstDynSymSpec GstDynSymSpec;
struct _GstDynSymSpec
{
const gchar * name;
guint offset;
};
gpointer _gst_dyn_api_new (GType derived_type, const gchar * filename,
const GstDynSymSpec * symbols, GError ** error);
G_END_DECLS
#endif

sys/applemedia/dynapi.c
@@ -0,0 +1,192 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "dynapi.h"
#include "dynapi-internal.h"
#include <gmodule.h>
#include <gst/gst.h>
GST_DEBUG_CATEGORY (gst_dyn_api_debug);
#define GST_CAT_DEFAULT gst_dyn_api_debug
enum
{
PROP_0,
PROP_FILENAME
};
struct _GstDynApiPrivate
{
gchar *filename;
GModule *module;
};
G_DEFINE_TYPE (GstDynApi, gst_dyn_api, G_TYPE_OBJECT);
static void
gst_dyn_api_init (GstDynApi * self)
{
self->priv = G_TYPE_INSTANCE_GET_PRIVATE (self, GST_TYPE_DYN_API,
GstDynApiPrivate);
}
static void
gst_dyn_api_dispose (GObject * object)
{
GstDynApi *self = GST_DYN_API_CAST (object);
GstDynApiPrivate *priv = self->priv;
if (priv->module != NULL) {
g_module_close (priv->module);
priv->module = NULL;
}
G_OBJECT_CLASS (gst_dyn_api_parent_class)->dispose (object);
}
static void
gst_dyn_api_finalize (GObject * object)
{
GstDynApi *self = GST_DYN_API_CAST (object);
GstDynApiPrivate *priv = self->priv;
g_free (priv->filename);
G_OBJECT_CLASS (gst_dyn_api_parent_class)->finalize (object);
}
static void
gst_dyn_api_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstDynApi *self = GST_DYN_API (object);
switch (prop_id) {
case PROP_FILENAME:
g_value_set_string (value, self->priv->filename);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_dyn_api_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstDynApi *self = GST_DYN_API (object);
switch (prop_id) {
case PROP_FILENAME:
g_free (self->priv->filename);
self->priv->filename = g_value_dup_string (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_dyn_api_class_init (GstDynApiClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
gobject_class->dispose = gst_dyn_api_dispose;
gobject_class->finalize = gst_dyn_api_finalize;
gobject_class->get_property = gst_dyn_api_get_property;
gobject_class->set_property = gst_dyn_api_set_property;
g_type_class_add_private (klass, sizeof (GstDynApiPrivate));
g_object_class_install_property (gobject_class, PROP_FILENAME,
g_param_spec_string ("filename", "Filename", "Filename", NULL,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS));
}
gpointer
_gst_dyn_api_new (GType derived_type, const gchar * filename,
const GstDynSymSpec * symbols, GError ** error)
{
GstDynApi *api;
GstDynApiPrivate *priv;
guint i;
GArray *names_not_found;
api = g_object_new (derived_type, "filename", filename, NULL);
priv = api->priv;
priv->module = g_module_open (priv->filename, 0);
if (priv->module == NULL)
goto open_failed;
names_not_found = g_array_new (TRUE, FALSE, sizeof (gchar *));
for (i = 0; symbols[i].name != NULL; i++) {
if (!g_module_symbol (priv->module, symbols[i].name,
(gpointer *) (((guint8 *) api) + symbols[i].offset))) {
g_array_append_val (names_not_found, symbols[i].name);
}
}
if (names_not_found->len > 0)
goto one_or_more_name_not_found;
g_array_free (names_not_found, TRUE);
return api;
/* ERRORS */
open_failed:
{
gchar *basename;
basename = g_path_get_basename (filename);
g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_FAILED,
"failed to open %s", basename);
g_free (basename);
goto any_error;
}
one_or_more_name_not_found:
{
gchar *basename, *names_joined;
basename = g_path_get_basename (filename);
names_joined = g_strjoinv (", ", (gchar **) names_not_found->data);
g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_FAILED,
"missing %u symbol%s in %s: %s",
names_not_found->len, (names_not_found->len == 1) ? "" : "s",
basename, names_joined);
g_free (names_joined);
g_free (basename);
g_array_free (names_not_found, TRUE);
goto any_error;
}
any_error:
{
g_object_unref (api);
return NULL;
}
}

sys/applemedia/dynapi.h
@@ -0,0 +1,63 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_DYN_API_H__
#define __GST_DYN_API_H__
#include <glib-object.h>
G_BEGIN_DECLS
#define GST_TYPE_DYN_API \
(gst_dyn_api_get_type ())
#define GST_DYN_API(obj) \
(G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DYN_API, GstDynApi))
#define GST_DYN_API_CAST(obj) \
((GstDynApi *) (obj))
#define GST_DYN_API_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DYN_API, GstDynApiClass))
#define GST_IS_DYN_API(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DYN_API))
#define GST_IS_DYN_API_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DYN_API))
#define GST_DYN_SYM_SPEC(type, name) \
{ G_STRINGIFY (name), G_STRUCT_OFFSET (type, name) }
typedef struct _GstDynApi GstDynApi;
typedef struct _GstDynApiClass GstDynApiClass;
typedef struct _GstDynApiPrivate GstDynApiPrivate;
struct _GstDynApi
{
GObject parent;
GstDynApiPrivate * priv;
};
struct _GstDynApiClass
{
GObjectClass parent_class;
};
GType gst_dyn_api_get_type (void);
G_END_DECLS
#endif

@@ -0,0 +1,773 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "iphonecamerasrc.h"
#include "coremediabuffer.h"
#include <gst/video/video.h>
#define DEFAULT_DO_STATS FALSE
#define BUFQUEUE_LOCK(instance) GST_OBJECT_LOCK (instance)
#define BUFQUEUE_UNLOCK(instance) GST_OBJECT_UNLOCK (instance)
#define BUFQUEUE_WAIT(instance) \
g_cond_wait (instance->cond, GST_OBJECT_GET_LOCK (instance))
#define BUFQUEUE_NOTIFY(instance) g_cond_signal (instance->cond)
GST_DEBUG_CATEGORY (gst_iphone_camera_src_debug);
#define GST_CAT_DEFAULT gst_iphone_camera_src_debug
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("YUY2") ";"
GST_VIDEO_CAPS_YUV ("I420"))
);
enum
{
PROP_0,
PROP_DO_STATS
};
typedef struct
{
guint index;
GstVideoFormat video_format;
guint32 fourcc;
gint width;
gint height;
gint fps_n;
gint fps_d;
} GstIPhoneCameraFormat;
static gboolean gst_iphone_camera_src_open_device (GstIPhoneCameraSrc * self);
static void gst_iphone_camera_src_close_device (GstIPhoneCameraSrc * self);
static void gst_iphone_camera_src_ensure_device_caps_and_formats
(GstIPhoneCameraSrc * self);
static void gst_iphone_camera_src_release_device_caps_and_formats
(GstIPhoneCameraSrc * self);
static gboolean gst_iphone_camera_src_select_format (GstIPhoneCameraSrc * self,
GstIPhoneCameraFormat * format);
static gboolean gst_iphone_camera_src_parse_imager_format
(GstIPhoneCameraSrc * self, guint index, CFDictionaryRef imager_format,
GstIPhoneCameraFormat * format);
static FigStatus gst_iphone_camera_src_set_device_property_i32
(GstIPhoneCameraSrc * self, CFStringRef name, SInt32 value);
static FigStatus gst_iphone_camera_src_set_device_property_cstr
(GstIPhoneCameraSrc * self, const gchar * name, const gchar * value);
static GstPushSrcClass *parent_class;
GST_BOILERPLATE (GstIPhoneCameraSrc, gst_iphone_camera_src, GstPushSrc,
GST_TYPE_PUSH_SRC);
static void
gst_iphone_camera_src_init (GstIPhoneCameraSrc * self,
GstIPhoneCameraSrcClass * gclass)
{
GstBaseSrc *base_src = GST_BASE_SRC_CAST (self);
gst_base_src_set_live (base_src, TRUE);
gst_base_src_set_format (base_src, GST_FORMAT_TIME);
self->cond = g_cond_new ();
}
static void
gst_iphone_camera_src_dispose (GObject * object)
{
G_OBJECT_CLASS (parent_class)->dispose (object);
}
static void
gst_iphone_camera_src_finalize (GObject * object)
{
GstIPhoneCameraSrc *self = GST_IPHONE_CAMERA_SRC_CAST (object);
g_cond_free (self->cond);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_iphone_camera_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstIPhoneCameraSrc *self = GST_IPHONE_CAMERA_SRC_CAST (object);
switch (prop_id) {
case PROP_DO_STATS:
g_value_set_boolean (value, self->do_stats);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_iphone_camera_src_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstIPhoneCameraSrc *self = GST_IPHONE_CAMERA_SRC_CAST (object);
switch (prop_id) {
case PROP_DO_STATS:
self->do_stats = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static GstStateChangeReturn
gst_iphone_camera_src_change_state (GstElement * element,
GstStateChange transition)
{
GstIPhoneCameraSrc *self = GST_IPHONE_CAMERA_SRC_CAST (element);
GstStateChangeReturn ret;
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
if (!gst_iphone_camera_src_open_device (self))
goto open_failed;
break;
default:
break;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_READY_TO_NULL:
gst_iphone_camera_src_close_device (self);
break;
default:
break;
}
return ret;
/* ERRORS */
open_failed:
{
return GST_STATE_CHANGE_FAILURE;
}
}
static GstCaps *
gst_iphone_camera_src_get_caps (GstBaseSrc * basesrc)
{
GstIPhoneCameraSrc *self = GST_IPHONE_CAMERA_SRC_CAST (basesrc);
GstCaps *result;
if (self->device != NULL) {
gst_iphone_camera_src_ensure_device_caps_and_formats (self);
result = gst_caps_ref (self->device_caps);
} else {
result = NULL;
}
if (result != NULL) {
gchar *str;
str = gst_caps_to_string (result);
GST_DEBUG_OBJECT (self, "returning: %s", str);
g_free (str);
}
return result;
}
static gboolean
gst_iphone_camera_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
{
GstIPhoneCameraSrc *self = GST_IPHONE_CAMERA_SRC_CAST (basesrc);
GstVideoFormat video_format;
gint width, height, fps_n, fps_d;
guint i;
GstIPhoneCameraFormat *selected_format;
if (self->device == NULL)
goto no_device;
if (!gst_video_format_parse_caps (caps, &video_format, &width, &height))
goto invalid_format;
if (!gst_video_parse_caps_framerate (caps, &fps_n, &fps_d))
goto invalid_format;
gst_iphone_camera_src_ensure_device_caps_and_formats (self);
selected_format = NULL;
for (i = 0; i != self->device_formats->len; i++) {
GstIPhoneCameraFormat *format;
format = &g_array_index (self->device_formats, GstIPhoneCameraFormat, i);
if (format->video_format == video_format &&
format->width == width && format->height == height &&
format->fps_n == fps_n && format->fps_d == fps_d) {
selected_format = format;
break;
}
}
if (selected_format == NULL)
goto invalid_format;
GST_DEBUG_OBJECT (self, "selecting format %u", selected_format->index);
if (!gst_iphone_camera_src_select_format (self, selected_format))
goto select_failed;
gst_iphone_camera_src_release_device_caps_and_formats (self);
return TRUE;
/* ERRORS */
no_device:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED, ("no device"), (NULL));
return FALSE;
}
invalid_format:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED, ("invalid format"), (NULL));
return FALSE;
}
select_failed:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED, ("failed to select format"),
(NULL));
return FALSE;
}
}
static gboolean
gst_iphone_camera_src_start (GstBaseSrc * basesrc)
{
GstIPhoneCameraSrc *self = GST_IPHONE_CAMERA_SRC_CAST (basesrc);
self->running = TRUE;
self->offset = 0;
return TRUE;
}
static gboolean
gst_iphone_camera_src_stop (GstBaseSrc * basesrc)
{
return TRUE;
}
static gboolean
gst_iphone_camera_src_query (GstBaseSrc * basesrc, GstQuery * query)
{
GstIPhoneCameraSrc *self = GST_IPHONE_CAMERA_SRC_CAST (basesrc);
gboolean result = FALSE;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_LATENCY:{
GstClockTime min_latency, max_latency;
if (self->device == NULL || !GST_CLOCK_TIME_IS_VALID (self->duration))
goto beach;
min_latency = max_latency = self->duration;
GST_DEBUG_OBJECT (self, "reporting latency of min %" GST_TIME_FORMAT
" max %" GST_TIME_FORMAT,
GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
gst_query_set_latency (query, TRUE, min_latency, max_latency);
result = TRUE;
break;
}
default:
result = GST_BASE_SRC_CLASS (parent_class)->query (basesrc, query);
break;
}
beach:
return result;
}
static gboolean
gst_iphone_camera_src_unlock (GstBaseSrc * basesrc)
{
GstIPhoneCameraSrc *self = GST_IPHONE_CAMERA_SRC_CAST (basesrc);
BUFQUEUE_LOCK (self);
self->running = FALSE;
BUFQUEUE_UNLOCK (self);
return TRUE;
}
static gboolean
gst_iphone_camera_src_unlock_stop (GstBaseSrc * basesrc)
{
return TRUE;
}
static Boolean
gst_iphone_camera_src_validate (FigBufferQueueRef queue, FigSampleBuffer * buf,
void *refCon)
{
GstIPhoneCameraSrc *self = GST_IPHONE_CAMERA_SRC_CAST (refCon);
BUFQUEUE_LOCK (self);
self->has_pending = TRUE;
BUFQUEUE_NOTIFY (self);
BUFQUEUE_UNLOCK (self);
return FALSE;
}
static GstFlowReturn
gst_iphone_camera_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
{
GstIPhoneCameraSrc *self = GST_IPHONE_CAMERA_SRC_CAST (pushsrc);
GstCMApi *cm = self->ctx->cm;
FigSampleBuffer *sbuf = NULL;
GstClock *clock;
GstClockTime ts;
BUFQUEUE_LOCK (self);
while (self->running && !self->has_pending)
BUFQUEUE_WAIT (self);
sbuf = cm->FigBufferQueueDequeueAndRetain (self->queue);
self->has_pending = !cm->FigBufferQueueIsEmpty (self->queue);
BUFQUEUE_UNLOCK (self);
if (G_UNLIKELY (!self->running))
goto shutting_down;
GST_OBJECT_LOCK (self);
if ((clock = GST_ELEMENT_CLOCK (self)) != NULL) {
ts = gst_clock_get_time (clock);
if (ts > GST_ELEMENT (self)->base_time)
ts -= GST_ELEMENT (self)->base_time;
else
ts = 0;
if (ts > self->duration)
ts -= self->duration;
else
ts = 0;
} else {
ts = GST_CLOCK_TIME_NONE;
}
GST_OBJECT_UNLOCK (self);
*buf = gst_core_media_buffer_new (self->ctx, sbuf);
GST_BUFFER_OFFSET (*buf) = self->offset;
GST_BUFFER_OFFSET_END (*buf) = self->offset + 1;
GST_BUFFER_TIMESTAMP (*buf) = ts;
GST_BUFFER_DURATION (*buf) = self->duration;
if (self->offset == 0) {
GST_BUFFER_FLAG_SET (*buf, GST_BUFFER_FLAG_DISCONT);
}
self->offset++;
cm->FigSampleBufferRelease (sbuf);
return GST_FLOW_OK;
/* ERRORS */
shutting_down:
{
cm->FigSampleBufferRelease (sbuf);
return GST_FLOW_WRONG_STATE;
}
}
static gboolean
gst_iphone_camera_src_open_device (GstIPhoneCameraSrc * self)
{
GstCoreMediaCtx *ctx = NULL;
GError *error = NULL;
GstCMApi *cm = NULL;
GstMTApi *mt = NULL;
GstCelApi *cel = NULL;
FigStatus status;
FigCaptureDeviceRef device = NULL;
FigBaseObjectRef device_base;
FigBaseVTable *device_vt;
FigCaptureStreamRef stream = NULL;
FigBaseObjectRef stream_base;
FigBaseVTable *stream_vt;
FigCaptureStreamIface *stream_iface;
FigBufferQueueRef queue = NULL;
ctx = gst_core_media_ctx_new (GST_API_CORE_VIDEO | GST_API_CORE_MEDIA
| GST_API_MEDIA_TOOLBOX | GST_API_CELESTIAL, &error);
if (error != NULL)
goto api_error;
cm = ctx->cm;
mt = ctx->mt;
cel = ctx->cel;
status = cel->FigCreateCaptureDevicesAndStreamsForPreset (NULL,
*(cel->kFigRecorderCapturePreset_VideoRecording), NULL,
&device, &stream, NULL, NULL);
if (status == kFigResourceBusy)
goto device_busy;
else if (status != kFigSuccess)
goto unexpected_error;
device_base = mt->FigCaptureDeviceGetFigBaseObject (device);
device_vt = cm->FigBaseObjectGetVTable (device_base);
stream_base = mt->FigCaptureStreamGetFigBaseObject (stream);
stream_vt = cm->FigBaseObjectGetVTable (stream_base);
stream_iface = stream_vt->derived;
status = stream_vt->base->CopyProperty (stream_base,
*(mt->kFigCaptureStreamProperty_BufferQueue), NULL, &queue);
if (status != kFigSuccess)
goto unexpected_error;
self->has_pending = FALSE;
cm->FigBufferQueueSetValidationCallback (queue,
gst_iphone_camera_src_validate, self);
self->ctx = ctx;
self->device = device;
self->device_iface_base = device_vt->base;
self->stream = stream;
self->stream_iface_base = stream_vt->base;
self->stream_iface = stream_iface;
self->queue = queue;
self->duration = GST_CLOCK_TIME_NONE;
return TRUE;
/* ERRORS */
api_error:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED, ("API error"),
("%s", error->message));
g_clear_error (&error);
goto any_error;
}
device_busy:
{
GST_ELEMENT_ERROR (self, RESOURCE, BUSY,
("device is already in use"), (NULL));
goto any_error;
}
unexpected_error:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
("unexpected error while opening device (%d)", status), (NULL));
goto any_error;
}
any_error:
{
if (stream != NULL)
CFRelease (stream);
if (device != NULL)
CFRelease (device);
if (ctx != NULL) {
cm->FigBufferQueueRelease (queue);
g_object_unref (ctx);
}
return FALSE;
}
}
static void
gst_iphone_camera_src_close_device (GstIPhoneCameraSrc * self)
{
gst_iphone_camera_src_release_device_caps_and_formats (self);
self->stream_iface->Stop (self->stream);
self->stream_iface = NULL;
self->stream_iface_base->Finalize (self->stream);
self->stream_iface_base = NULL;
CFRelease (self->stream);
self->stream = NULL;
self->device_iface_base->Finalize (self->device);
self->device_iface_base = NULL;
CFRelease (self->device);
self->device = NULL;
self->ctx->cm->FigBufferQueueRelease (self->queue);
self->queue = NULL;
g_object_unref (self->ctx);
self->ctx = NULL;
}
static void
gst_iphone_camera_src_ensure_device_caps_and_formats (GstIPhoneCameraSrc * self)
{
FigStatus status;
CFArrayRef iformats = NULL;
CFIndex format_count, i;
if (self->device_caps != NULL)
goto already_probed;
self->device_caps = gst_caps_new_empty ();
self->device_formats =
g_array_new (FALSE, FALSE, sizeof (GstIPhoneCameraFormat));
status = self->device_iface_base->CopyProperty (self->device,
*(self->ctx->mt->kFigCaptureDeviceProperty_ImagerSupportedFormatsArray),
NULL, (CFTypeRef *) & iformats);
if (status != kFigSuccess)
goto beach;
format_count = CFArrayGetCount (iformats);
GST_DEBUG_OBJECT (self, "device supports %d formats", (gint) format_count);
for (i = 0; i != format_count; i++) {
CFDictionaryRef iformat;
GstIPhoneCameraFormat format;
iformat = CFArrayGetValueAtIndex (iformats, i);
if (gst_iphone_camera_src_parse_imager_format (self, i, iformat, &format)) {
gst_caps_append_structure (self->device_caps,
gst_structure_new ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, format.fourcc,
"width", G_TYPE_INT, format.width,
"height", G_TYPE_INT, format.height,
"framerate", GST_TYPE_FRACTION, format.fps_n, format.fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1, NULL));
g_array_append_val (self->device_formats, format);
} else {
GST_WARNING_OBJECT (self, "ignoring unknown format #%d", (gint) i);
}
}
CFRelease (iformats);
already_probed:
beach:
return;
}
static void
gst_iphone_camera_src_release_device_caps_and_formats (GstIPhoneCameraSrc *
self)
{
if (self->device_caps != NULL) {
gst_caps_unref (self->device_caps);
self->device_caps = NULL;
}
if (self->device_formats != NULL) {
g_array_free (self->device_formats, TRUE);
self->device_formats = NULL;
}
}
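/* Configure the device for the chosen format: select the imager format by
 * index, pin both the maximum and minimum imager framerate to the requested
 * rate, ask for SD video color range and start the capture stream. On success
 * the per-frame duration used for timestamps and latency reporting is derived
 * from the framerate. */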
static gboolean
gst_iphone_camera_src_select_format (GstIPhoneCameraSrc * self,
GstIPhoneCameraFormat * format)
{
gboolean result = FALSE;
GstMTApi *mt = self->ctx->mt;
FigStatus status;
SInt32 framerate;
status = gst_iphone_camera_src_set_device_property_i32 (self,
*(mt->kFigCaptureDeviceProperty_ImagerFormatDescription), format->index);
if (status != kFigSuccess)
goto beach;
framerate = format->fps_n / format->fps_d;
status = gst_iphone_camera_src_set_device_property_i32 (self,
*(mt->kFigCaptureDeviceProperty_ImagerFrameRate), framerate);
if (status != kFigSuccess)
goto beach;
status = gst_iphone_camera_src_set_device_property_i32 (self,
*(mt->kFigCaptureDeviceProperty_ImagerMinimumFrameRate), framerate);
if (status != kFigSuccess)
goto beach;
status = gst_iphone_camera_src_set_device_property_cstr (self,
"ColorRange", "ColorRangeSDVideo");
if (status != kFigSuccess)
goto beach;
status = self->stream_iface->Start (self->stream);
if (status != kFigSuccess)
goto beach;
GST_DEBUG_OBJECT (self, "configured format %d (%d x %d @ %d Hz)",
format->index, format->width, format->height, (gint) framerate);
self->duration =
gst_util_uint64_scale (GST_SECOND, format->fps_d, format->fps_n);
result = TRUE;
beach:
return result;
}
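/* Translate one entry of the imager's supported-formats array into width,
 * height, GStreamer video format and maximum framerate. Only the two pixel
 * formats seen so far (a packed YUY2-style format and a planar I420 variant)
 * are handled; anything else is reported as unsupported and skipped by the
 * caller. */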
static gboolean
gst_iphone_camera_src_parse_imager_format (GstIPhoneCameraSrc * self,
guint index, CFDictionaryRef imager_format, GstIPhoneCameraFormat * format)
{
GstCMApi *cm = self->ctx->cm;
GstMTApi *mt = self->ctx->mt;
const FigFormatDescription *desc;
FigVideoDimensions dim;
UInt32 subtype;
CFNumberRef framerate_value;
SInt32 fps_n;
format->index = index;
desc = CFDictionaryGetValue (imager_format,
*(mt->kFigImagerSupportedFormat_FormatDescription));
dim = cm->FigVideoFormatDescriptionGetDimensions (desc);
format->width = dim.width;
format->height = dim.height;
subtype = cm->FigFormatDescriptionGetMediaSubType (desc);
switch (subtype) {
case kComponentVideoUnsigned:
format->video_format = GST_VIDEO_FORMAT_YUY2;
format->fourcc = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
break;
case kYUV420vCodecType:
format->video_format = GST_VIDEO_FORMAT_I420;
format->fourcc = GST_MAKE_FOURCC ('I', '4', '2', '0');
break;
default:
goto unsupported_format;
}
framerate_value = CFDictionaryGetValue (imager_format,
*(mt->kFigImagerSupportedFormat_MaxFrameRate));
CFNumberGetValue (framerate_value, kCFNumberSInt32Type, &fps_n);
format->fps_n = fps_n;
format->fps_d = 1;
return TRUE;
unsupported_format:
return FALSE;
}
static FigStatus
gst_iphone_camera_src_set_device_property_i32 (GstIPhoneCameraSrc * self,
CFStringRef name, SInt32 value)
{
FigStatus status;
CFNumberRef number;
number = CFNumberCreate (NULL, kCFNumberSInt32Type, &value);
status = self->device_iface_base->SetProperty (self->device, name, number);
CFRelease (number);
return status;
}
static FigStatus
gst_iphone_camera_src_set_device_property_cstr (GstIPhoneCameraSrc * self,
const gchar * name, const gchar * value)
{
FigStatus status;
CFStringRef name_str, value_str;
name_str = CFStringCreateWithCStringNoCopy (NULL, name,
kCFStringEncodingUTF8, kCFAllocatorNull);
value_str = CFStringCreateWithCStringNoCopy (NULL, value,
kCFStringEncodingUTF8, kCFAllocatorNull);
status = self->device_iface_base->SetProperty (self->device,
name_str, value_str);
CFRelease (value_str);
CFRelease (name_str);
return status;
}
static void
gst_iphone_camera_src_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"iPhone camera source",
"Source/Video",
"Stream data from iPhone camera sensor",
"Ole André Vadla Ravnås <oleavr@gmail.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_template));
}
static void
gst_iphone_camera_src_class_init (GstIPhoneCameraSrcClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
gobject_class->dispose = gst_iphone_camera_src_dispose;
gobject_class->finalize = gst_iphone_camera_src_finalize;
gobject_class->get_property = gst_iphone_camera_src_get_property;
gobject_class->set_property = gst_iphone_camera_src_set_property;
gstelement_class->change_state = gst_iphone_camera_src_change_state;
gstbasesrc_class->get_caps = gst_iphone_camera_src_get_caps;
gstbasesrc_class->set_caps = gst_iphone_camera_src_set_caps;
gstbasesrc_class->start = gst_iphone_camera_src_start;
gstbasesrc_class->stop = gst_iphone_camera_src_stop;
gstbasesrc_class->query = gst_iphone_camera_src_query;
gstbasesrc_class->unlock = gst_iphone_camera_src_unlock;
gstbasesrc_class->unlock_stop = gst_iphone_camera_src_unlock_stop;
gstpushsrc_class->create = gst_iphone_camera_src_create;
g_object_class_install_property (gobject_class, PROP_DO_STATS,
g_param_spec_boolean ("do-stats", "Enable statistics",
"Enable logging of statistics", DEFAULT_DO_STATS,
G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
GST_DEBUG_CATEGORY_INIT (gst_iphone_camera_src_debug, "iphonecamerasrc",
0, "iPhone video source");
}

sys/applemedia/iphonecamerasrc.h Normal file

@ -0,0 +1,79 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_IPHONE_CAMERA_SRC_H__
#define __GST_IPHONE_CAMERA_SRC_H__
#include <gst/base/gstpushsrc.h>
#include "coremediactx.h"
G_BEGIN_DECLS
#define GST_TYPE_IPHONE_CAMERA_SRC \
(gst_iphone_camera_src_get_type ())
#define GST_IPHONE_CAMERA_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_IPHONE_CAMERA_SRC, GstIPhoneCameraSrc))
#define GST_IPHONE_CAMERA_SRC_CAST(obj) \
((GstIPhoneCameraSrc *) (obj))
#define GST_IPHONE_CAMERA_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_IPHONE_CAMERA_SRC, GstIPhoneCameraSrcClass))
#define GST_IS_IPHONE_CAMERA_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_IPHONE_CAMERA_SRC))
#define GST_IS_IPHONE_CAMERA_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_IPHONE_CAMERA_SRC))
typedef struct _GstIPhoneCameraSrc GstIPhoneCameraSrc;
typedef struct _GstIPhoneCameraSrcClass GstIPhoneCameraSrcClass;
struct _GstIPhoneCameraSrc
{
GstPushSrc push_src;
gboolean do_stats;
GstCoreMediaCtx *ctx;
FigCaptureDeviceRef device;
FigBaseIface *device_iface_base;
FigCaptureStreamRef stream;
FigBaseIface *stream_iface_base;
FigCaptureStreamIface *stream_iface;
FigBufferQueueRef queue;
GstCaps *device_caps;
GArray *device_formats;
GstClockTime duration;
volatile gboolean running;
guint64 offset;
GCond *cond;
volatile gboolean has_pending;
};
struct _GstIPhoneCameraSrcClass
{
GstPushSrcClass parent_class;
};
GType gst_iphone_camera_src_get_type (void);
G_END_DECLS
#endif /* __GST_IPHONE_CAMERA_SRC_H__ */

sys/applemedia/mioapi.c Normal file

@ -0,0 +1,232 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "mioapi.h"
#include "dynapi-internal.h"
#define MIO_FRAMEWORK_PATH "/System/Library/PrivateFrameworks/" \
"CoreMediaIOServices.framework/CoreMediaIOServices"
G_DEFINE_TYPE (GstMIOApi, gst_mio_api, GST_TYPE_DYN_API);
static void
gst_mio_api_init (GstMIOApi * self)
{
}
static void
gst_mio_api_class_init (GstMIOApiClass * klass)
{
}
#define SYM_SPEC(name) GST_DYN_SYM_SPEC (GstMIOApi, name)
GstMIOApi *
gst_mio_api_obtain (GError ** error)
{
static const GstDynSymSpec symbols[] = {
SYM_SPEC (TundraGraphCreate),
SYM_SPEC (TundraGraphRelease),
SYM_SPEC (TundraGraphCreateNode),
SYM_SPEC (TundraGraphGetNodeInfo),
SYM_SPEC (TundraGraphSetProperty),
SYM_SPEC (TundraGraphConnectNodeInput),
SYM_SPEC (TundraGraphInitialize),
SYM_SPEC (TundraGraphUninitialize),
SYM_SPEC (TundraGraphStart),
SYM_SPEC (TundraGraphStop),
SYM_SPEC (TundraObjectGetPropertyDataSize),
SYM_SPEC (TundraObjectGetPropertyData),
SYM_SPEC (TundraObjectIsPropertySettable),
SYM_SPEC (TundraObjectSetPropertyData),
SYM_SPEC (kTundraSampleBufferAttachmentKey_SequenceNumber),
SYM_SPEC (kTundraSampleBufferAttachmentKey_HostTime),
{NULL, 0},
};
return _gst_dyn_api_new (gst_mio_api_get_type (), MIO_FRAMEWORK_PATH, symbols,
error);
}
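/* The helpers below read properties of MIO ("Tundra") objects through the
 * TundraObjectGetPropertyData / GetPropertyDataSize entry points resolved
 * above. The string and fourcc variants convert the value into a newly
 * allocated UTF-8 string that the caller is expected to g_free(). */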
gpointer
gst_mio_object_get_pointer (gint obj, TundraTargetSpec * pspec, GstMIOApi * mio)
{
gpointer ptr;
guint sz;
TundraStatus status;
sz = sizeof (ptr);
status = mio->TundraObjectGetPropertyData (obj, pspec, 0, NULL, &sz, &ptr);
if (status != kTundraSuccess)
goto error;
return ptr;
error:
return NULL;
}
gchar *
gst_mio_object_get_string (gint obj, TundraTargetSpec * pspec, GstMIOApi * mio)
{
gchar *result = NULL;
CFStringRef str;
guint size;
TundraStatus status;
CFRange range;
size = sizeof (str);
status = mio->TundraObjectGetPropertyData (obj, pspec, 0, NULL, &size, &str);
if (status != kTundraSuccess)
goto error;
range.location = 0;
range.length = CFStringGetLength (str);
result = g_malloc0 (range.length + 1);
CFStringGetBytes (str, range, kCFStringEncodingUTF8, 0, FALSE,
(UInt8 *) result, range.length, NULL);
CFRelease (str);
return result;
error:
return NULL;
}
guint32
gst_mio_object_get_uint32 (gint obj, TundraTargetSpec * pspec, GstMIOApi * mio)
{
guint32 val;
guint size;
TundraStatus status;
size = sizeof (val);
status = mio->TundraObjectGetPropertyData (obj, pspec, 0, NULL, &size, &val);
if (status != kTundraSuccess)
goto error;
return val;
error:
return 0;
}
GArray *
gst_mio_object_get_array (gint obj, TundraTargetSpec * pspec,
guint element_size, GstMIOApi * mio)
{
return gst_mio_object_get_array_full (obj, pspec, 0, NULL, element_size, mio);
}
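/* Array-valued properties are read in two steps: query the payload size,
 * check that it is a whole multiple of the element size, then read the data
 * straight into a GArray sized for size / element_size entries. */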
GArray *
gst_mio_object_get_array_full (gint obj, TundraTargetSpec * pspec,
guint ctx_size, gpointer ctx, guint element_size, GstMIOApi * mio)
{
GArray *arr = NULL;
guint size, num_elements;
TundraStatus status;
status = mio->TundraObjectGetPropertyDataSize (obj, pspec, ctx_size, ctx,
&size);
if (status != kTundraSuccess)
goto error;
else if (size % element_size != 0)
goto error;
num_elements = size / element_size;
arr = g_array_sized_new (FALSE, TRUE, element_size, num_elements);
g_array_set_size (arr, num_elements);
status = mio->TundraObjectGetPropertyData (obj, pspec, ctx_size, ctx,
&size, arr->data);
if (status != kTundraSuccess)
goto error;
return arr;
error:
if (arr != NULL)
g_array_free (arr, TRUE);
return NULL;
}
gchar *
gst_mio_object_get_fourcc (gint obj, TundraTargetSpec * pspec, GstMIOApi * mio)
{
guint32 fcc;
guint size;
TundraStatus status;
size = sizeof (fcc);
status = mio->TundraObjectGetPropertyData (obj, pspec, 0, NULL, &size, &fcc);
if (status != kTundraSuccess)
goto error;
return gst_mio_fourcc_to_string (fcc);
error:
return NULL;
}
gpointer
gst_mio_object_get_raw (gint obj, TundraTargetSpec * pspec, guint * size,
GstMIOApi * mio)
{
gpointer data = NULL;
guint sz;
TundraStatus status;
status = mio->TundraObjectGetPropertyDataSize (obj, pspec, 0, NULL, &sz);
if (status != kTundraSuccess)
goto error;
data = g_malloc0 (sz);
status = mio->TundraObjectGetPropertyData (obj, pspec, 0, NULL, &sz, data);
if (status != kTundraSuccess)
goto error;
if (size != NULL)
*size = sz;
return data;
error:
g_free (data);
if (size != NULL)
*size = 0;
return NULL;
}
gchar *
gst_mio_fourcc_to_string (guint32 fcc)
{
gchar *result;
result = g_malloc0 (5);
result[0] = (fcc >> 24) & 0xff;
result[1] = (fcc >> 16) & 0xff;
result[2] = (fcc >> 8) & 0xff;
result[3] = (fcc >> 0) & 0xff;
return result;
}

sys/applemedia/mioapi.h Normal file

@ -0,0 +1,248 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_MIO_API_H__
#define __GST_MIO_API_H__
#include "cmapi.h"
#include <CoreFoundation/CoreFoundation.h>
G_BEGIN_DECLS
typedef struct _GstMIOApi GstMIOApi;
typedef struct _GstMIOApiClass GstMIOApiClass;
#define TUNDRA_SYSTEM_OBJECT_ID 1
typedef int TundraObjectID;
typedef int TundraDeviceID;
typedef int TundraUnitID;
typedef enum _TundraStatus TundraStatus;
typedef enum _TundraVendor TundraVendor;
typedef enum _TundraScope TundraScope;
typedef enum _TundraUnit TundraUnit;
typedef enum _TundraProperty TundraProperty;
typedef enum _TundraDeviceTransportType TundraDeviceTransportType;
typedef struct _TundraTargetSpec TundraTargetSpec;
typedef struct _TundraFramerate TundraFramerate;
typedef struct _TundraGraph TundraGraph;
typedef struct _TundraNode TundraNode;
typedef struct _TundraOutputDelegate TundraOutputDelegate;
enum _TundraStatus
{
kTundraSuccess = 0,
kTundraNotSupported = -67456
};
enum _TundraVendor
{
kTundraVendorApple = 'appl'
};
enum _TundraScope
{
kTundraScopeGlobal = 'glob',
kTundraScopeDAL = 'dal ',
kTundraScope2PRC = '2prc', /* TODO: Investigate this one */
kTundraScopeInput = 'inpt',
kTundraScopeVSyn = 'vsyn'
};
enum _TundraUnit
{
kTundraUnitInput = 'tinp',
kTundraUnitOutput = 'tout',
kTundraUnitSync = 'tefc'
};
enum _TundraProperty
{
kTundraSystemPropertyDevices = 'dev#',
kTundraObjectPropertyClass = 'clas',
kTundraObjectPropertyCreator = 'oplg',
kTundraObjectPropertyName = 'lnam',
kTundraObjectPropertyUID = 'uid ',
kTundraObjectPropertyVendor = 'lmak',
kTundraDevicePropertyConfigApp = 'capp', /* CFString: com.apple.mediaio.TundraDeviceSetup */
kTundraDevicePropertyExclusiveMode = 'ixna',
kTundraDevicePropertyHogMode = 'oink',
kTundraDevicePropertyModelUID = 'muid',
kTundraDevicePropertyStreams = 'stm#',
kTundraDevicePropertySuspendedByUser = 'sbyu',
kTundraDevicePropertyTransportType = 'tran',
kTundraStreamPropertyFormatDescriptions = 'pfta',
kTundraStreamPropertyFormatDescription = 'pft ',
kTundraStreamPropertyFrameRates = 'nfr#',
kTundraStreamPropertyFrameRate = 'nfrt'
};
struct _TundraTargetSpec
{
FourCharCode name;
FourCharCode scope;
FourCharCode vendor;
FourCharCode unk1;
FourCharCode unk2;
};
struct _TundraFramerate
{
gdouble value;
};
enum _TundraUnitProperty
{
kTundraInputPropertyDeviceID = 302,
kTundraOutputPropertyDelegate = 5903,
kTundraInputUnitProperty_SourcePath = 6780,
kTundraSyncPropertyClockProvider = 7100,
kTundraSyncPropertyMasterSynchronizer = 7102,
kTundraSyncPropertySynchronizationDirection = 7104
};
enum _TundraDeviceTransportType
{
kTundraDeviceTransportInvalid = 0,
kTundraDeviceTransportBuiltin = 'bltn',
kTundraDeviceTransportScreen = 'scrn',
kTundraDeviceTransportUSB = 'usb ',
};
typedef TundraStatus (* TundraOutputRenderFunc) (gpointer instance,
gpointer unk1, gpointer unk2, gpointer unk3, FigSampleBuffer * sampleBuf);
typedef TundraStatus (* TundraOutputInitializeFunc) (gpointer instance);
typedef TundraStatus (* TundraOutputUninitializeFunc) (gpointer instance);
typedef TundraStatus (* TundraOutputStartFunc) (gpointer instance);
typedef TundraStatus (* TundraOutputStopFunc) (gpointer instance);
typedef TundraStatus (* TundraOutputResetFunc) (gpointer instance);
typedef TundraStatus (* TundraOutputDeallocateFunc) (gpointer instance);
typedef gboolean (* TundraOutputCanRenderNowFunc) (gpointer instance,
guint * unk);
typedef CFArrayRef (* TundraOutputAvailableFormatsFunc) (gpointer instance,
gboolean ensureOnly);
typedef TundraStatus (* TundraOutputCopyClockFunc) (gpointer instance);
typedef TundraStatus (* TundraOutputGetPropertyInfoFunc) (gpointer instance,
guint propId);
typedef TundraStatus (* TundraOutputGetPropertyFunc) (gpointer instance,
guint propId);
typedef TundraStatus (* TundraOutputSetPropertyFunc) (gpointer instance,
guint propId);
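/* Delegate vtable apparently handed to the Tundra output unit (see
 * kTundraOutputPropertyDelegate above). The 1-byte packing keeps the layout
 * identical to what the private framework seems to expect; the unk1 member
 * and several of the callback signatures are reverse-engineered guesses
 * rather than documented API. */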
#pragma pack(push, 1)
struct _TundraOutputDelegate
{
int unk1;
gpointer instance;
TundraOutputRenderFunc Render;
TundraOutputInitializeFunc Initialize;
TundraOutputUninitializeFunc Uninitialize;
TundraOutputStartFunc Start;
TundraOutputStopFunc Stop;
TundraOutputResetFunc Reset;
TundraOutputDeallocateFunc Deallocate;
TundraOutputCanRenderNowFunc CanRenderNow;
TundraOutputAvailableFormatsFunc AvailableFormats;
TundraOutputCopyClockFunc CopyClock;
TundraOutputGetPropertyInfoFunc GetPropertyInfo;
TundraOutputGetPropertyFunc GetProperty;
TundraOutputSetPropertyFunc SetProperty;
};
#pragma pack(pop)
struct _GstMIOApi
{
GstDynApi parent;
TundraStatus (* TundraGraphCreate) (CFAllocatorRef allocator,
TundraGraph ** graph);
void (* TundraGraphRelease) (TundraGraph * graph);
TundraStatus (* TundraGraphCreateNode) (TundraGraph * graph,
gint nodeId, UInt32 unk1, UInt32 unk2, TundraTargetSpec * spec,
UInt32 unk3, TundraUnitID * node);
TundraStatus (* TundraGraphGetNodeInfo) (TundraGraph * graph,
gint nodeId, UInt32 unk1, UInt32 unk2, UInt32 unk3, UInt32 unk4,
gpointer * info);
TundraStatus (* TundraGraphSetProperty) (TundraGraph * graph,
gint nodeId, UInt32 unk1, guint propId, UInt32 unk2, UInt32 unk3,
gpointer data, guint size);
TundraStatus (* TundraGraphConnectNodeInput) (TundraGraph * graph,
TundraUnitID from_node, guint from_bus,
TundraUnitID to_node, guint to_bus);
TundraStatus (* TundraGraphInitialize) (TundraGraph * graph);
TundraStatus (* TundraGraphUninitialize) (TundraGraph * graph);
TundraStatus (* TundraGraphStart) (TundraGraph * graph);
TundraStatus (* TundraGraphStop) (TundraGraph * graph);
TundraStatus (* TundraObjectGetPropertyDataSize) (TundraObjectID obj,
TundraTargetSpec * spec, UInt32 contextSize, void * context, guint * size);
TundraStatus (* TundraObjectGetPropertyData) (TundraObjectID obj,
TundraTargetSpec * spec, UInt32 contextSize, void * context, guint * size,
gpointer data);
TundraStatus (* TundraObjectIsPropertySettable) (TundraObjectID obj,
TundraTargetSpec * spec, Boolean *isSettable);
TundraStatus (* TundraObjectSetPropertyData) (TundraObjectID obj,
TundraTargetSpec * spec, gpointer unk1, gpointer unk2, guint size,
gpointer data);
CFStringRef * kTundraSampleBufferAttachmentKey_SequenceNumber;
CFStringRef * kTundraSampleBufferAttachmentKey_HostTime;
};
struct _GstMIOApiClass
{
GstDynApiClass parent_class;
};
GstMIOApi * gst_mio_api_obtain (GError ** error);
gpointer gst_mio_object_get_pointer (gint obj, TundraTargetSpec * pspec,
GstMIOApi * mio);
gchar * gst_mio_object_get_string (gint obj, TundraTargetSpec * pspec,
GstMIOApi * mio);
guint32 gst_mio_object_get_uint32 (gint obj, TundraTargetSpec * pspec,
GstMIOApi * mio);
gchar * gst_mio_object_get_fourcc (gint obj, TundraTargetSpec * pspec,
GstMIOApi * mio);
GArray * gst_mio_object_get_array (gint obj, TundraTargetSpec * pspec,
guint element_size, GstMIOApi * mio);
GArray * gst_mio_object_get_array_full (gint obj, TundraTargetSpec * pspec,
guint ctx_size, gpointer ctx, guint element_size, GstMIOApi * mio);
gpointer gst_mio_object_get_raw (gint obj, TundraTargetSpec * pspec,
guint * size, GstMIOApi * mio);
gchar * gst_mio_fourcc_to_string (guint32 fcc);
G_END_DECLS
#endif

sys/applemedia/miovideodevice.c Normal file

@ -0,0 +1,846 @@
/*
* Copyright (C) 2009 Ole André Vadla Ravnås <oravnas@cisco.com>
* 2009 Knut Inge Hvidsten <knut.inge.hvidsten@tandberg.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "miovideodevice.h"
#include <gst/video/video.h>
#include <unistd.h>
GST_DEBUG_CATEGORY_EXTERN (gst_mio_video_src_debug);
#define GST_CAT_DEFAULT gst_mio_video_src_debug
enum
{
PROP_0,
PROP_CONTEXT,
PROP_HANDLE,
PROP_UID,
PROP_NAME,
PROP_TRANSPORT
};
G_DEFINE_TYPE (GstMIOVideoDevice, gst_mio_video_device, G_TYPE_OBJECT);
typedef struct _GstMIOVideoFormat GstMIOVideoFormat;
typedef struct _GstMIOSetFormatCtx GstMIOSetFormatCtx;
typedef struct _GstMIOFindRateCtx GstMIOFindRateCtx;
struct _GstMIOVideoFormat
{
TundraObjectID stream;
FigFormatDescription *desc;
UInt32 type;
FigVideoDimensions dim;
};
struct _GstMIOSetFormatCtx
{
UInt32 format;
gint width, height;
gint fps_n, fps_d;
gboolean success;
};
struct _GstMIOFindRateCtx
{
gdouble needle;
gdouble closest_match;
gboolean success;
};
static void gst_mio_video_device_collect_format (GstMIOVideoDevice * self,
GstMIOVideoFormat * format, gpointer user_data);
static GstStructure *gst_mio_video_device_format_basics_to_structure
(GstMIOVideoDevice * self, GstMIOVideoFormat * format);
static gboolean gst_mio_video_device_add_framerates_to_structure
(GstMIOVideoDevice * self, GstMIOVideoFormat * format, GstStructure * s);
static void gst_mio_video_device_add_pixel_aspect_to_structure
(GstMIOVideoDevice * self, GstMIOVideoFormat * format, GstStructure * s);
static void gst_mio_video_device_append_framerate (GstMIOVideoDevice * self,
GstMIOVideoFormat * format, TundraFramerate * rate, gpointer user_data);
static void gst_mio_video_device_framerate_to_fraction_value
(TundraFramerate * rate, GValue * fract);
static gdouble gst_mio_video_device_round_to_whole_hundreths (gdouble value);
static void gst_mio_video_device_guess_pixel_aspect_ratio
(gint width, gint height, gint * par_width, gint * par_height);
static void gst_mio_video_device_activate_matching_format
(GstMIOVideoDevice * self, GstMIOVideoFormat * format, gpointer user_data);
static void gst_mio_video_device_find_closest_framerate
(GstMIOVideoDevice * self, GstMIOVideoFormat * format,
TundraFramerate * rate, gpointer user_data);
typedef void (*GstMIOVideoDeviceEachFormatFunc) (GstMIOVideoDevice * self,
GstMIOVideoFormat * format, gpointer user_data);
typedef void (*GstMIOVideoDeviceEachFramerateFunc) (GstMIOVideoDevice * self,
GstMIOVideoFormat * format, TundraFramerate * rate, gpointer user_data);
static void gst_mio_video_device_formats_foreach (GstMIOVideoDevice * self,
GstMIOVideoDeviceEachFormatFunc func, gpointer user_data);
static void gst_mio_video_device_format_framerates_foreach
(GstMIOVideoDevice * self, GstMIOVideoFormat * format,
GstMIOVideoDeviceEachFramerateFunc func, gpointer user_data);
static gint gst_mio_video_device_compare (GstMIOVideoDevice * a,
GstMIOVideoDevice * b);
static gint gst_mio_video_device_calculate_score (GstMIOVideoDevice * device);
static void
gst_mio_video_device_init (GstMIOVideoDevice * self)
{
}
static void
gst_mio_video_device_dispose (GObject * object)
{
GstMIOVideoDevice *self = GST_MIO_VIDEO_DEVICE_CAST (object);
if (self->cached_caps != NULL) {
gst_caps_unref (self->cached_caps);
self->cached_caps = NULL;
}
G_OBJECT_CLASS (gst_mio_video_device_parent_class)->dispose (object);
}
static void
gst_mio_video_device_finalize (GObject * object)
{
GstMIOVideoDevice *self = GST_MIO_VIDEO_DEVICE_CAST (object);
g_free (self->cached_uid);
g_free (self->cached_name);
G_OBJECT_CLASS (gst_mio_video_device_parent_class)->finalize (object);
}
static void
gst_mio_video_device_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstMIOVideoDevice *self = GST_MIO_VIDEO_DEVICE (object);
switch (prop_id) {
case PROP_CONTEXT:
g_value_set_pointer (value, self->ctx);
break;
case PROP_HANDLE:
g_value_set_int (value, gst_mio_video_device_get_handle (self));
break;
case PROP_UID:
g_value_set_string (value, gst_mio_video_device_get_uid (self));
break;
case PROP_NAME:
g_value_set_string (value, gst_mio_video_device_get_name (self));
break;
case PROP_TRANSPORT:
g_value_set_uint (value, gst_mio_video_device_get_transport_type (self));
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_mio_video_device_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstMIOVideoDevice *self = GST_MIO_VIDEO_DEVICE (object);
switch (prop_id) {
case PROP_CONTEXT:
self->ctx = g_value_get_pointer (value);
break;
case PROP_HANDLE:
self->handle = g_value_get_int (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
TundraObjectID
gst_mio_video_device_get_handle (GstMIOVideoDevice * self)
{
return self->handle;
}
const gchar *
gst_mio_video_device_get_uid (GstMIOVideoDevice * self)
{
if (self->cached_uid == NULL) {
TundraTargetSpec pspec = { 0, };
pspec.name = kTundraObjectPropertyUID;
pspec.scope = kTundraScopeGlobal;
self->cached_uid =
gst_mio_object_get_string (self->handle, &pspec, self->ctx->mio);
}
return self->cached_uid;
}
const gchar *
gst_mio_video_device_get_name (GstMIOVideoDevice * self)
{
if (self->cached_name == NULL) {
TundraTargetSpec pspec = { 0, };
pspec.name = kTundraObjectPropertyName;
pspec.scope = kTundraScopeGlobal;
self->cached_name =
gst_mio_object_get_string (self->handle, &pspec, self->ctx->mio);
}
return self->cached_name;
}
TundraDeviceTransportType
gst_mio_video_device_get_transport_type (GstMIOVideoDevice * self)
{
if (self->cached_transport == kTundraDeviceTransportInvalid) {
TundraTargetSpec pspec = { 0, };
pspec.name = kTundraDevicePropertyTransportType;
pspec.scope = kTundraScopeGlobal;
self->cached_transport =
gst_mio_object_get_uint32 (self->handle, &pspec, self->ctx->mio);
}
return self->cached_transport;
}
gboolean
gst_mio_video_device_open (GstMIOVideoDevice * self)
{
/* nothing for now */
return TRUE;
}
void
gst_mio_video_device_close (GstMIOVideoDevice * self)
{
/* nothing for now */
}
GstCaps *
gst_mio_video_device_get_available_caps (GstMIOVideoDevice * self)
{
if (self->cached_caps == NULL) {
GstCaps *caps;
caps = gst_caps_new_empty ();
gst_mio_video_device_formats_foreach (self,
gst_mio_video_device_collect_format, caps);
self->cached_caps = caps;
}
return self->cached_caps;
}
static void
gst_mio_video_device_collect_format (GstMIOVideoDevice * self,
GstMIOVideoFormat * format, gpointer user_data)
{
GstCaps *caps = user_data;
GstStructure *s;
s = gst_mio_video_device_format_basics_to_structure (self, format);
if (s == NULL)
goto unsupported_format;
if (!gst_mio_video_device_add_framerates_to_structure (self, format, s))
goto no_framerates;
gst_mio_video_device_add_pixel_aspect_to_structure (self, format, s);
gst_caps_append_structure (caps, s);
return;
/* ERRORS */
unsupported_format:
{
gchar *fcc;
fcc = gst_mio_fourcc_to_string (format->type);
GST_WARNING ("skipping unsupported format %s", fcc);
g_free (fcc);
return;
}
no_framerates:
{
GST_WARNING ("no framerates?");
gst_structure_free (s);
return;
}
}
static GstStructure *
gst_mio_video_device_format_basics_to_structure (GstMIOVideoDevice * self,
GstMIOVideoFormat * format)
{
GstStructure *s;
switch (format->type) {
case kCVPixelFormatType_422YpCbCr8:
case kCVPixelFormatType_422YpCbCr8Deprecated:
{
guint fcc;
if (format->type == kCVPixelFormatType_422YpCbCr8)
fcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
else
fcc = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
s = gst_structure_new ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, fcc,
"width", G_TYPE_INT, format->dim.width,
"height", G_TYPE_INT, format->dim.height, NULL);
break;
}
case kFigVideoCodecType_JPEG_OpenDML:
{
s = gst_structure_new ("image/jpeg",
"width", G_TYPE_INT, format->dim.width,
"height", G_TYPE_INT, format->dim.height, NULL);
break;
}
default:
s = NULL;
break;
}
return s;
}
static gboolean
gst_mio_video_device_add_framerates_to_structure (GstMIOVideoDevice * self,
GstMIOVideoFormat * format, GstStructure * s)
{
GValue rates = { 0, };
const GValue *rates_value;
g_value_init (&rates, GST_TYPE_LIST);
gst_mio_video_device_format_framerates_foreach (self, format,
gst_mio_video_device_append_framerate, &rates);
if (gst_value_list_get_size (&rates) == 0)
goto no_framerates;
if (gst_value_list_get_size (&rates) > 1)
rates_value = &rates;
else
rates_value = gst_value_list_get_value (&rates, 0);
gst_structure_set_value (s, "framerate", rates_value);
g_value_unset (&rates);
return TRUE;
/* ERRORS */
no_framerates:
{
g_value_unset (&rates);
return FALSE;
}
}
static void
gst_mio_video_device_add_pixel_aspect_to_structure (GstMIOVideoDevice * self,
GstMIOVideoFormat * format, GstStructure * s)
{
gint par_width, par_height;
gst_mio_video_device_guess_pixel_aspect_ratio
(format->dim.width, format->dim.height, &par_width, &par_height);
gst_structure_set (s, "pixel-aspect-ratio",
GST_TYPE_FRACTION, par_width, par_height, NULL);
}
static void
gst_mio_video_device_append_framerate (GstMIOVideoDevice * self,
GstMIOVideoFormat * format, TundraFramerate * rate, gpointer user_data)
{
GValue *rates = user_data;
GValue value = { 0, };
g_value_init (&value, GST_TYPE_FRACTION);
gst_mio_video_device_framerate_to_fraction_value (rate, &value);
gst_value_list_append_value (rates, &value);
g_value_unset (&value);
}
static void
gst_mio_video_device_framerate_to_fraction_value (TundraFramerate * rate,
GValue * fract)
{
gdouble rounded;
gint n, d;
rounded = gst_mio_video_device_round_to_whole_hundreths (rate->value);
gst_util_double_to_fraction (rounded, &n, &d);
gst_value_set_fraction (fract, n, d);
}
static gdouble
gst_mio_video_device_round_to_whole_hundreths (gdouble value)
{
gdouble m, x, y, z;
m = 0.01;
x = value;
y = floor ((x / m) + 0.5);
z = y * m;
return z;
}
static void
gst_mio_video_device_guess_pixel_aspect_ratio (gint width, gint height,
gint * par_width, gint * par_height)
{
/*
 * As we don't have access to the actual pixel aspect ratio, we make a
 * best-effort guess. The guess is based on most sensors being either 4:3
 * or 16:9, and most pixel aspect ratios being close to 1/1.
*/
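/* Examples: 352x288 (CIF) gives a ratio of about 1.22 (< 1.2778) and maps to
 * 12/11, the conventional PAR for 4:3 SD video; 640x480 gives 1.33 and maps
 * to 1/1; 768x448 (w448p) is special-cased to 28/27 below. */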
if (width == 768 && height == 448) { /* special case for w448p */
*par_width = 28;
*par_height = 27;
} else {
if (((gdouble) width / (gdouble) height) < 1.2778) {
*par_width = 12;
*par_height = 11;
} else {
*par_width = 1;
*par_height = 1;
}
}
}
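/* Caps negotiation: map the negotiated caps to a CoreVideo pixel format
 * (UYVY / YUY2) or to JPEG, then walk the device's format descriptions and
 * activate the first one whose type, dimensions and (approximate) framerate
 * match. */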
gboolean
gst_mio_video_device_set_caps (GstMIOVideoDevice * self, GstCaps * caps)
{
GstVideoFormat format;
GstMIOSetFormatCtx ctx = { 0, };
if (gst_video_format_parse_caps (caps, &format, &ctx.width, &ctx.height)) {
if (format == GST_VIDEO_FORMAT_UYVY)
ctx.format = kCVPixelFormatType_422YpCbCr8;
else if (format == GST_VIDEO_FORMAT_YUY2)
ctx.format = kCVPixelFormatType_422YpCbCr8Deprecated;
else
g_assert_not_reached ();
} else {
GstStructure *s;
s = gst_caps_get_structure (caps, 0);
g_assert (gst_structure_has_name (s, "image/jpeg"));
gst_structure_get_int (s, "width", &ctx.width);
gst_structure_get_int (s, "height", &ctx.height);
ctx.format = kFigVideoCodecType_JPEG_OpenDML;
}
gst_video_parse_caps_framerate (caps, &ctx.fps_n, &ctx.fps_d);
gst_mio_video_device_formats_foreach (self,
gst_mio_video_device_activate_matching_format, &ctx);
return ctx.success;
}
static void
gst_mio_video_device_activate_matching_format (GstMIOVideoDevice * self,
GstMIOVideoFormat * format, gpointer user_data)
{
GstMIOSetFormatCtx *ctx = user_data;
GstMIOFindRateCtx find_ctx;
TundraTargetSpec spec = { 0, };
TundraStatus status;
if (format->type != ctx->format)
return;
else if (format->dim.width != ctx->width)
return;
else if (format->dim.height != ctx->height)
return;
find_ctx.needle = (gdouble) ctx->fps_n / (gdouble) ctx->fps_d;
find_ctx.closest_match = 0.0;
find_ctx.success = FALSE;
gst_mio_video_device_format_framerates_foreach (self, format,
gst_mio_video_device_find_closest_framerate, &find_ctx);
if (!find_ctx.success)
goto no_matching_framerate_found;
spec.scope = kTundraScopeInput;
spec.name = kTundraStreamPropertyFormatDescription;
status = self->ctx->mio->TundraObjectSetPropertyData (format->stream, &spec,
NULL, NULL, sizeof (format->desc), &format->desc);
if (status != kTundraSuccess)
goto failed_to_set_format;
spec.name = kTundraStreamPropertyFrameRate;
status = self->ctx->mio->TundraObjectSetPropertyData (format->stream, &spec,
NULL, NULL, sizeof (find_ctx.closest_match), &find_ctx.closest_match);
if (status != kTundraSuccess)
goto failed_to_set_framerate;
self->selected_format = format->desc;
self->selected_fps_n = ctx->fps_n;
self->selected_fps_d = ctx->fps_d;
ctx->success = TRUE;
return;
/* ERRORS */
no_matching_framerate_found:
{
GST_ERROR ("no matching framerate found");
return;
}
failed_to_set_format:
{
GST_ERROR ("failed to set format: 0x%08x", status);
return;
}
failed_to_set_framerate:
{
GST_ERROR ("failed to set framerate: 0x%08x", status);
return;
}
}
static void
gst_mio_video_device_find_closest_framerate (GstMIOVideoDevice * self,
GstMIOVideoFormat * format, TundraFramerate * rate, gpointer user_data)
{
GstMIOFindRateCtx *ctx = user_data;
if (fabs (rate->value - ctx->needle) <= 0.1) {
ctx->closest_match = rate->value;
ctx->success = TRUE;
}
}
FigFormatDescription *
gst_mio_video_device_get_selected_format (GstMIOVideoDevice * self)
{
return self->selected_format;
}
GstClockTime
gst_mio_video_device_get_duration (GstMIOVideoDevice * self)
{
return gst_util_uint64_scale_int (GST_SECOND,
self->selected_fps_d, self->selected_fps_n);
}
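/* Enumeration helpers: fetch the device's input streams, then each stream's
 * FigFormatDescription array, and invoke the callback for every video format
 * (and, in the framerate variant, for every rate the format supports). Only
 * the first stream is visited, matching the TODO below. */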
static void
gst_mio_video_device_formats_foreach (GstMIOVideoDevice * self,
GstMIOVideoDeviceEachFormatFunc func, gpointer user_data)
{
GstCMApi *cm = self->ctx->cm;
GstMIOApi *mio = self->ctx->mio;
TundraTargetSpec spec = { 0, };
GArray *streams;
guint stream_idx;
spec.name = kTundraDevicePropertyStreams;
spec.scope = kTundraScopeInput;
streams = gst_mio_object_get_array (self->handle, &spec,
sizeof (TundraObjectID), mio);
/* TODO: We only consider the first stream for now */
for (stream_idx = 0; stream_idx != MIN (streams->len, 1); stream_idx++) {
TundraObjectID stream;
CFArrayRef formats;
CFIndex num_formats, fmt_idx;
stream = g_array_index (streams, TundraObjectID, stream_idx);
spec.name = kTundraStreamPropertyFormatDescriptions;
spec.scope = kTundraScopeInput;
formats = gst_mio_object_get_pointer (stream, &spec, mio);
num_formats = CFArrayGetCount (formats);
for (fmt_idx = 0; fmt_idx != num_formats; fmt_idx++) {
GstMIOVideoFormat fmt;
fmt.stream = stream;
fmt.desc = (FigFormatDescription *)
CFArrayGetValueAtIndex (formats, fmt_idx);
if (cm->FigFormatDescriptionGetMediaType (fmt.desc) != kFigMediaTypeVideo)
continue;
fmt.type = cm->FigFormatDescriptionGetMediaSubType (fmt.desc);
fmt.dim = cm->FigVideoFormatDescriptionGetDimensions (fmt.desc);
func (self, &fmt, user_data);
}
}
g_array_free (streams, TRUE);
}
static void
gst_mio_video_device_format_framerates_foreach (GstMIOVideoDevice * self,
GstMIOVideoFormat * format, GstMIOVideoDeviceEachFramerateFunc func,
gpointer user_data)
{
TundraTargetSpec spec = { 0, };
GArray *rates;
guint rate_idx;
spec.name = kTundraStreamPropertyFrameRates;
spec.scope = kTundraScopeInput;
rates = gst_mio_object_get_array_full (format->stream, &spec,
sizeof (format->desc), &format->desc, sizeof (TundraFramerate),
self->ctx->mio);
for (rate_idx = 0; rate_idx != rates->len; rate_idx++) {
TundraFramerate *rate;
rate = &g_array_index (rates, TundraFramerate, rate_idx);
func (self, format, rate, user_data);
}
g_array_free (rates, TRUE);
}
void
gst_mio_video_device_print_debug_info (GstMIOVideoDevice * self)
{
GstCMApi *cm = self->ctx->cm;
GstMIOApi *mio = self->ctx->mio;
TundraTargetSpec spec = { 0, };
gchar *str;
GArray *streams;
guint stream_idx;
g_print ("Device %p with handle %d\n", self, self->handle);
spec.scope = kTundraScopeGlobal;
spec.name = kTundraObjectPropertyClass;
str = gst_mio_object_get_fourcc (self->handle, &spec, mio);
g_print (" Class: '%s'\n", str);
g_free (str);
spec.name = kTundraObjectPropertyCreator;
str = gst_mio_object_get_string (self->handle, &spec, mio);
g_print (" Creator: \"%s\"\n", str);
g_free (str);
spec.name = kTundraDevicePropertyModelUID;
str = gst_mio_object_get_string (self->handle, &spec, mio);
g_print (" Model UID: \"%s\"\n", str);
g_free (str);
spec.name = kTundraDevicePropertyTransportType;
str = gst_mio_object_get_fourcc (self->handle, &spec, mio);
g_print (" Transport Type: '%s'\n", str);
g_free (str);
g_print (" Streams:\n");
spec.name = kTundraDevicePropertyStreams;
spec.scope = kTundraScopeInput;
streams = gst_mio_object_get_array (self->handle, &spec,
sizeof (TundraObjectID), mio);
for (stream_idx = 0; stream_idx != streams->len; stream_idx++) {
TundraObjectID stream;
CFArrayRef formats;
CFIndex num_formats, fmt_idx;
stream = g_array_index (streams, TundraObjectID, stream_idx);
g_print (" stream[%u] = %d\n", stream_idx, stream);
spec.scope = kTundraScopeInput;
spec.name = kTundraStreamPropertyFormatDescriptions;
formats = gst_mio_object_get_pointer (stream, &spec, mio);
num_formats = CFArrayGetCount (formats);
g_print (" <%u formats>\n", (guint) num_formats);
for (fmt_idx = 0; fmt_idx != num_formats; fmt_idx++) {
const FigFormatDescription *fmt;
gchar *media_type;
gchar *media_sub_type;
FigVideoDimensions dim;
GArray *rates;
guint rate_idx;
fmt = CFArrayGetValueAtIndex (formats, fmt_idx);
media_type = gst_mio_fourcc_to_string
(cm->FigFormatDescriptionGetMediaType (fmt));
media_sub_type = gst_mio_fourcc_to_string
(cm->FigFormatDescriptionGetMediaSubType (fmt));
dim = cm->FigVideoFormatDescriptionGetDimensions (fmt);
g_print (" format[%u]: MediaType='%s' MediaSubType='%s' %ux%u\n",
(guint) fmt_idx, media_type, media_sub_type,
(guint) dim.width, (guint) dim.height);
spec.name = kTundraStreamPropertyFrameRates;
rates = gst_mio_object_get_array_full (stream, &spec, sizeof (fmt), &fmt,
sizeof (TundraFramerate), mio);
for (rate_idx = 0; rate_idx != rates->len; rate_idx++) {
TundraFramerate *rate;
rate = &g_array_index (rates, TundraFramerate, rate_idx);
g_print (" %f\n", rate->value);
}
g_array_free (rates, TRUE);
g_free (media_sub_type);
g_free (media_type);
}
}
g_array_free (streams, TRUE);
}
static void
gst_mio_video_device_class_init (GstMIOVideoDeviceClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
gobject_class->dispose = gst_mio_video_device_dispose;
gobject_class->finalize = gst_mio_video_device_finalize;
gobject_class->get_property = gst_mio_video_device_get_property;
gobject_class->set_property = gst_mio_video_device_set_property;
g_object_class_install_property (gobject_class, PROP_CONTEXT,
g_param_spec_pointer ("context", "CoreMedia Context",
"CoreMedia context to use",
G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_HANDLE,
g_param_spec_int ("handle", "Handle",
"MIO handle of this video capture device",
G_MININT, G_MAXINT, -1,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_UID,
g_param_spec_string ("uid", "Unique ID",
"Unique ID of this video capture device", NULL,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_NAME,
g_param_spec_string ("name", "Device Name",
"Name of this video capture device", NULL,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_TRANSPORT,
g_param_spec_uint ("transport", "Transport",
"Transport type of this video capture device",
0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
}
GList *
gst_mio_video_device_list_create (GstCoreMediaCtx * ctx)
{
GList *devices = NULL;
TundraTargetSpec pspec = { 0, };
GArray *handles;
guint handle_idx;
pspec.name = kTundraSystemPropertyDevices;
pspec.scope = kTundraScopeGlobal;
handles = gst_mio_object_get_array (TUNDRA_SYSTEM_OBJECT_ID, &pspec,
sizeof (TundraObjectID), ctx->mio);
if (handles == NULL)
goto beach;
for (handle_idx = 0; handle_idx != handles->len; handle_idx++) {
TundraObjectID handle;
GstMIOVideoDevice *device;
handle = g_array_index (handles, TundraObjectID, handle_idx);
device = g_object_new (GST_TYPE_MIO_VIDEO_DEVICE,
"context", ctx, "handle", handle, NULL);
/* TODO: Skip screen input devices for now */
if (gst_mio_video_device_get_transport_type (device) !=
kTundraDeviceTransportScreen) {
devices = g_list_prepend (devices, device);
} else {
g_object_unref (device);
}
}
devices = g_list_sort (devices, (GCompareFunc) gst_mio_video_device_compare);
g_array_free (handles, TRUE);
beach:
return devices;
}
void
gst_mio_video_device_list_destroy (GList * devices)
{
g_list_foreach (devices, (GFunc) g_object_unref, NULL);
g_list_free (devices);
}
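/* Device ordering: unknown transports score highest, then USB, built-in and
 * screen-capture devices in that order; higher scores sort first and ties
 * are broken by a case-insensitive name comparison. */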
static gint
gst_mio_video_device_compare (GstMIOVideoDevice * a, GstMIOVideoDevice * b)
{
gint score_a, score_b;
score_a = gst_mio_video_device_calculate_score (a);
score_b = gst_mio_video_device_calculate_score (b);
if (score_a > score_b)
return -1;
else if (score_a < score_b)
return 1;
return g_ascii_strcasecmp (gst_mio_video_device_get_name (a),
gst_mio_video_device_get_name (b));
}
static gint
gst_mio_video_device_calculate_score (GstMIOVideoDevice * device)
{
switch (gst_mio_video_device_get_transport_type (device)) {
case kTundraDeviceTransportScreen:
return 0;
case kTundraDeviceTransportBuiltin:
return 1;
case kTundraDeviceTransportUSB:
return 2;
default:
return 3;
}
}

sys/applemedia/miovideodevice.h Normal file

@ -0,0 +1,90 @@
/*
* Copyright (C) 2009 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_MIO_VIDEO_DEVICE_H__
#define __GST_MIO_VIDEO_DEVICE_H__
#include <gst/gst.h>
#include "coremediactx.h"
G_BEGIN_DECLS
#define GST_TYPE_MIO_VIDEO_DEVICE \
(gst_mio_video_device_get_type ())
#define GST_MIO_VIDEO_DEVICE(obj) \
(G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MIO_VIDEO_DEVICE, GstMIOVideoDevice))
#define GST_MIO_VIDEO_DEVICE_CAST(obj) \
((GstMIOVideoDevice *) (obj))
#define GST_MIO_VIDEO_DEVICE_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_MIO_VIDEO_DEVICE, GstMIOVideoDeviceClass))
#define GST_IS_MIO_VIDEO_DEVICE(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MIO_VIDEO_DEVICE))
#define GST_IS_MIO_VIDEO_DEVICE_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_MIO_VIDEO_DEVICE))
typedef struct _GstMIOVideoDevice GstMIOVideoDevice;
typedef struct _GstMIOVideoDeviceClass GstMIOVideoDeviceClass;
struct _GstMIOVideoDevice
{
GObject parent;
GstCoreMediaCtx *ctx;
TundraObjectID handle;
gchar *cached_uid;
gchar *cached_name;
TundraDeviceTransportType cached_transport;
GstCaps *cached_caps;
FigFormatDescription *selected_format;
gint selected_fps_n, selected_fps_d;
};
struct _GstMIOVideoDeviceClass
{
GObjectClass parent_class;
};
GType gst_mio_video_device_get_type (void);
TundraObjectID gst_mio_video_device_get_handle (GstMIOVideoDevice * self);
const gchar * gst_mio_video_device_get_uid (GstMIOVideoDevice * self);
const gchar * gst_mio_video_device_get_name (GstMIOVideoDevice * self);
TundraDeviceTransportType gst_mio_video_device_get_transport_type (
GstMIOVideoDevice * self);
gboolean gst_mio_video_device_open (GstMIOVideoDevice * self);
void gst_mio_video_device_close (GstMIOVideoDevice * self);
GstCaps * gst_mio_video_device_get_available_caps (GstMIOVideoDevice * self);
gboolean gst_mio_video_device_set_caps (GstMIOVideoDevice * self,
GstCaps * caps);
FigFormatDescription * gst_mio_video_device_get_selected_format (
GstMIOVideoDevice * self);
GstClockTime gst_mio_video_device_get_duration (GstMIOVideoDevice * self);
void gst_mio_video_device_print_debug_info (GstMIOVideoDevice * self);
GList * gst_mio_video_device_list_create (GstCoreMediaCtx * ctx);
void gst_mio_video_device_list_destroy (GList * devices);
G_END_DECLS
#endif /* __GST_MIO_VIDEO_DEVICE_H__ */

sys/applemedia/miovideosrc.c Normal file

File diff suppressed because it is too large

sys/applemedia/miovideosrc.h Normal file

@ -0,0 +1,83 @@
/*
* Copyright (C) 2009 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_MIO_VIDEO_SRC_H__
#define __GST_MIO_VIDEO_SRC_H__
#include <gst/base/gstpushsrc.h>
#include "coremediactx.h"
#include "miovideodevice.h"
G_BEGIN_DECLS
#define GST_TYPE_MIO_VIDEO_SRC \
(gst_mio_video_src_get_type ())
#define GST_MIO_VIDEO_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MIO_VIDEO_SRC, GstMIOVideoSrc))
#define GST_MIO_VIDEO_SRC_CAST(obj) \
((GstMIOVideoSrc *) (obj))
#define GST_MIO_VIDEO_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_MIO_VIDEO_SRC, GstMIOVideoSrcClass))
#define GST_IS_MIO_VIDEO_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MIO_VIDEO_SRC))
#define GST_IS_MIO_VIDEO_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_MIO_VIDEO_SRC))
typedef struct _GstMIOVideoSrc GstMIOVideoSrc;
typedef struct _GstMIOVideoSrcClass GstMIOVideoSrcClass;
struct _GstMIOVideoSrc
{
GstPushSrc push_src;
gint cv_ratio_n;
gint cv_ratio_d;
gchar *device_uid;
gchar *device_name;
gint device_index;
GThread *dispatcher_thread;
GMainLoop *dispatcher_loop;
GMainContext *dispatcher_ctx;
GstCoreMediaCtx *ctx;
GstMIOVideoDevice *device;
TundraGraph *graph;
volatile gboolean running;
GQueue *queue;
GMutex *qlock;
GCond *qcond;
guint64 prev_offset;
FigFormatDescription * prev_format;
};
struct _GstMIOVideoSrcClass
{
GstPushSrcClass parent_class;
};
GType gst_mio_video_src_get_type (void);
G_END_DECLS
#endif /* __GST_MIO_VIDEO_SRC_H__ */

sys/applemedia/mtapi.c Normal file

@ -0,0 +1,63 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "mtapi.h"
#include "dynapi-internal.h"
#define MT_FRAMEWORK_PATH "/System/Library/PrivateFrameworks/" \
"MediaToolbox.framework/MediaToolbox"
G_DEFINE_TYPE (GstMTApi, gst_mt_api, GST_TYPE_DYN_API);
static void
gst_mt_api_init (GstMTApi * self)
{
}
static void
gst_mt_api_class_init (GstMTApiClass * klass)
{
}
#define SYM_SPEC(name) GST_DYN_SYM_SPEC (GstMTApi, name)
GstMTApi *
gst_mt_api_obtain (GError ** error)
{
static const GstDynSymSpec symbols[] = {
SYM_SPEC (FigCaptureDeviceGetFigBaseObject),
SYM_SPEC (FigCaptureStreamGetFigBaseObject),
SYM_SPEC (kFigCaptureDeviceProperty_ImagerFormatDescription),
SYM_SPEC (kFigCaptureDeviceProperty_ImagerFrameRate),
SYM_SPEC (kFigCaptureDeviceProperty_ImagerMinimumFrameRate),
SYM_SPEC (kFigCaptureDeviceProperty_ImagerSupportedFormatsArray),
SYM_SPEC (kFigCaptureStreamProperty_BufferQueue),
SYM_SPEC (kFigImagerSupportedFormat_FormatDescription),
SYM_SPEC (kFigImagerSupportedFormat_IsBinned),
SYM_SPEC (kFigImagerSupportedFormat_MaxFrameRate),
SYM_SPEC (kFigImagerSupportedFormat_ScaleFactor),
{NULL, 0},
};
return _gst_dyn_api_new (gst_mt_api_get_type (), MT_FRAMEWORK_PATH, symbols,
error);
}

sys/applemedia/mtapi.h Normal file

@ -0,0 +1,72 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_MT_API_H__
#define __GST_MT_API_H__
#include "cmapi.h"
G_BEGIN_DECLS
typedef struct _GstMTApi GstMTApi;
typedef struct _GstMTApiClass GstMTApiClass;
typedef struct _FigCaptureDevice * FigCaptureDeviceRef;
typedef struct _FigCaptureStream * FigCaptureStreamRef;
typedef struct _FigCaptureStreamIface FigCaptureStreamIface;
struct _FigCaptureStreamIface
{
gsize unk;
FigStatus (* Start) (FigCaptureStreamRef stream);
FigStatus (* Stop) (FigCaptureStreamRef stream);
};
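/* Partial mirror of the private MediaToolbox capture-stream interface: only
 * the Start and Stop slots are mapped; the leading gsize member stands in
 * for fields that have not been identified. */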
struct _GstMTApi
{
GstDynApi parent;
FigBaseObjectRef (* FigCaptureDeviceGetFigBaseObject)
(FigCaptureDeviceRef device);
FigBaseObjectRef (* FigCaptureStreamGetFigBaseObject)
(FigCaptureStreamRef stream);
CFStringRef * kFigCaptureDeviceProperty_ImagerFormatDescription;
CFStringRef * kFigCaptureDeviceProperty_ImagerFrameRate;
CFStringRef * kFigCaptureDeviceProperty_ImagerMinimumFrameRate;
CFStringRef * kFigCaptureDeviceProperty_ImagerSupportedFormatsArray;
CFStringRef * kFigCaptureStreamProperty_BufferQueue;
CFStringRef * kFigImagerSupportedFormat_FormatDescription;
CFStringRef * kFigImagerSupportedFormat_IsBinned;
CFStringRef * kFigImagerSupportedFormat_MaxFrameRate;
CFStringRef * kFigImagerSupportedFormat_ScaleFactor;
};
struct _GstMTApiClass
{
GstDynApiClass parent_class;
};
GType gst_mt_api_get_type (void);
GstMTApi * gst_mt_api_obtain (GError ** error);
G_END_DECLS
#endif

sys/applemedia/plugin.m Normal file

@ -0,0 +1,79 @@
/*
* Copyright (C) 2009 Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#ifdef HAVE_IOS
#include "iphonecamerasrc.h"
#else
#include "qtkitvideosrc.h"
#include "miovideosrc.h"
#include <Foundation/Foundation.h>
#endif
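/* The capture sources are platform specific; the VideoToolbox based
 * elements included below are registered on both OS X and iOS. */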
#include "vth264decbin.h"
#include "vth264encbin.h"
#include "vtenc.h"
#include "vtdec.h"
#ifndef HAVE_IOS
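/* Starting (and immediately releasing) a dummy NSThread switches the
 * Cocoa runtime into multi-threaded mode, so Foundation/QTKit perform
 * proper locking when called from GStreamer streaming threads; the
 * assertion below verifies that the switch took effect. */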
static void
enable_mt_mode (void)
{
NSThread * th = [[NSThread alloc] init];
[th start];
[th release];
g_assert ([NSThread isMultiThreaded]);
}
#endif
static gboolean
plugin_init (GstPlugin * plugin)
{
gboolean res = TRUE;
#ifdef HAVE_IOS
res = gst_element_register (plugin, "iphonecamerasrc", GST_RANK_NONE,
GST_TYPE_IPHONE_CAMERA_SRC);
#else
enable_mt_mode ();
res = gst_element_register (plugin, "qtkitvideosrc", GST_RANK_NONE,
GST_TYPE_QTKIT_VIDEO_SRC);
res &= gst_element_register (plugin, "miovideosrc", GST_RANK_NONE,
GST_TYPE_MIO_VIDEO_SRC);
#endif
res &= gst_element_register (plugin, "vth264decbin", GST_RANK_NONE,
GST_TYPE_VT_H264_DEC_BIN);
res &= gst_element_register (plugin, "vth264encbin", GST_RANK_NONE,
GST_TYPE_VT_H264_ENC_BIN);
gst_vtenc_register_elements (plugin);
gst_vtdec_register_elements (plugin);
return res;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"applemedia",
"Elements for capture and codec access on Apple OS X and iOS",
plugin_init, VERSION, "LGPL", "GStreamer", "http://gstreamer.net/")
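/* Example pipeline (untested sketch; assumes ffmpegcolorspace from
 * gst-plugins-base and osxvideosink from gst-plugins-good are installed):
 *
 *   gst-launch-0.10 qtkitvideosrc ! ffmpegcolorspace ! osxvideosink
 */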

61
sys/applemedia/qtkitvideosrc.h Normal file
View file

@ -0,0 +1,61 @@
/*
* Copyright (C) 2009 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_QTKIT_VIDEO_SRC_H__
#define __GST_QTKIT_VIDEO_SRC_H__
#include <gst/base/gstpushsrc.h>
G_BEGIN_DECLS
#define GST_TYPE_QTKIT_VIDEO_SRC \
(gst_qtkit_video_src_get_type ())
#define GST_QTKIT_VIDEO_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_QTKIT_VIDEO_SRC, GstQTKitVideoSrc))
#define GST_QTKIT_VIDEO_SRC_CAST(obj) \
((GstQTKitVideoSrc *) (obj))
#define GST_QTKIT_VIDEO_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_QTKIT_VIDEO_SRC, GstQTKitVideoSrcClass))
#define GST_QTKIT_VIDEO_SRC_IMPL(obj) \
((GstQTKitVideoSrcImpl *) GST_QTKIT_VIDEO_SRC_CAST (obj)->impl)
#define GST_IS_QTKIT_VIDEO_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_QTKIT_VIDEO_SRC))
#define GST_IS_QTKIT_VIDEO_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_QTKIT_VIDEO_SRC))
typedef struct _GstQTKitVideoSrc GstQTKitVideoSrc;
typedef struct _GstQTKitVideoSrcClass GstQTKitVideoSrcClass;
struct _GstQTKitVideoSrc
{
GstPushSrc push_src;
gpointer impl;
};
struct _GstQTKitVideoSrcClass
{
GstPushSrcClass parent_class;
};
GType gst_qtkit_video_src_get_type (void);
G_END_DECLS
#endif /* __GST_QTKIT_VIDEO_SRC_H__ */

711
sys/applemedia/qtkitvideosrc.m Normal file
View file

@ -0,0 +1,711 @@
/*
* Copyright (C) 2009 Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "qtkitvideosrc.h"
#import <QTKit/QTKit.h>
#define DEFAULT_DEVICE_INDEX -1
#define DEVICE_YUV_FOURCC "UYVY"
#define DEVICE_FPS_N 30
#define DEVICE_FPS_D 1
#define FRAME_QUEUE_SIZE 2
GST_DEBUG_CATEGORY (gst_qtkit_video_src_debug);
#define GST_CAT_DEFAULT gst_qtkit_video_src_debug
static const GstElementDetails element_details = {
"QTKitVideoSrc",
"Source/Video",
"Stream data from a video capture device through QTKit",
"Ole André Vadla Ravnås <ole.andre.ravnas@tandberg.com>"
};
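/* The template advertises a fixed set of common resolutions at 30 fps in
 * UYVY; the negotiated size is enforced through the pixel buffer
 * attributes set on the capture output in setCaps:. */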
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (
"video/x-raw-yuv, "
"format = (fourcc) " DEVICE_YUV_FOURCC ", "
"width = (int) 640, "
"height = (int) 480, "
"framerate = (fraction) " G_STRINGIFY (DEVICE_FPS_N) "/"
G_STRINGIFY (DEVICE_FPS_D) ", "
"pixel-aspect-ratio = (fraction) 1/1"
"; "
"video/x-raw-yuv, "
"format = (fourcc) " DEVICE_YUV_FOURCC ", "
"width = (int) 160, "
"height = (int) 120, "
"framerate = (fraction) " G_STRINGIFY (DEVICE_FPS_N) "/"
G_STRINGIFY (DEVICE_FPS_D) ", "
"pixel-aspect-ratio = (fraction) 1/1"
"; "
"video/x-raw-yuv, "
"format = (fourcc) " DEVICE_YUV_FOURCC ", "
"width = (int) 176, "
"height = (int) 144, "
"framerate = (fraction) " G_STRINGIFY (DEVICE_FPS_N) "/"
G_STRINGIFY (DEVICE_FPS_D) ", "
"pixel-aspect-ratio = (fraction) 12/11"
"; "
"video/x-raw-yuv, "
"format = (fourcc) " DEVICE_YUV_FOURCC ", "
"width = (int) 320, "
"height = (int) 240, "
"framerate = (fraction) " G_STRINGIFY (DEVICE_FPS_N) "/"
G_STRINGIFY (DEVICE_FPS_D) ", "
"pixel-aspect-ratio = (fraction) 1/1"
"; "
"video/x-raw-yuv, "
"format = (fourcc) " DEVICE_YUV_FOURCC ", "
"width = (int) 352, "
"height = (int) 288, "
"framerate = (fraction) " G_STRINGIFY (DEVICE_FPS_N) "/"
G_STRINGIFY (DEVICE_FPS_D) ", "
"pixel-aspect-ratio = (fraction) 12/11"
";"
)
);
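/* Conditions for the NSConditionLock guarding the frame queue: the capture
 * callback signals HAS_FRAME_OR_STOP_REQUEST, and create: blocks until a
 * frame is available or unlock has requested a stop. */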
typedef enum _QueueState {
NO_FRAMES = 1,
HAS_FRAME_OR_STOP_REQUEST,
} QueueState;
static GstPushSrcClass * parent_class;
@interface GstQTKitVideoSrcImpl : NSObject {
GstElement *element;
GstBaseSrc *baseSrc;
GstPushSrc *pushSrc;
int deviceIndex;
QTCaptureSession *session;
QTCaptureDeviceInput *input;
QTCaptureDecompressedVideoOutput *output;
QTCaptureDevice *device;
NSConditionLock *queueLock;
NSMutableArray *queue;
BOOL stopRequest;
gint width, height;
GstClockTime duration;
guint64 offset;
GstClockTime prev_ts;
}
- (id)init;
- (id)initWithSrc:(GstPushSrc *)src;
@property int deviceIndex;
- (BOOL)openDevice;
- (void)closeDevice;
- (BOOL)setCaps:(GstCaps *)caps;
- (BOOL)start;
- (BOOL)stop;
- (BOOL)unlock;
- (BOOL)unlockStop;
- (BOOL)query:(GstQuery *)query;
- (GstStateChangeReturn)changeState:(GstStateChange)transition;
- (GstFlowReturn)create:(GstBuffer **)buf;
- (BOOL)timestampBuffer:(GstBuffer *)buf;
- (void)captureOutput:(QTCaptureOutput *)captureOutput
didOutputVideoFrame:(CVImageBufferRef)videoFrame
withSampleBuffer:(QTSampleBuffer *)sampleBuffer
fromConnection:(QTCaptureConnection *)connection;
@end
@implementation GstQTKitVideoSrcImpl
- (id)init
{
return [self initWithSrc:NULL];
}
- (id)initWithSrc:(GstPushSrc *)src
{
if ((self = [super init])) {
element = GST_ELEMENT_CAST (src);
baseSrc = GST_BASE_SRC_CAST (src);
pushSrc = src;
deviceIndex = DEFAULT_DEVICE_INDEX;
device = nil;
gst_base_src_set_live (baseSrc, TRUE);
gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
}
return self;
}
@synthesize deviceIndex;
- (BOOL)openDevice
{
NSString *mediaType;
NSError *error = nil;
mediaType = QTMediaTypeVideo;
if (deviceIndex == -1) {
device = [QTCaptureDevice defaultInputDeviceWithMediaType:mediaType];
if (device == nil) {
GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
("No video capture devices found"), (NULL));
return NO;
}
} else {
NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:mediaType];
if (deviceIndex >= [devices count]) {
GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
("Invalid video capture device index"), (NULL));
return NO;
}
device = [devices objectAtIndex:deviceIndex];
}
GST_INFO ("Opening '%s'", [[device localizedDisplayName] UTF8String]);
if (![device open:&error]) {
GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
("Failed to open device '%s'",
[[device localizedDisplayName] UTF8String]), (NULL));
return NO;
}
return YES;
}
- (void)closeDevice
{
g_assert (![session isRunning]);
[session release];
session = nil;
[input release];
input = nil;
[output release];
output = nil;
device = nil;
}
- (BOOL)setCaps:(GstCaps *)caps
{
GstStructure *s;
NSDictionary *outputAttrs;
BOOL success;
g_assert (device != nil);
s = gst_caps_get_structure (caps, 0);
gst_structure_get_int (s, "width", &width);
gst_structure_get_int (s, "height", &height);
input = [[QTCaptureDeviceInput alloc] initWithDevice:device];
output = [[QTCaptureDecompressedVideoOutput alloc] init];
#if MAC_OS_X_VERSION_MIN_REQUIRED >= 1060
[output setAutomaticallyDropsLateVideoFrames:YES];
#endif
outputAttrs = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:k2vuyPixelFormat],
(id)kCVPixelBufferPixelFormatTypeKey,
[NSNumber numberWithUnsignedInt:width],
(id)kCVPixelBufferWidthKey,
[NSNumber numberWithUnsignedInt:height],
(id)kCVPixelBufferHeightKey,
nil
];
[output setPixelBufferAttributes:outputAttrs];
session = [[QTCaptureSession alloc] init];
success = [session addInput:input
error:nil];
g_assert (success);
success = [session addOutput:output
error:nil];
g_assert (success);
[output setDelegate:self];
[session startRunning];
return YES;
}
- (BOOL)start
{
queueLock = [[NSConditionLock alloc] initWithCondition:NO_FRAMES];
queue = [[NSMutableArray alloc] initWithCapacity:FRAME_QUEUE_SIZE];
stopRequest = NO;
duration = gst_util_uint64_scale (GST_SECOND, DEVICE_FPS_D, DEVICE_FPS_N);
offset = 0;
prev_ts = GST_CLOCK_TIME_NONE;
return YES;
}
- (BOOL)stop
{
[session stopRunning];
[output setDelegate:nil];
for (id frame in queue)
CVBufferRelease ((CVImageBufferRef) frame);
[queueLock release];
queueLock = nil;
[queue release];
queue = nil;
return YES;
}
- (BOOL)query:(GstQuery *)query
{
BOOL result = NO;
if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
if (device != nil) {
GstClockTime min_latency, max_latency;
min_latency = max_latency = duration; /* for now */
result = YES;
GST_DEBUG_OBJECT (element, "reporting latency of min %" GST_TIME_FORMAT
" max %" GST_TIME_FORMAT,
GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
gst_query_set_latency (query, TRUE, min_latency, max_latency);
}
} else {
result = GST_BASE_SRC_CLASS (parent_class)->query (baseSrc, query);
}
return result;
}
- (BOOL)unlock
{
[queueLock lock];
stopRequest = YES;
[queueLock unlockWithCondition:HAS_FRAME_OR_STOP_REQUEST];
return YES;
}
- (BOOL)unlockStop
{
[queueLock lock];
stopRequest = NO;
[queueLock unlock];
return YES;
}
- (GstStateChangeReturn)changeState:(GstStateChange)transition
{
GstStateChangeReturn ret;
if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
if (![self openDevice])
return GST_STATE_CHANGE_FAILURE;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
if (transition == GST_STATE_CHANGE_READY_TO_NULL)
[self closeDevice];
return ret;
}
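/* QTCaptureDecompressedVideoOutput delegate callback, invoked on QTKit's
 * capture thread: keeps at most FRAME_QUEUE_SIZE frames, dropping the
 * oldest one when the queue is full. */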
- (void)captureOutput:(QTCaptureOutput *)captureOutput
didOutputVideoFrame:(CVImageBufferRef)videoFrame
withSampleBuffer:(QTSampleBuffer *)sampleBuffer
fromConnection:(QTCaptureConnection *)connection
{
[queueLock lock];
if (stopRequest) {
[queueLock unlock];
return;
}
if ([queue count] == FRAME_QUEUE_SIZE) {
CVBufferRelease ((CVImageBufferRef) [queue lastObject]);
[queue removeLastObject];
}
CVBufferRetain (videoFrame);
[queue insertObject:(id)videoFrame
atIndex:0];
[queueLock unlockWithCondition:HAS_FRAME_OR_STOP_REQUEST];
}
- (GstFlowReturn)create:(GstBuffer **)buf
{
*buf = NULL;
do {
CVPixelBufferRef frame;
[queueLock lockWhenCondition:HAS_FRAME_OR_STOP_REQUEST];
if (stopRequest) {
[queueLock unlock];
return GST_FLOW_WRONG_STATE;
}
frame = (CVPixelBufferRef) [queue lastObject];
[queue removeLastObject];
[queueLock unlockWithCondition:
([queue count] == 0) ? NO_FRAMES : HAS_FRAME_OR_STOP_REQUEST];
if (*buf != NULL)
gst_buffer_unref (*buf);
*buf = gst_buffer_new_and_alloc (
CVPixelBufferGetBytesPerRow (frame) * CVPixelBufferGetHeight (frame));
CVPixelBufferLockBaseAddress (frame, 0);
memcpy (GST_BUFFER_DATA (*buf), CVPixelBufferGetBaseAddress (frame),
GST_BUFFER_SIZE (*buf));
CVPixelBufferUnlockBaseAddress (frame, 0);
CVBufferRelease (frame);
} while (![self timestampBuffer:*buf]);
return GST_FLOW_OK;
}
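/* Snaps the capture time onto a multiple of the nominal frame duration,
 * skips frames that arrive too early (overproduction) and flags a
 * discontinuity when one or more frames were lost; returning NO makes
 * create: fetch another frame. */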
- (BOOL)timestampBuffer:(GstBuffer *)buf
{
GstClock *clock;
GstClockTime timestamp;
GST_OBJECT_LOCK (element);
clock = GST_ELEMENT_CLOCK (element);
if (clock != NULL) {
gst_object_ref (clock);
timestamp = element->base_time;
} else {
timestamp = GST_CLOCK_TIME_NONE;
}
GST_OBJECT_UNLOCK (element);
if (clock != NULL) {
/* The time according to the current clock */
timestamp = gst_clock_get_time (clock) - timestamp;
if (timestamp > duration)
timestamp -= duration;
else
timestamp = 0;
gst_object_unref (clock);
clock = NULL;
/* Unless it's the first frame, align the current timestamp on a multiple
* of duration since the previous */
if (GST_CLOCK_TIME_IS_VALID (prev_ts)) {
GstClockTime delta;
guint delta_remainder, delta_offset;
if (timestamp < prev_ts) {
GST_DEBUG_OBJECT (element, "clock is ticking backwards");
return NO;
}
/* Round to a duration boundary */
delta = timestamp - prev_ts;
delta_remainder = delta % duration;
if (delta_remainder < duration / 3)
timestamp -= delta_remainder;
else
timestamp += duration - delta_remainder;
/* How many frames are we off then? */
delta = timestamp - prev_ts;
delta_offset = delta / duration;
if (delta_offset == 1) /* perfect */
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
else if (delta_offset > 1) {
guint lost = delta_offset - 1;
GST_DEBUG_OBJECT (element, "lost %d frame%s, setting discont flag",
lost, (lost > 1) ? "s" : "");
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
} else if (delta_offset == 0) { /* overproduction, skip this frame */
GST_DEBUG_OBJECT (element, "skipping frame");
return NO;
}
offset += delta_offset;
}
prev_ts = timestamp;
}
GST_BUFFER_OFFSET (buf) = offset;
GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET (buf) + 1;
GST_BUFFER_TIMESTAMP (buf) = timestamp;
GST_BUFFER_DURATION (buf) = duration;
return YES;
}
@end
/*
* Glue code
*/
enum
{
PROP_0,
PROP_DEVICE_INDEX
};
GST_BOILERPLATE (GstQTKitVideoSrc, gst_qtkit_video_src, GstPushSrc,
GST_TYPE_PUSH_SRC);
static void gst_qtkit_video_src_finalize (GObject * obj);
static void gst_qtkit_video_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_qtkit_video_src_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_qtkit_video_src_change_state (
GstElement * element, GstStateChange transition);
static gboolean gst_qtkit_video_src_set_caps (GstBaseSrc * basesrc,
GstCaps * caps);
static gboolean gst_qtkit_video_src_start (GstBaseSrc * basesrc);
static gboolean gst_qtkit_video_src_stop (GstBaseSrc * basesrc);
static gboolean gst_qtkit_video_src_query (GstBaseSrc * basesrc,
GstQuery * query);
static gboolean gst_qtkit_video_src_unlock (GstBaseSrc * basesrc);
static gboolean gst_qtkit_video_src_unlock_stop (GstBaseSrc * basesrc);
static GstFlowReturn gst_qtkit_video_src_create (GstPushSrc * pushsrc,
GstBuffer ** buf);
static void
gst_qtkit_video_src_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details (element_class, &element_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_template));
}
static void
gst_qtkit_video_src_class_init (GstQTKitVideoSrcClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
gobject_class->finalize = gst_qtkit_video_src_finalize;
gobject_class->get_property = gst_qtkit_video_src_get_property;
gobject_class->set_property = gst_qtkit_video_src_set_property;
gstelement_class->change_state = gst_qtkit_video_src_change_state;
gstbasesrc_class->set_caps = gst_qtkit_video_src_set_caps;
gstbasesrc_class->start = gst_qtkit_video_src_start;
gstbasesrc_class->stop = gst_qtkit_video_src_stop;
gstbasesrc_class->query = gst_qtkit_video_src_query;
gstbasesrc_class->unlock = gst_qtkit_video_src_unlock;
gstbasesrc_class->unlock_stop = gst_qtkit_video_src_unlock_stop;
gstpushsrc_class->create = gst_qtkit_video_src_create;
g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
g_param_spec_int ("device-index", "Device Index",
"The zero-based device index",
-1, G_MAXINT, DEFAULT_DEVICE_INDEX,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
GST_DEBUG_CATEGORY_INIT (gst_qtkit_video_src_debug, "qtkitvideosrc",
0, "Mac OS X QTKit video source");
}
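/* Each GObject/GstElement vfunc below wraps its Objective-C call in a
 * fresh autorelease pool, since these calls typically arrive on GStreamer
 * threads that have no pool of their own. */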
#define OBJC_CALLOUT_BEGIN() \
NSAutoreleasePool *pool; \
\
pool = [[NSAutoreleasePool alloc] init]
#define OBJC_CALLOUT_END() \
[pool release]
static void
gst_qtkit_video_src_init (GstQTKitVideoSrc * src, GstQTKitVideoSrcClass * gclass)
{
OBJC_CALLOUT_BEGIN ();
src->impl = [[GstQTKitVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
OBJC_CALLOUT_END ();
}
static void
gst_qtkit_video_src_finalize (GObject * obj)
{
OBJC_CALLOUT_BEGIN ();
[GST_QTKIT_VIDEO_SRC_IMPL (obj) release];
OBJC_CALLOUT_END ();
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
static void
gst_qtkit_video_src_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstQTKitVideoSrcImpl *impl = GST_QTKIT_VIDEO_SRC_IMPL (object);
switch (prop_id) {
case PROP_DEVICE_INDEX:
g_value_set_int (value, impl.deviceIndex);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_qtkit_video_src_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstQTKitVideoSrcImpl *impl = GST_QTKIT_VIDEO_SRC_IMPL (object);
switch (prop_id) {
case PROP_DEVICE_INDEX:
impl.deviceIndex = g_value_get_int (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static GstStateChangeReturn
gst_qtkit_video_src_change_state (GstElement * element, GstStateChange transition)
{
GstStateChangeReturn ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_QTKIT_VIDEO_SRC_IMPL (element) changeState: transition];
OBJC_CALLOUT_END ();
return ret;
}
static gboolean
gst_qtkit_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_QTKIT_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
OBJC_CALLOUT_END ();
return ret;
}
static gboolean
gst_qtkit_video_src_start (GstBaseSrc * basesrc)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_QTKIT_VIDEO_SRC_IMPL (basesrc) start];
OBJC_CALLOUT_END ();
return ret;
}
static gboolean
gst_qtkit_video_src_stop (GstBaseSrc * basesrc)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_QTKIT_VIDEO_SRC_IMPL (basesrc) stop];
OBJC_CALLOUT_END ();
return ret;
}
static gboolean
gst_qtkit_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_QTKIT_VIDEO_SRC_IMPL (basesrc) query:query];
OBJC_CALLOUT_END ();
return ret;
}
static gboolean
gst_qtkit_video_src_unlock (GstBaseSrc * basesrc)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_QTKIT_VIDEO_SRC_IMPL (basesrc) unlock];
OBJC_CALLOUT_END ();
return ret;
}
static gboolean
gst_qtkit_video_src_unlock_stop (GstBaseSrc * basesrc)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_QTKIT_VIDEO_SRC_IMPL (basesrc) unlockStop];
OBJC_CALLOUT_END ();
return ret;
}
static GstFlowReturn
gst_qtkit_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
{
GstFlowReturn ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_QTKIT_VIDEO_SRC_IMPL (pushsrc) create: buf];
OBJC_CALLOUT_END ();
return ret;
}

88
sys/applemedia/vtapi.c Normal file
View file

@ -0,0 +1,88 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "vtapi.h"
#include "dynapi-internal.h"
#define VT_FRAMEWORK_PATH "/System/Library/PrivateFrameworks/" \
"VideoToolbox.framework/VideoToolbox"
G_DEFINE_TYPE (GstVTApi, gst_vt_api, GST_TYPE_DYN_API);
static void
gst_vt_api_init (GstVTApi * self)
{
}
static void
gst_vt_api_class_init (GstVTApiClass * klass)
{
}
#define SYM_SPEC(name) GST_DYN_SYM_SPEC (GstVTApi, name)
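/* Every entry below is looked up at runtime from the private VideoToolbox
 * framework through the GstDynApi helper; lookup failures are reported via
 * the GError out parameter. */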
GstVTApi *
gst_vt_api_obtain (GError ** error)
{
static const GstDynSymSpec symbols[] = {
SYM_SPEC (VTCompressionSessionCompleteFrames),
SYM_SPEC (VTCompressionSessionCopyProperty),
SYM_SPEC (VTCompressionSessionCopySupportedPropertyDictionary),
SYM_SPEC (VTCompressionSessionCreate),
SYM_SPEC (VTCompressionSessionEncodeFrame),
SYM_SPEC (VTCompressionSessionInvalidate),
SYM_SPEC (VTCompressionSessionRelease),
SYM_SPEC (VTCompressionSessionRetain),
SYM_SPEC (VTCompressionSessionSetProperty),
SYM_SPEC (VTDecompressionSessionCreate),
SYM_SPEC (VTDecompressionSessionDecodeFrame),
SYM_SPEC (VTDecompressionSessionInvalidate),
SYM_SPEC (VTDecompressionSessionRelease),
SYM_SPEC (VTDecompressionSessionRetain),
SYM_SPEC (VTDecompressionSessionWaitForAsynchronousFrames),
SYM_SPEC (kVTCompressionPropertyKey_AllowTemporalCompression),
SYM_SPEC (kVTCompressionPropertyKey_AverageDataRate),
SYM_SPEC (kVTCompressionPropertyKey_ExpectedFrameRate),
SYM_SPEC (kVTCompressionPropertyKey_ExpectedDuration),
SYM_SPEC (kVTCompressionPropertyKey_MaxKeyFrameInterval),
SYM_SPEC (kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration),
SYM_SPEC (kVTCompressionPropertyKey_ProfileLevel),
SYM_SPEC (kVTCompressionPropertyKey_Usage),
SYM_SPEC (kVTEncodeFrameOptionKey_ForceKeyFrame),
SYM_SPEC (kVTProfileLevel_H264_Baseline_1_3),
SYM_SPEC (kVTProfileLevel_H264_Baseline_3_0),
SYM_SPEC (kVTProfileLevel_H264_Extended_5_0),
SYM_SPEC (kVTProfileLevel_H264_High_5_0),
SYM_SPEC (kVTProfileLevel_H264_Main_3_0),
SYM_SPEC (kVTProfileLevel_H264_Main_3_1),
SYM_SPEC (kVTProfileLevel_H264_Main_4_0),
SYM_SPEC (kVTProfileLevel_H264_Main_4_1),
SYM_SPEC (kVTProfileLevel_H264_Main_5_0),
{NULL, 0},
};
return _gst_dyn_api_new (gst_vt_api_get_type (), VT_FRAMEWORK_PATH, symbols,
error);
}

146
sys/applemedia/vtapi.h Normal file
View file

@ -0,0 +1,146 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_VT_API_H__
#define __GST_VT_API_H__
#include "cmapi.h"
G_BEGIN_DECLS
typedef struct _GstVTApi GstVTApi;
typedef struct _GstVTApiClass GstVTApiClass;
typedef enum _VTStatus VTStatus;
typedef guint32 VTFormatId;
typedef struct _VTCompressionSession VTCompressionSession;
typedef struct _VTDecompressionSession VTDecompressionSession;
typedef struct _VTCompressionOutputCallback VTCompressionOutputCallback;
typedef struct _VTDecompressionOutputCallback VTDecompressionOutputCallback;
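/* The callback signatures below were derived by reverse engineering; the
 * parameters named a2..a7 and unk1/unk2 have an unknown purpose and are
 * passed through untouched. */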
typedef VTStatus (* VTCompressionOutputCallbackFunc) (void * data, int a2,
int a3, int a4, FigSampleBuffer * sbuf, int a6, int a7);
typedef void (* VTDecompressionOutputCallbackFunc) (void * data, gsize unk1,
VTStatus result, gsize unk2, CVBufferRef cvbuf);
enum _VTStatus
{
kVTSuccess = 0
};
enum _VTFormat
{
kVTFormatH264 = 'avc1',
kVTFormatJPEG = 'jpeg'
};
struct _VTCompressionOutputCallback
{
VTCompressionOutputCallbackFunc func;
void * data;
};
struct _VTDecompressionOutputCallback
{
VTDecompressionOutputCallbackFunc func;
void * data;
};
struct _GstVTApi
{
GstDynApi parent;
VTStatus (* VTCompressionSessionCompleteFrames)
(VTCompressionSession * session, FigTime completeUntilDisplayTimestamp);
VTStatus (* VTCompressionSessionCopyProperty)
(VTCompressionSession * session, CFTypeRef key, void* unk,
CFTypeRef * value);
VTStatus (* VTCompressionSessionCopySupportedPropertyDictionary)
(VTCompressionSession * session, CFDictionaryRef * dict);
VTStatus (* VTCompressionSessionCreate)
(CFAllocatorRef allocator, gint width, gint height, VTFormatId formatId,
gsize unk1, CFDictionaryRef sourcePixelBufferAttributes, gsize unk2,
VTCompressionOutputCallback outputCallback,
VTCompressionSession ** session);
VTStatus (* VTCompressionSessionEncodeFrame)
(VTCompressionSession * session, CVPixelBufferRef pixelBuffer,
FigTime displayTimestamp, FigTime displayDuration,
CFDictionaryRef frameOptions, void * sourceTrackingCallback,
void * sourceFrameRefCon);
void (* VTCompressionSessionInvalidate)
(VTCompressionSession * session);
void (* VTCompressionSessionRelease)
(VTCompressionSession * session);
VTCompressionSession * (* VTCompressionSessionRetain)
(VTCompressionSession * session);
VTStatus (* VTCompressionSessionSetProperty)
(VTCompressionSession * session, CFStringRef propName,
CFTypeRef propValue);
VTStatus (* VTDecompressionSessionCreate)
(CFAllocatorRef allocator, FigFormatDescription * videoFormatDescription,
CFTypeRef sessionOptions, CFDictionaryRef destinationPixelBufferAttributes,
VTDecompressionOutputCallback * outputCallback,
VTDecompressionSession ** session);
VTStatus (* VTDecompressionSessionDecodeFrame)
(VTDecompressionSession * session, FigSampleBuffer * sbuf, gsize unk1,
gsize unk2, gsize unk3);
void (* VTDecompressionSessionInvalidate)
(VTDecompressionSession * session);
void (* VTDecompressionSessionRelease)
(VTDecompressionSession * session);
VTDecompressionSession * (* VTDecompressionSessionRetain)
(VTDecompressionSession * session);
VTStatus (* VTDecompressionSessionWaitForAsynchronousFrames)
(VTDecompressionSession * session);
CFStringRef * kVTCompressionPropertyKey_AllowTemporalCompression;
CFStringRef * kVTCompressionPropertyKey_AverageDataRate;
CFStringRef * kVTCompressionPropertyKey_ExpectedFrameRate;
CFStringRef * kVTCompressionPropertyKey_ExpectedDuration;
CFStringRef * kVTCompressionPropertyKey_MaxKeyFrameInterval;
CFStringRef * kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration;
CFStringRef * kVTCompressionPropertyKey_ProfileLevel;
CFStringRef * kVTCompressionPropertyKey_Usage;
CFStringRef * kVTEncodeFrameOptionKey_ForceKeyFrame;
CFStringRef * kVTProfileLevel_H264_Baseline_1_3;
CFStringRef * kVTProfileLevel_H264_Baseline_3_0;
CFStringRef * kVTProfileLevel_H264_Extended_5_0;
CFStringRef * kVTProfileLevel_H264_High_5_0;
CFStringRef * kVTProfileLevel_H264_Main_3_0;
CFStringRef * kVTProfileLevel_H264_Main_3_1;
CFStringRef * kVTProfileLevel_H264_Main_4_0;
CFStringRef * kVTProfileLevel_H264_Main_4_1;
CFStringRef * kVTProfileLevel_H264_Main_5_0;
};
struct _GstVTApiClass
{
GstDynApiClass parent_class;
};
GType gst_vt_api_get_type (void);
GstVTApi * gst_vt_api_obtain (GError ** error);
G_END_DECLS
#endif

507
sys/applemedia/vtdec.c Normal file
View file

@ -0,0 +1,507 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "vtdec.h"
#include "corevideobuffer.h"
#include "vtutil.h"
GST_DEBUG_CATEGORY (gst_vtdec_debug);
#define GST_CAT_DEFAULT (gst_vtdec_debug)
#define GST_VTDEC_CODEC_DETAILS_QDATA \
g_quark_from_static_string ("vtdec-codec-details")
static GstElementClass *parent_class = NULL;
static GstStateChangeReturn gst_vtdec_change_state (GstElement * element,
GstStateChange transition);
static gboolean gst_vtdec_sink_setcaps (GstPad * pad, GstCaps * caps);
static GstFlowReturn gst_vtdec_chain (GstPad * pad, GstBuffer * buf);
static FigFormatDescription *gst_vtdec_create_format_description
(GstVTDec * self);
static FigFormatDescription *gst_vtdec_create_format_description_from_codec_data
(GstVTDec * self, GstBuffer * codec_data);
static VTDecompressionSession *gst_vtdec_create_session (GstVTDec * self,
FigFormatDescription * fmt_desc);
static void gst_vtdec_destroy_session (GstVTDec * self,
VTDecompressionSession ** session);
static GstFlowReturn gst_vtdec_decode_buffer (GstVTDec * self, GstBuffer * buf);
static void gst_vtdec_output_frame (void *data, gsize unk1, VTStatus result,
gsize unk2, CVBufferRef cvbuf);
static FigSampleBuffer *gst_vtdec_sample_buffer_from (GstVTDec * self,
GstBuffer * buf);
static void
gst_vtdec_base_init (GstVTDecClass * klass)
{
const GstVTDecoderDetails *codec_details =
GST_VTDEC_CLASS_GET_CODEC_DETAILS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
const int min_width = 1, max_width = G_MAXINT;
const int min_height = 1, max_height = G_MAXINT;
const int min_fps_n = 0, max_fps_n = G_MAXINT;
const int min_fps_d = 1, max_fps_d = 1;
GstPadTemplate *sink_template, *src_template;
GstCaps *sink_caps;
GstElementDetails details;
details.longname = g_strdup_printf ("%s decoder", codec_details->name);
details.klass = g_strdup_printf ("Codec/Decoder/Video");
details.description = g_strdup_printf ("%s decoder", codec_details->name);
gst_element_class_set_details_simple (element_class,
details.longname, details.klass, details.description,
"Ole André Vadla Ravnås <oravnas@cisco.com>");
g_free (details.longname);
g_free (details.klass);
g_free (details.description);
sink_caps = gst_caps_new_simple (codec_details->mimetype,
"width", GST_TYPE_INT_RANGE, min_width, max_width,
"height", GST_TYPE_INT_RANGE, min_height, max_height,
"framerate", GST_TYPE_FRACTION_RANGE,
min_fps_n, min_fps_d, max_fps_n, max_fps_d, NULL);
if (codec_details->format_id == kVTFormatH264) {
gst_structure_set (gst_caps_get_structure (sink_caps, 0),
"stream-format", G_TYPE_STRING, "avc-sample", NULL);
}
sink_template = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
sink_caps);
gst_element_class_add_pad_template (element_class, sink_template);
src_template = gst_pad_template_new ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
gst_caps_new_simple ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'),
"width", GST_TYPE_INT_RANGE, min_width, max_width,
"height", GST_TYPE_INT_RANGE, min_height, max_height,
"framerate", GST_TYPE_FRACTION_RANGE,
min_fps_n, min_fps_d, max_fps_n, max_fps_d, NULL));
gst_element_class_add_pad_template (element_class, src_template);
}
static void
gst_vtdec_class_init (GstVTDecClass * klass)
{
GstElementClass *gstelement_class;
gstelement_class = (GstElementClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gstelement_class->change_state = gst_vtdec_change_state;
}
static void
gst_vtdec_init (GstVTDec * self)
{
GstVTDecClass *klass = (GstVTDecClass *) G_OBJECT_GET_CLASS (self);
GstElementClass *element_klass = GST_ELEMENT_CLASS (klass);
GstElement *element = GST_ELEMENT (self);
self->details = GST_VTDEC_CLASS_GET_CODEC_DETAILS (klass);
self->sinkpad = gst_pad_new_from_template
(gst_element_class_get_pad_template (element_klass, "sink"), "sink");
gst_element_add_pad (element, self->sinkpad);
gst_pad_set_setcaps_function (self->sinkpad, gst_vtdec_sink_setcaps);
gst_pad_set_chain_function (self->sinkpad, gst_vtdec_chain);
self->srcpad = gst_pad_new_from_template
(gst_element_class_get_pad_template (element_klass, "src"), "src");
gst_element_add_pad (element, self->srcpad);
}
static GstStateChangeReturn
gst_vtdec_change_state (GstElement * element, GstStateChange transition)
{
GstVTDec *self = GST_VTDEC_CAST (element);
GError *error = NULL;
GstStateChangeReturn ret;
if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
self->ctx = gst_core_media_ctx_new (GST_API_CORE_VIDEO | GST_API_CORE_MEDIA
| GST_API_VIDEO_TOOLBOX, &error);
if (error != NULL)
goto api_error;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
if (transition == GST_STATE_CHANGE_READY_TO_NULL) {
gst_vtdec_destroy_session (self, &self->session);
self->ctx->cm->FigFormatDescriptionRelease (self->fmt_desc);
self->fmt_desc = NULL;
self->negotiated_width = self->negotiated_height = 0;
self->negotiated_fps_n = self->negotiated_fps_d = 0;
self->caps_width = self->caps_height = 0;
self->caps_fps_n = self->caps_fps_d = 0;
g_object_unref (self->ctx);
self->ctx = NULL;
}
return ret;
api_error:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED, ("API error"),
("%s", error->message));
g_clear_error (&error);
return GST_STATE_CHANGE_FAILURE;
}
}
static gboolean
gst_vtdec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
GstVTDec *self = GST_VTDEC_CAST (GST_PAD_PARENT (pad));
GstStructure *structure;
FigFormatDescription *fmt_desc = NULL;
structure = gst_caps_get_structure (caps, 0);
if (!gst_structure_get_int (structure, "width", &self->negotiated_width))
goto incomplete_caps;
if (!gst_structure_get_int (structure, "height", &self->negotiated_height))
goto incomplete_caps;
gst_structure_get_fraction (structure, "framerate",
&self->negotiated_fps_n, &self->negotiated_fps_d);
/* FIXME */
if (self->negotiated_fps_n == 0)
self->negotiated_fps_n = 30;
if (self->negotiated_fps_d == 0)
self->negotiated_fps_d = 1;
if (self->details->format_id == kVTFormatH264) {
const GValue *codec_data_value;
codec_data_value = gst_structure_get_value (structure, "codec_data");
if (codec_data_value != NULL) {
fmt_desc = gst_vtdec_create_format_description_from_codec_data (self,
gst_value_get_buffer (codec_data_value));
} else {
GST_DEBUG_OBJECT (self, "no codec_data in caps, awaiting future setcaps");
}
} else {
fmt_desc = gst_vtdec_create_format_description (self);
}
if (fmt_desc != NULL) {
gst_vtdec_destroy_session (self, &self->session);
self->ctx->cm->FigFormatDescriptionRelease (self->fmt_desc);
self->fmt_desc = fmt_desc;
self->session = gst_vtdec_create_session (self, fmt_desc);
if (self->session == NULL)
goto session_create_error;
}
return TRUE;
/* ERRORS */
incomplete_caps:
{
self->negotiated_width = self->negotiated_height = -1;
return TRUE;
}
session_create_error:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
("failed to create session"), (NULL));
return FALSE;
}
}
static gboolean
gst_vtdec_is_negotiated (GstVTDec * self)
{
return self->negotiated_width != 0;
}
static gboolean
gst_vtdec_negotiate_downstream (GstVTDec * self)
{
gboolean result;
GstCaps *caps;
GstStructure *s;
if (self->caps_width == self->negotiated_width &&
self->caps_height == self->negotiated_height &&
self->caps_fps_n == self->negotiated_fps_n &&
self->caps_fps_d == self->negotiated_fps_d) {
return TRUE;
}
caps = gst_caps_copy (gst_pad_get_pad_template_caps (self->srcpad));
s = gst_caps_get_structure (caps, 0);
gst_structure_set (s,
"width", G_TYPE_INT, self->negotiated_width,
"height", G_TYPE_INT, self->negotiated_height,
"framerate", GST_TYPE_FRACTION,
self->negotiated_fps_n, self->negotiated_fps_d, NULL);
result = gst_pad_set_caps (self->srcpad, caps);
gst_caps_unref (caps);
self->caps_width = self->negotiated_width;
self->caps_height = self->negotiated_height;
self->caps_fps_n = self->negotiated_fps_n;
self->caps_fps_d = self->negotiated_fps_d;
return result;
}
static GstFlowReturn
gst_vtdec_chain (GstPad * pad, GstBuffer * buf)
{
GstVTDec *self = GST_VTDEC_CAST (GST_PAD_PARENT (pad));
if (!gst_vtdec_is_negotiated (self))
goto not_negotiated;
if (self->session == NULL || self->negotiated_width < 0)
goto pending_caps;
return gst_vtdec_decode_buffer (self, buf);
not_negotiated:
GST_DEBUG_OBJECT (self, "chain called while not negotiated");
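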
gst_buffer_unref (buf);
return GST_FLOW_NOT_NEGOTIATED;
pending_caps:
GST_DEBUG_OBJECT (self, "dropped buffer %p (waiting for complete caps)", buf);
gst_buffer_unref (buf);
return GST_FLOW_OK;
}
static FigFormatDescription *
gst_vtdec_create_format_description (GstVTDec * self)
{
FigFormatDescription *fmt_desc;
FigStatus status;
status = self->ctx->cm->FigVideoFormatDescriptionCreate (NULL,
self->details->format_id, self->negotiated_width, self->negotiated_height,
NULL, &fmt_desc);
if (status == kFigSuccess)
return fmt_desc;
else
return NULL;
}
static FigFormatDescription *
gst_vtdec_create_format_description_from_codec_data (GstVTDec * self,
GstBuffer * codec_data)
{
FigFormatDescription *fmt_desc;
FigStatus status;
status =
self->ctx->cm->
FigVideoFormatDescriptionCreateWithSampleDescriptionExtensionAtom (NULL,
self->details->format_id, self->negotiated_width, self->negotiated_height,
'avcC', GST_BUFFER_DATA (codec_data), GST_BUFFER_SIZE (codec_data),
&fmt_desc);
if (status == kFigSuccess)
return fmt_desc;
else
return NULL;
}
static VTDecompressionSession *
gst_vtdec_create_session (GstVTDec * self, FigFormatDescription * fmt_desc)
{
VTDecompressionSession *session = NULL;
GstCVApi *cv = self->ctx->cv;
CFMutableDictionaryRef pb_attrs;
VTDecompressionOutputCallback callback;
VTStatus status;
pb_attrs = CFDictionaryCreateMutable (NULL, 0, &kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
gst_vtutil_dict_set_i32 (pb_attrs, *(cv->kCVPixelBufferPixelFormatTypeKey),
kCVPixelFormatType_422YpCbCr8Deprecated);
gst_vtutil_dict_set_i32 (pb_attrs, *(cv->kCVPixelBufferWidthKey),
self->negotiated_width);
gst_vtutil_dict_set_i32 (pb_attrs, *(cv->kCVPixelBufferHeightKey),
self->negotiated_height);
gst_vtutil_dict_set_i32 (pb_attrs,
*(cv->kCVPixelBufferBytesPerRowAlignmentKey), 2 * self->negotiated_width);
callback.func = gst_vtdec_output_frame;
callback.data = self;
status = self->ctx->vt->VTDecompressionSessionCreate (NULL, fmt_desc,
NULL, pb_attrs, &callback, &session);
GST_INFO_OBJECT (self, "VTDecompressionSessionCreate for %d x %d => %d",
self->negotiated_width, self->negotiated_height, status);
CFRelease (pb_attrs);
return session;
}
static void
gst_vtdec_destroy_session (GstVTDec * self, VTDecompressionSession ** session)
{
self->ctx->vt->VTDecompressionSessionInvalidate (*session);
self->ctx->vt->VTDecompressionSessionRelease (*session);
*session = NULL;
}
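/* Decoding is driven synchronously: the frame is submitted and the element
 * then waits for all asynchronous frames, so gst_vtdec_output_frame() has
 * pushed the decoded buffer downstream (and recorded cur_flowret) before
 * this function returns. */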
static GstFlowReturn
gst_vtdec_decode_buffer (GstVTDec * self, GstBuffer * buf)
{
GstVTApi *vt = self->ctx->vt;
FigSampleBuffer *sbuf;
VTStatus status;
self->cur_inbuf = buf;
self->cur_flowret = GST_FLOW_OK;
sbuf = gst_vtdec_sample_buffer_from (self, buf);
status = vt->VTDecompressionSessionDecodeFrame (self->session, sbuf, 0, 0, 0);
if (status != 0) {
GST_WARNING_OBJECT (self, "VTDecompressionSessionDecodeFrame returned %d",
status);
}
status = vt->VTDecompressionSessionWaitForAsynchronousFrames (self->session);
if (status != 0) {
GST_WARNING_OBJECT (self,
"VTDecompressionSessionWaitForAsynchronousFrames returned %d", status);
}
self->ctx->cm->FigSampleBufferRelease (sbuf);
gst_buffer_unref (buf);
self->cur_inbuf = NULL;
return self->cur_flowret;
}
static void
gst_vtdec_output_frame (void *data, gsize unk1, VTStatus result, gsize unk2,
CVBufferRef cvbuf)
{
GstVTDec *self = GST_VTDEC_CAST (data);
GstBuffer *buf;
if (result != kVTSuccess || self->cur_flowret != GST_FLOW_OK)
goto beach;
if (!gst_vtdec_negotiate_downstream (self))
goto beach;
buf = gst_core_video_buffer_new (self->ctx, cvbuf);
gst_buffer_set_caps (buf, GST_PAD_CAPS (self->srcpad));
gst_buffer_copy_metadata (buf, self->cur_inbuf,
GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);
self->cur_flowret = gst_pad_push (self->srcpad, buf);
beach:
return;
}
static FigSampleBuffer *
gst_vtdec_sample_buffer_from (GstVTDec * self, GstBuffer * buf)
{
GstCMApi *cm = self->ctx->cm;
FigStatus status;
FigBlockBuffer *bbuf = NULL;
FigSampleBuffer *sbuf = NULL;
g_assert (self->fmt_desc != NULL);
status = cm->FigBlockBufferCreateWithMemoryBlock (NULL,
GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf), kCFAllocatorNull, NULL,
0, GST_BUFFER_SIZE (buf), FALSE, &bbuf);
if (status != kFigSuccess)
goto beach;
status = cm->FigSampleBufferCreate (NULL, bbuf, TRUE, 0, 0, self->fmt_desc,
1, 0, NULL, 0, NULL, &sbuf);
if (status != kFigSuccess)
goto beach;
beach:
cm->FigBlockBufferRelease (bbuf);
return sbuf;
}
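/* Registers one element type per entry in gst_vtdec_codecs below; the
 * element_name field becomes part of the type name (vtdec_h264,
 * vtdec_jpeg) and the codec details are attached as type qdata. */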
static void
gst_vtdec_register (GstPlugin * plugin,
const GstVTDecoderDetails * codec_details)
{
GTypeInfo type_info = {
sizeof (GstVTDecClass),
(GBaseInitFunc) gst_vtdec_base_init,
NULL,
(GClassInitFunc) gst_vtdec_class_init,
NULL,
NULL,
sizeof (GstVTDecClass),
0,
(GInstanceInitFunc) gst_vtdec_init,
};
gchar *type_name;
GType type;
gboolean result;
type_name = g_strdup_printf ("vtdec_%s", codec_details->element_name);
type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &type_info, 0);
g_type_set_qdata (type, GST_VTDEC_CODEC_DETAILS_QDATA,
(gpointer) codec_details);
result = gst_element_register (plugin, type_name, GST_RANK_NONE, type);
if (!result) {
GST_ERROR_OBJECT (plugin, "failed to register element %s", type_name);
}
g_free (type_name);
}
static const GstVTDecoderDetails gst_vtdec_codecs[] = {
{"H.264", "h264", "video/x-h264", kVTFormatH264},
{"JPEG", "jpeg", "image/jpeg", kVTFormatJPEG}
};
void
gst_vtdec_register_elements (GstPlugin * plugin)
{
guint i;
GST_DEBUG_CATEGORY_INIT (gst_vtdec_debug, "vtdec",
0, "Apple VideoToolbox Decoder Wrapper");
for (i = 0; i != G_N_ELEMENTS (gst_vtdec_codecs); i++)
gst_vtdec_register (plugin, &gst_vtdec_codecs[i]);
}

80
sys/applemedia/vtdec.h Normal file
View file

@ -0,0 +1,80 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_VTDEC_H__
#define __GST_VTDEC_H__
#include <gst/gst.h>
#include "coremediactx.h"
G_BEGIN_DECLS
#define GST_VTDEC_CAST(obj) \
((GstVTDec *) (obj))
#define GST_VTDEC_CLASS_GET_CODEC_DETAILS(klass) \
((const GstVTDecoderDetails *) g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass), \
GST_VTDEC_CODEC_DETAILS_QDATA))
typedef struct _GstVTDecoderDetails GstVTDecoderDetails;
typedef struct _GstVTDecClassParams GstVTDecClassParams;
typedef struct _GstVTDecClass GstVTDecClass;
typedef struct _GstVTDec GstVTDec;
struct _GstVTDecoderDetails
{
const gchar * name;
const gchar * element_name;
const gchar * mimetype;
VTFormatId format_id;
};
struct _GstVTDecClass
{
GstElementClass parent_class;
};
struct _GstVTDec
{
GstElement parent;
const GstVTDecoderDetails * details;
GstPad * sinkpad;
GstPad * srcpad;
GstCoreMediaCtx * ctx;
gint negotiated_width, negotiated_height;
gint negotiated_fps_n, negotiated_fps_d;
gint caps_width, caps_height;
gint caps_fps_n, caps_fps_d;
FigFormatDescription * fmt_desc;
VTDecompressionSession * session;
GstBuffer * cur_inbuf;
GstFlowReturn cur_flowret;
};
void gst_vtdec_register_elements (GstPlugin * plugin);
G_END_DECLS
#endif /* __GST_VTDEC_H__ */

945
sys/applemedia/vtenc.c Normal file
View file

@ -0,0 +1,945 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "vtenc.h"
#include "coremediabuffer.h"
#include "vtutil.h"
#define VTENC_DEFAULT_USAGE 6 /* Profile: Baseline Level: 2.1 */
#define VTENC_DEFAULT_BITRATE 768
#define VTENC_MIN_RESET_INTERVAL (GST_SECOND / 2)
GST_DEBUG_CATEGORY (gst_vtenc_debug);
#define GST_CAT_DEFAULT (gst_vtenc_debug)
#define GST_VTENC_CODEC_DETAILS_QDATA \
g_quark_from_static_string ("vtenc-codec-details")
enum
{
PROP_0,
PROP_USAGE,
PROP_BITRATE
};
static GstElementClass *parent_class = NULL;
static void gst_vtenc_get_property (GObject * obj, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_vtenc_set_property (GObject * obj, guint prop_id,
const GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_vtenc_change_state (GstElement * element,
GstStateChange transition);
static gboolean gst_vtenc_sink_setcaps (GstPad * pad, GstCaps * caps);
static void gst_vtenc_clear_cached_caps_downstream (GstVTEnc * self);
static GstFlowReturn gst_vtenc_chain (GstPad * pad, GstBuffer * buf);
static gboolean gst_vtenc_src_event (GstPad * pad, GstEvent * event);
static VTCompressionSession *gst_vtenc_create_session (GstVTEnc * self);
static void gst_vtenc_destroy_session (GstVTEnc * self,
VTCompressionSession ** session);
static void gst_vtenc_session_dump_properties (GstVTEnc * self,
VTCompressionSession * session);
static void gst_vtenc_session_configure_usage (GstVTEnc * self,
VTCompressionSession * session, gint usage);
static void gst_vtenc_session_configure_expected_framerate (GstVTEnc * self,
VTCompressionSession * session, gdouble framerate);
static void gst_vtenc_session_configure_expected_duration (GstVTEnc * self,
VTCompressionSession * session, gdouble duration);
static void gst_vtenc_session_configure_max_keyframe_interval (GstVTEnc * self,
VTCompressionSession * session, gint interval);
static void gst_vtenc_session_configure_max_keyframe_interval_duration
(GstVTEnc * self, VTCompressionSession * session, gdouble duration);
static void gst_vtenc_session_configure_bitrate (GstVTEnc * self,
VTCompressionSession * session, guint bitrate);
static VTStatus gst_vtenc_session_configure_property_int (GstVTEnc * self,
VTCompressionSession * session, CFStringRef name, gint value);
static VTStatus gst_vtenc_session_configure_property_double (GstVTEnc * self,
VTCompressionSession * session, CFStringRef name, gdouble value);
static GstFlowReturn gst_vtenc_encode_frame (GstVTEnc * self, GstBuffer * buf);
static VTStatus gst_vtenc_output_buffer (void *data, int a2, int a3, int a4,
FigSampleBuffer * sbuf, int a6, int a7);
static gboolean gst_vtenc_buffer_is_keyframe (GstVTEnc * self,
FigSampleBuffer * sbuf);
static void
gst_vtenc_base_init (GstVTEncClass * klass)
{
const GstVTEncoderDetails *codec_details =
GST_VTENC_CLASS_GET_CODEC_DETAILS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
const int min_width = 1, max_width = G_MAXINT;
const int min_height = 1, max_height = G_MAXINT;
const int min_fps_n = 0, max_fps_n = G_MAXINT;
const int min_fps_d = 1, max_fps_d = 1;
GstPadTemplate *sink_template, *src_template;
GstCaps *src_caps;
GstElementDetails details;
details.longname = g_strdup_printf ("%s encoder", codec_details->name);
details.klass = g_strdup_printf ("Codec/Encoder/Video");
details.description = g_strdup_printf ("%s encoder", codec_details->name);
gst_element_class_set_details_simple (element_class,
details.longname, details.klass, details.description,
"Ole André Vadla Ravnås <oravnas@cisco.com>");
g_free (details.longname);
g_free (details.klass);
g_free (details.description);
sink_template = gst_pad_template_new ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
gst_caps_new_simple ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'),
"width", GST_TYPE_INT_RANGE, min_width, max_width,
"height", GST_TYPE_INT_RANGE, min_height, max_height,
"framerate", GST_TYPE_FRACTION_RANGE,
min_fps_n, min_fps_d, max_fps_n, max_fps_d, NULL));
gst_element_class_add_pad_template (element_class, sink_template);
src_caps = gst_caps_new_simple (codec_details->mimetype,
"width", GST_TYPE_INT_RANGE, min_width, max_width,
"height", GST_TYPE_INT_RANGE, min_height, max_height,
"framerate", GST_TYPE_FRACTION_RANGE,
min_fps_n, min_fps_d, max_fps_n, max_fps_d, NULL);
if (codec_details->format_id == kVTFormatH264) {
gst_structure_set (gst_caps_get_structure (src_caps, 0),
"stream-format", G_TYPE_STRING, "avc-sample", NULL);
}
src_template = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
src_caps);
gst_element_class_add_pad_template (element_class, src_template);
}
static void
gst_vtenc_class_init (GstVTEncClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->get_property = gst_vtenc_get_property;
gobject_class->set_property = gst_vtenc_set_property;
gstelement_class->change_state = gst_vtenc_change_state;
g_object_class_install_property (gobject_class, PROP_USAGE,
g_param_spec_int ("usage", "Usage",
"Usage enumeration value",
G_MININT, G_MAXINT, VTENC_DEFAULT_USAGE,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_BITRATE,
g_param_spec_uint ("bitrate", "Bitrate",
"Target video bitrate in kbps",
1, G_MAXUINT, VTENC_DEFAULT_BITRATE,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
}
static void
gst_vtenc_init (GstVTEnc * self)
{
GstVTEncClass *klass = (GstVTEncClass *) G_OBJECT_GET_CLASS (self);
GstElementClass *element_klass = GST_ELEMENT_CLASS (klass);
GstElement *element = GST_ELEMENT (self);
self->details = GST_VTENC_CLASS_GET_CODEC_DETAILS (klass);
self->sinkpad = gst_pad_new_from_template
(gst_element_class_get_pad_template (element_klass, "sink"), "sink");
gst_element_add_pad (element, self->sinkpad);
gst_pad_set_setcaps_function (self->sinkpad, gst_vtenc_sink_setcaps);
gst_pad_set_chain_function (self->sinkpad, gst_vtenc_chain);
self->srcpad = gst_pad_new_from_template
(gst_element_class_get_pad_template (element_klass, "src"), "src");
gst_pad_set_event_function (self->srcpad, gst_vtenc_src_event);
gst_element_add_pad (element, self->srcpad);
/* These could be controlled by properties later */
self->dump_properties = FALSE;
self->dump_attributes = FALSE;
}
static gint
gst_vtenc_get_usage (GstVTEnc * self)
{
gint result;
GST_OBJECT_LOCK (self);
result = self->usage;
GST_OBJECT_UNLOCK (self);
return result;
}
static void
gst_vtenc_set_usage (GstVTEnc * self, gint usage)
{
GST_OBJECT_LOCK (self);
self->usage = usage;
if (self->session != NULL)
gst_vtenc_session_configure_usage (self, self->session, usage);
GST_OBJECT_UNLOCK (self);
}
static guint
gst_vtenc_get_bitrate (GstVTEnc * self)
{
guint result;
GST_OBJECT_LOCK (self);
result = self->bitrate;
GST_OBJECT_UNLOCK (self);
return result;
}
static void
gst_vtenc_set_bitrate (GstVTEnc * self, guint bitrate)
{
GST_OBJECT_LOCK (self);
self->bitrate = bitrate;
if (self->session != NULL)
gst_vtenc_session_configure_bitrate (self, self->session, bitrate);
GST_OBJECT_UNLOCK (self);
}
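/* The "bitrate" property is exposed in kbit/s, while self->bitrate is kept
 * in bytes per second; the property handlers below convert with
 * "* 8 / 1000" and "* 1000 / 8" respectively. */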
static void
gst_vtenc_get_property (GObject * obj, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstVTEnc *self = GST_VTENC_CAST (obj);
switch (prop_id) {
case PROP_USAGE:
g_value_set_int (value, gst_vtenc_get_usage (self));
break;
case PROP_BITRATE:
g_value_set_uint (value, gst_vtenc_get_bitrate (self) * 8 / 1000);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (obj, prop_id, pspec);
break;
}
}
static void
gst_vtenc_set_property (GObject * obj, guint prop_id, const GValue * value,
GParamSpec * pspec)
{
GstVTEnc *self = GST_VTENC_CAST (obj);
switch (prop_id) {
case PROP_USAGE:
gst_vtenc_set_usage (self, g_value_get_int (value));
break;
case PROP_BITRATE:
gst_vtenc_set_bitrate (self, g_value_get_uint (value) * 1000 / 8);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (obj, prop_id, pspec);
break;
}
}
static GstStateChangeReturn
gst_vtenc_change_state (GstElement * element, GstStateChange transition)
{
GstVTEnc *self = GST_VTENC_CAST (element);
GError *error = NULL;
GstStateChangeReturn ret;
if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
self->ctx = gst_core_media_ctx_new (GST_API_CORE_VIDEO | GST_API_CORE_MEDIA
| GST_API_VIDEO_TOOLBOX, &error);
if (error != NULL)
goto api_error;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
if (transition == GST_STATE_CHANGE_READY_TO_NULL) {
GST_OBJECT_LOCK (self);
gst_vtenc_destroy_session (self, &self->session);
if (self->options != NULL) {
CFRelease (self->options);
self->options = NULL;
}
self->negotiated_width = self->negotiated_height = 0;
self->negotiated_fps_n = self->negotiated_fps_d = 0;
gst_vtenc_clear_cached_caps_downstream (self);
GST_OBJECT_UNLOCK (self);
g_object_unref (self->ctx);
self->ctx = NULL;
}
return ret;
api_error:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED, ("API error"),
("%s", error->message));
g_clear_error (&error);
return GST_STATE_CHANGE_FAILURE;
}
}
static gboolean
gst_vtenc_sink_setcaps (GstPad * pad, GstCaps * caps)
{
GstVTEnc *self = GST_VTENC_CAST (GST_PAD_PARENT (pad));
GstStructure *structure;
VTCompressionSession *session;
GST_OBJECT_LOCK (self);
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "width", &self->negotiated_width);
gst_structure_get_int (structure, "height", &self->negotiated_height);
gst_structure_get_fraction (structure, "framerate",
&self->negotiated_fps_n, &self->negotiated_fps_d);
gst_vtenc_destroy_session (self, &self->session);
GST_OBJECT_UNLOCK (self);
session = gst_vtenc_create_session (self);
GST_OBJECT_LOCK (self);
self->session = session;
if (self->options != NULL)
CFRelease (self->options);
self->options = CFDictionaryCreateMutable (NULL, 0,
&kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
GST_OBJECT_UNLOCK (self);
return TRUE;
}
static gboolean
gst_vtenc_is_negotiated (GstVTEnc * self)
{
return self->negotiated_width != 0;
}
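/* For H.264 the codec_data is taken from the encoded sample buffer's format
 * description: the avcC sample description extension atom is copied into a
 * GstBuffer and attached to the downstream caps. */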
static gboolean
gst_vtenc_negotiate_downstream (GstVTEnc * self, FigSampleBuffer * sbuf)
{
gboolean result;
GstCMApi *cm = self->ctx->cm;
GstCaps *caps;
GstStructure *s;
if (self->caps_width == self->negotiated_width &&
self->caps_height == self->negotiated_height &&
self->caps_fps_n == self->negotiated_fps_n &&
self->caps_fps_d == self->negotiated_fps_d) {
return TRUE;
}
caps = gst_caps_copy (gst_pad_get_pad_template_caps (self->srcpad));
s = gst_caps_get_structure (caps, 0);
gst_structure_set (s,
"width", G_TYPE_INT, self->negotiated_width,
"height", G_TYPE_INT, self->negotiated_height,
"framerate", GST_TYPE_FRACTION,
self->negotiated_fps_n, self->negotiated_fps_d, NULL);
if (self->details->format_id == kVTFormatH264) {
FigFormatDescription *fmt;
CFDictionaryRef atoms;
CFStringRef avccKey;
CFDataRef avcc;
GstBuffer *codec_data;
fmt = cm->FigSampleBufferGetFormatDescription (sbuf);
atoms = cm->FigFormatDescriptionGetExtension (fmt,
*(cm->kFigFormatDescriptionExtension_SampleDescriptionExtensionAtoms));
avccKey = CFStringCreateWithCString (NULL, "avcC", kCFStringEncodingUTF8);
avcc = CFDictionaryGetValue (atoms, avccKey);
CFRelease (avccKey);
codec_data = gst_buffer_new_and_alloc (CFDataGetLength (avcc));
CFDataGetBytes (avcc, CFRangeMake (0, CFDataGetLength (avcc)),
GST_BUFFER_DATA (codec_data));
gst_structure_set (s, "codec_data", GST_TYPE_BUFFER, codec_data, NULL);
gst_buffer_unref (codec_data);
}
GST_OBJECT_UNLOCK (self);
result = gst_pad_set_caps (self->srcpad, caps);
GST_OBJECT_LOCK (self);
gst_caps_unref (caps);
self->caps_width = self->negotiated_width;
self->caps_height = self->negotiated_height;
self->caps_fps_n = self->negotiated_fps_n;
self->caps_fps_d = self->negotiated_fps_d;
return result;
}
static void
gst_vtenc_clear_cached_caps_downstream (GstVTEnc * self)
{
self->caps_width = self->caps_height = 0;
self->caps_fps_n = self->caps_fps_d = 0;
}
static GstFlowReturn
gst_vtenc_chain (GstPad * pad, GstBuffer * buf)
{
GstVTEnc *self = GST_VTENC_CAST (GST_PAD_PARENT (pad));
if (!gst_vtenc_is_negotiated (self))
goto not_negotiated;
return gst_vtenc_encode_frame (self, buf);
not_negotiated:
gst_buffer_unref (buf);
return GST_FLOW_NOT_NEGOTIATED;
}
static gboolean
gst_vtenc_src_event (GstPad * pad, GstEvent * event)
{
GstVTEnc *self = GST_VTENC_CAST (GST_PAD_PARENT (pad));
gboolean ret = TRUE;
gboolean handled = FALSE;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_UPSTREAM:
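      /* a downstream element (e.g. an RTP session) may send a custom
       * "rtcp-pli" event upstream when the receiver reports picture loss;
       * in that case ask VideoToolbox to make the next frame a keyframe */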
if (gst_event_has_name (event, "rtcp-pli")) {
GST_OBJECT_LOCK (self);
if (self->options != NULL) {
GST_INFO_OBJECT (self, "received PLI, will force intra");
CFDictionaryAddValue (self->options,
*(self->ctx->vt->kVTEncodeFrameOptionKey_ForceKeyFrame),
kCFBooleanTrue);
} else {
GST_INFO_OBJECT (self,
"received PLI but encode not yet started, ignoring");
}
GST_OBJECT_UNLOCK (self);
handled = TRUE;
}
break;
default:
break;
}
if (handled)
gst_event_unref (event);
else
ret = gst_pad_push_event (self->sinkpad, event);
return ret;
}
static VTCompressionSession *
gst_vtenc_create_session (GstVTEnc * self)
{
VTCompressionSession *session = NULL;
GstCVApi *cv = self->ctx->cv;
GstVTApi *vt = self->ctx->vt;
CFMutableDictionaryRef pb_attrs;
VTCompressionOutputCallback callback;
VTStatus status;
pb_attrs = CFDictionaryCreateMutable (NULL, 0, &kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
gst_vtutil_dict_set_i32 (pb_attrs, *(cv->kCVPixelBufferPixelFormatTypeKey),
kCVPixelFormatType_422YpCbCr8Deprecated);
gst_vtutil_dict_set_i32 (pb_attrs, *(cv->kCVPixelBufferWidthKey),
self->negotiated_width);
gst_vtutil_dict_set_i32 (pb_attrs, *(cv->kCVPixelBufferHeightKey),
self->negotiated_height);
gst_vtutil_dict_set_i32 (pb_attrs,
*(cv->kCVPixelBufferBytesPerRowAlignmentKey), 2 * self->negotiated_width);
callback.func = gst_vtenc_output_buffer;
callback.data = self;
status = vt->VTCompressionSessionCreate (NULL,
self->negotiated_width, self->negotiated_height,
self->details->format_id, 0, pb_attrs, 0, callback, &session);
GST_INFO_OBJECT (self, "VTCompressionSessionCreate for %d x %d => %d",
self->negotiated_width, self->negotiated_height, status);
if (status != kVTSuccess)
goto beach;
GST_OBJECT_LOCK (self);
if (GST_ELEMENT_CLOCK (self) != NULL) {
self->last_create_session = gst_clock_get_time (GST_ELEMENT_CLOCK (self));
} else {
self->last_create_session = GST_CLOCK_TIME_NONE;
}
GST_OBJECT_UNLOCK (self);
if (self->dump_properties) {
gst_vtenc_session_dump_properties (self, session);
self->dump_properties = FALSE;
}
gst_vtenc_session_configure_usage (self, session, gst_vtenc_get_usage (self));
gst_vtenc_session_configure_expected_framerate (self, session,
(gdouble) self->negotiated_fps_n / (gdouble) self->negotiated_fps_d);
gst_vtenc_session_configure_expected_duration (self, session,
(gdouble) self->negotiated_fps_d / (gdouble) self->negotiated_fps_n);
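  /* the profile/level is currently hard-coded to H.264 Baseline 1.3 */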
status = vt->VTCompressionSessionSetProperty (session,
*(vt->kVTCompressionPropertyKey_ProfileLevel),
*(vt->kVTProfileLevel_H264_Baseline_1_3));
GST_DEBUG_OBJECT (self, "kVTCompressionPropertyKey_ProfileLevel => %d",
status);
status = vt->VTCompressionSessionSetProperty (session,
*(vt->kVTCompressionPropertyKey_AllowTemporalCompression),
kCFBooleanTrue);
GST_DEBUG_OBJECT (self,
"kVTCompressionPropertyKey_AllowTemporalCompression => %d", status);
gst_vtenc_session_configure_max_keyframe_interval (self, session, 0);
gst_vtenc_session_configure_max_keyframe_interval_duration (self, session,
G_MAXDOUBLE);
gst_vtenc_session_configure_bitrate (self, session,
gst_vtenc_get_bitrate (self));
beach:
CFRelease (pb_attrs);
return session;
}
static void
gst_vtenc_destroy_session (GstVTEnc * self, VTCompressionSession ** session)
{
self->ctx->vt->VTCompressionSessionInvalidate (*session);
self->ctx->vt->VTCompressionSessionRelease (*session);
*session = NULL;
}
typedef struct
{
GstVTEnc *self;
GstVTApi *vt;
VTCompressionSession *session;
} GstVTDumpPropCtx;
static void
gst_vtenc_session_dump_property (CFStringRef prop_name,
CFDictionaryRef prop_attrs, GstVTDumpPropCtx * dpc)
{
gchar *name_str;
CFTypeRef prop_value;
VTStatus status;
name_str = gst_vtutil_string_to_utf8 (prop_name);
if (dpc->self->dump_attributes) {
gchar *attrs_str;
attrs_str = gst_vtutil_object_to_string (prop_attrs);
GST_DEBUG_OBJECT (dpc->self, "%s = %s", name_str, attrs_str);
g_free (attrs_str);
}
status = dpc->vt->VTCompressionSessionCopyProperty (dpc->session, prop_name,
NULL, &prop_value);
if (status == kVTSuccess) {
gchar *value_str;
value_str = gst_vtutil_object_to_string (prop_value);
GST_DEBUG_OBJECT (dpc->self, "%s = %s", name_str, value_str);
g_free (value_str);
if (prop_value != NULL)
CFRelease (prop_value);
} else {
GST_DEBUG_OBJECT (dpc->self, "%s = <failed to query: %d>",
name_str, status);
}
g_free (name_str);
}
static void
gst_vtenc_session_dump_properties (GstVTEnc * self,
VTCompressionSession * session)
{
GstVTDumpPropCtx dpc = { self, self->ctx->vt, session };
CFDictionaryRef dict;
VTStatus status;
status = self->ctx->vt->VTCompressionSessionCopySupportedPropertyDictionary
(session, &dict);
if (status != kVTSuccess)
goto error;
CFDictionaryApplyFunction (dict,
(CFDictionaryApplierFunction) gst_vtenc_session_dump_property, &dpc);
CFRelease (dict);
return;
error:
GST_WARNING_OBJECT (self, "failed to dump properties");
}
static void
gst_vtenc_session_configure_usage (GstVTEnc * self,
VTCompressionSession * session, gint usage)
{
gst_vtenc_session_configure_property_int (self, session,
*(self->ctx->vt->kVTCompressionPropertyKey_Usage), usage);
}
static void
gst_vtenc_session_configure_expected_framerate (GstVTEnc * self,
VTCompressionSession * session, gdouble framerate)
{
gst_vtenc_session_configure_property_double (self, session,
*(self->ctx->vt->kVTCompressionPropertyKey_ExpectedFrameRate), framerate);
}
static void
gst_vtenc_session_configure_expected_duration (GstVTEnc * self,
VTCompressionSession * session, gdouble duration)
{
gst_vtenc_session_configure_property_double (self, session,
*(self->ctx->vt->kVTCompressionPropertyKey_ExpectedDuration), duration);
}
static void
gst_vtenc_session_configure_max_keyframe_interval (GstVTEnc * self,
VTCompressionSession * session, gint interval)
{
gst_vtenc_session_configure_property_int (self, session,
*(self->ctx->vt->kVTCompressionPropertyKey_MaxKeyFrameInterval),
interval);
}
static void
gst_vtenc_session_configure_max_keyframe_interval_duration (GstVTEnc * self,
VTCompressionSession * session, gdouble duration)
{
gst_vtenc_session_configure_property_double (self, session,
*(self->ctx->vt->kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration),
duration);
}
static void
gst_vtenc_session_configure_bitrate (GstVTEnc * self,
VTCompressionSession * session, guint bitrate)
{
gst_vtenc_session_configure_property_int (self, session,
*(self->ctx->vt->kVTCompressionPropertyKey_AverageDataRate), bitrate);
}
static VTStatus
gst_vtenc_session_configure_property_int (GstVTEnc * self,
VTCompressionSession * session, CFStringRef name, gint value)
{
CFNumberRef num;
VTStatus status;
gchar name_str[128];
num = CFNumberCreate (NULL, kCFNumberIntType, &value);
status = self->ctx->vt->VTCompressionSessionSetProperty (session, name, num);
CFRelease (num);
CFStringGetCString (name, name_str, sizeof (name_str), kCFStringEncodingUTF8);
GST_DEBUG_OBJECT (self, "%s(%d) => %d", name_str, value, status);
return status;
}
static VTStatus
gst_vtenc_session_configure_property_double (GstVTEnc * self,
VTCompressionSession * session, CFStringRef name, gdouble value)
{
CFNumberRef num;
VTStatus status;
gchar name_str[128];
num = CFNumberCreate (NULL, kCFNumberDoubleType, &value);
status = self->ctx->vt->VTCompressionSessionSetProperty (session, name, num);
CFRelease (num);
CFStringGetCString (name, name_str, sizeof (name_str), kCFStringEncodingUTF8);
GST_DEBUG_OBJECT (self, "%s(%f) => %d", name_str, value, status);
return status;
}
static GstFlowReturn
gst_vtenc_encode_frame (GstVTEnc * self, GstBuffer * buf)
{
GstCVApi *cv = self->ctx->cv;
GstVTApi *vt = self->ctx->vt;
FigTime ts, duration;
CVPixelBufferRef pbuf = NULL;
VTStatus vt_status;
self->cur_inbuf = buf;
self->cur_flowret = GST_FLOW_OK;
ts = self->ctx->cm->FigTimeMake
(GST_TIME_AS_MSECONDS (GST_BUFFER_TIMESTAMP (buf)), 1000);
duration = self->ctx->cm->FigTimeMake
(GST_TIME_AS_MSECONDS (GST_BUFFER_DURATION (buf)), 1000);
if (GST_IS_CORE_MEDIA_BUFFER (buf)) {
GstCoreMediaBuffer *cmbuf = GST_CORE_MEDIA_BUFFER_CAST (buf);
pbuf = gst_core_media_buffer_get_pixel_buffer (cmbuf);
}
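  /* not a Core Media buffer: wrap the GstBuffer's data in a CVPixelBuffer
   * without copying; the extra ref taken below is dropped again by the
   * release-bytes callback */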
if (pbuf == NULL) {
CVReturn cv_ret;
cv_ret = cv->CVPixelBufferCreateWithBytes (NULL,
self->negotiated_width, self->negotiated_height,
kCVPixelFormatType_422YpCbCr8Deprecated, GST_BUFFER_DATA (buf),
self->negotiated_width * 2,
(CVPixelBufferReleaseBytesCallback) gst_buffer_unref, buf, NULL, &pbuf);
if (cv_ret != kCVReturnSuccess)
goto cv_error;
gst_buffer_ref (buf);
}
GST_OBJECT_LOCK (self);
self->expect_keyframe = CFDictionaryContainsKey (self->options,
*(vt->kVTEncodeFrameOptionKey_ForceKeyFrame));
if (self->expect_keyframe) {
gst_vtenc_clear_cached_caps_downstream (self);
if (self->reset_on_force_keyframe) {
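      /* some VideoToolbox versions appear to ignore the ForceKeyFrame option
       * (see gst_vtenc_output_buffer); as a workaround, tear the session down
       * and create a fresh one, but no more often than VTENC_MIN_RESET_INTERVAL */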
VTCompressionSession *session;
gst_vtenc_destroy_session (self, &self->session);
if (GST_CLOCK_TIME_IS_VALID (self->last_create_session) &&
GST_ELEMENT_CLOCK (self) != NULL) {
GstClockTime now = gst_clock_get_time (GST_ELEMENT_CLOCK (self));
GstClockTimeDiff diff = GST_CLOCK_DIFF (self->last_create_session, now);
if (diff < VTENC_MIN_RESET_INTERVAL) {
GST_OBJECT_UNLOCK (self);
goto skip_frame;
}
}
GST_OBJECT_UNLOCK (self);
session = gst_vtenc_create_session (self);
GST_OBJECT_LOCK (self);
self->session = session;
}
}
vt_status = self->ctx->vt->VTCompressionSessionEncodeFrame (self->session,
pbuf, ts, duration, self->options, NULL, NULL);
if (vt_status != 0) {
GST_WARNING_OBJECT (self, "VTCompressionSessionEncodeFrame returned %d",
vt_status);
}
self->ctx->vt->VTCompressionSessionCompleteFrames (self->session,
*(self->ctx->cm->kFigTimeInvalid));
if (!self->expect_keyframe) {
CFDictionaryRemoveValue (self->options,
*(self->ctx->vt->kVTEncodeFrameOptionKey_ForceKeyFrame));
}
GST_OBJECT_UNLOCK (self);
cv->CVPixelBufferRelease (pbuf);
gst_buffer_unref (buf);
self->cur_inbuf = NULL;
return self->cur_flowret;
skip_frame:
{
GST_DEBUG_OBJECT (self, "skipping frame");
cv->CVPixelBufferRelease (pbuf);
gst_buffer_unref (buf);
self->cur_inbuf = NULL;
return GST_FLOW_OK;
}
cv_error:
{
gst_buffer_unref (buf);
self->cur_inbuf = NULL;
return GST_FLOW_ERROR;
}
}
static VTStatus
gst_vtenc_output_buffer (void *data, int a2, int a3, int a4,
FigSampleBuffer * sbuf, int a6, int a7)
{
GstVTEnc *self = data;
gboolean is_keyframe;
GstBuffer *buf;
/* This may happen if we don't have enough bitrate */
if (sbuf == NULL)
goto beach;
if (!gst_vtenc_negotiate_downstream (self, sbuf))
goto beach;
is_keyframe = gst_vtenc_buffer_is_keyframe (self, sbuf);
if (self->expect_keyframe && !is_keyframe)
goto expected_keyframe;
self->expect_keyframe = FALSE;
buf = gst_core_media_buffer_new (self->ctx, sbuf);
gst_buffer_set_caps (buf, GST_PAD_CAPS (self->srcpad));
gst_buffer_copy_metadata (buf, self->cur_inbuf, GST_BUFFER_COPY_TIMESTAMPS);
if (is_keyframe) {
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
} else {
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
}
GST_OBJECT_UNLOCK (self);
self->cur_flowret = gst_pad_push (self->srcpad, buf);
GST_OBJECT_LOCK (self);
return kVTSuccess;
beach:
return kVTSuccess;
expected_keyframe:
{
GST_INFO_OBJECT (self, "expected keyframe but output was not, "
"enabling reset_on_force_keyframe");
self->reset_on_force_keyframe = TRUE;
return kVTSuccess;
}
}
static gboolean
gst_vtenc_buffer_is_keyframe (GstVTEnc * self, FigSampleBuffer * sbuf)
{
gboolean result = FALSE;
CFArrayRef attachments_for_sample;
attachments_for_sample =
self->ctx->cm->FigSampleBufferGetSampleAttachmentsArray (sbuf, 0);
if (attachments_for_sample != NULL) {
CFDictionaryRef attachments;
CFBooleanRef depends_on_others;
attachments = CFArrayGetValueAtIndex (attachments_for_sample, 0);
depends_on_others = CFDictionaryGetValue (attachments,
*(self->ctx->cm->kFigSampleAttachmentKey_DependsOnOthers));
result = (depends_on_others == kCFBooleanFalse);
}
return result;
}
static void
gst_vtenc_register (GstPlugin * plugin,
const GstVTEncoderDetails * codec_details)
{
GTypeInfo type_info = {
sizeof (GstVTEncClass),
(GBaseInitFunc) gst_vtenc_base_init,
NULL,
(GClassInitFunc) gst_vtenc_class_init,
NULL,
NULL,
    sizeof (GstVTEnc),
0,
(GInstanceInitFunc) gst_vtenc_init,
};
gchar *type_name;
GType type;
gboolean result;
type_name = g_strdup_printf ("vtenc_%s", codec_details->element_name);
type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &type_info, 0);
g_type_set_qdata (type, GST_VTENC_CODEC_DETAILS_QDATA,
(gpointer) codec_details);
result = gst_element_register (plugin, type_name, GST_RANK_NONE, type);
if (!result) {
GST_ERROR_OBJECT (plugin, "failed to register element %s", type_name);
}
g_free (type_name);
}
static const GstVTEncoderDetails gst_vtenc_codecs[] = {
{"H.264", "h264", "video/x-h264", kVTFormatH264},
};
void
gst_vtenc_register_elements (GstPlugin * plugin)
{
guint i;
GST_DEBUG_CATEGORY_INIT (gst_vtenc_debug, "vtenc",
0, "Apple VideoToolbox Encoder Wrapper");
for (i = 0; i != G_N_ELEMENTS (gst_vtenc_codecs); i++)
gst_vtenc_register (plugin, &gst_vtenc_codecs[i]);
}

sys/applemedia/vtenc.h Normal file
@ -0,0 +1,89 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_VTENC_H__
#define __GST_VTENC_H__
#include <gst/gst.h>
#include "coremediactx.h"
G_BEGIN_DECLS
#define GST_VTENC_CAST(obj) \
((GstVTEnc *) (obj))
#define GST_VTENC_CLASS_GET_CODEC_DETAILS(klass) \
((const GstVTEncoderDetails *) g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass), \
GST_VTENC_CODEC_DETAILS_QDATA))
typedef struct _GstVTEncoderDetails GstVTEncoderDetails;
typedef struct _GstVTEncClassParams GstVTEncClassParams;
typedef struct _GstVTEncClass GstVTEncClass;
typedef struct _GstVTEnc GstVTEnc;
struct _GstVTEncoderDetails
{
const gchar * name;
const gchar * element_name;
const gchar * mimetype;
VTFormatId format_id;
};
struct _GstVTEncClass
{
GstElementClass parent_class;
};
struct _GstVTEnc
{
GstElement parent;
const GstVTEncoderDetails * details;
GstPad * sinkpad;
GstPad * srcpad;
gint usage;
guint bitrate;
GstCoreMediaCtx * ctx;
gboolean dump_properties;
gboolean dump_attributes;
gboolean reset_on_force_keyframe;
GstClockTime last_create_session;
gint negotiated_width, negotiated_height;
gint negotiated_fps_n, negotiated_fps_d;
gint caps_width, caps_height;
gint caps_fps_n, caps_fps_d;
VTCompressionSession * session;
CFMutableDictionaryRef options;
GstBuffer * cur_inbuf;
GstFlowReturn cur_flowret;
gboolean expect_keyframe;
};
void gst_vtenc_register_elements (GstPlugin * plugin);
G_END_DECLS
#endif /* __GST_VTENC_H__ */

sys/applemedia/vth264decbin.c Normal file
@ -0,0 +1,322 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include "vth264decbin.h"
#include <string.h>
#include <gst/video/video.h>
#define VT_H264_DEC_BIN_ERROR_STATE_DEFAULT FALSE
GST_DEBUG_CATEGORY_STATIC (gst_vt_h264_dec_bin_debug);
#define GST_CAT_DEFAULT gst_vt_h264_dec_bin_debug
enum
{
PROP_0,
PROP_ERROR_STATE,
PROP_HAPPY
};
enum
{
H264PARSE_OUTPUT_FORMAT_AVC_SAMPLE = 0,
H264PARSE_OUTPUT_FORMAT_BYTE_STREAM = 1,
H264PARSE_OUTPUT_FORMAT_INPUT = 2
};
static GstStaticPadTemplate vth264decbin_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-h264, "
"stream-format = (string) { byte-stream, avc-sample }")
);
static GstStaticPadTemplate vth264decbin_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("YUY2"))
);
#define TAA_VT_H264_DEC_BIN_GET_PRIVATE(obj) \
(G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_VT_H264_DEC_BIN, \
GstVTH264DecBinPrivate))
struct _GstVTH264DecBinPrivate
{
GstElement *parser;
GstPad *parser_sinkpad;
GstElement *decoder;
GstPad *decoder_srcpad;
gboolean error_state;
gboolean seen_output;
GstClockTime prev_input_ts;
gulong output_probe;
};
GST_BOILERPLATE (GstVTH264DecBin, gst_vt_h264_dec_bin, GstBin, GST_TYPE_BIN);
static gboolean gst_vt_h264_dec_bin_on_output (GstPad * pad,
GstMiniObject * mini_obj, gpointer user_data);
static void
gst_vt_h264_dec_bin_update_error_state (GstVTH264DecBin * self,
gboolean error_state)
{
GstVTH264DecBinPrivate *priv = self->priv;
GObject *obj = (GObject *) self;
GST_OBJECT_LOCK (self);
priv->error_state = error_state;
GST_OBJECT_UNLOCK (self);
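  /* keep a buffer probe on the decoder's src pad while we are in error state
   * or before any output has been seen; the probe flips the error state back
   * off as soon as a decoded buffer comes through */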
if (priv->output_probe == 0 && (error_state || !priv->seen_output)) {
GST_DEBUG_OBJECT (self, "attaching buffer probe");
priv->output_probe = gst_pad_add_buffer_probe (priv->decoder_srcpad,
G_CALLBACK (gst_vt_h264_dec_bin_on_output), self);
} else if (priv->output_probe != 0 && (!error_state && priv->seen_output)) {
GST_DEBUG_OBJECT (self, "detaching buffer probe");
gst_pad_remove_buffer_probe (priv->decoder_srcpad, priv->output_probe);
priv->output_probe = 0;
}
g_object_notify (obj, "error-state");
g_object_notify (obj, "happy");
}
static gboolean
gst_vt_h264_dec_bin_sink_setcaps (GstPad * pad, GstCaps * caps)
{
GstVTH264DecBin *self = GST_VT_H264_DEC_BIN_CAST (GST_PAD_PARENT (pad));
const gchar *format;
gint output_format;
gboolean access_unit;
format = gst_structure_get_string (gst_caps_get_structure (caps, 0),
"stream-format");
if (format == NULL)
goto no_stream_format;
if (strcmp (format, "byte-stream") == 0) {
output_format = H264PARSE_OUTPUT_FORMAT_AVC_SAMPLE;
access_unit = TRUE;
} else {
output_format = H264PARSE_OUTPUT_FORMAT_INPUT;
access_unit = FALSE;
}
g_object_set (self->priv->parser, "output-format", output_format,
"access-unit", access_unit, NULL);
return gst_pad_set_caps (GST_PAD_PEER (self->priv->parser_sinkpad), caps);
no_stream_format:
return FALSE;
}
static gboolean
gst_vt_h264_dec_bin_sink_event (GstPad * pad, GstEvent * event)
{
GstVTH264DecBin *self = GST_VT_H264_DEC_BIN_CAST (GST_PAD_PARENT (pad));
GstVTH264DecBinPrivate *priv = self->priv;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_NEWSEGMENT:
if (priv->seen_output) {
GST_DEBUG_OBJECT (self, "error state ON because of packetloss");
gst_vt_h264_dec_bin_update_error_state (self, TRUE);
}
break;
case GST_EVENT_FLUSH_STOP:
priv->seen_output = FALSE;
priv->prev_input_ts = GST_CLOCK_TIME_NONE;
GST_DEBUG_OBJECT (self, "error state OFF because of FLUSH_STOP");
gst_vt_h264_dec_bin_update_error_state (self, FALSE);
break;
default:
break;
}
return gst_pad_push_event (GST_PAD_PEER (priv->parser_sinkpad), event);
}
static GstFlowReturn
gst_vt_h264_dec_bin_sink_chain (GstPad * pad, GstBuffer * buffer)
{
GstVTH264DecBin *self = GST_VT_H264_DEC_BIN_CAST (GST_PAD_PARENT (pad));
GstVTH264DecBinPrivate *priv = self->priv;
GstClockTime cur_ts;
GstFlowReturn flow_ret;
cur_ts = GST_BUFFER_TIMESTAMP (buffer);
gst_vt_h264_dec_bin_update_error_state (self, priv->error_state);
flow_ret = gst_pad_push (GST_PAD_PEER (priv->parser_sinkpad), buffer);
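  /* no decoded output yet, not already in error state, and the input
   * timestamp has moved on: assume the decoder is stuck (e.g. waiting for a
   * keyframe) and raise the error state */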
if (!priv->seen_output && !priv->error_state &&
GST_CLOCK_TIME_IS_VALID (priv->prev_input_ts)) {
if (cur_ts != priv->prev_input_ts) {
GST_DEBUG_OBJECT (self,
"error state ON because of no output and detected timestamp gap");
gst_vt_h264_dec_bin_update_error_state (self, TRUE);
}
}
priv->prev_input_ts = cur_ts;
return flow_ret;
}
static gboolean
gst_vt_h264_dec_bin_on_output (GstPad * pad, GstMiniObject * mini_obj,
gpointer user_data)
{
GstVTH264DecBin *self = GST_VT_H264_DEC_BIN_CAST (user_data);
self->priv->seen_output = TRUE;
GST_DEBUG_OBJECT (self, "error state OFF because we saw output");
gst_vt_h264_dec_bin_update_error_state (self, FALSE);
return TRUE;
}
static void
gst_vt_h264_dec_bin_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"VTH264DecBin",
"Decoder/Video",
"VideoToolbox H.264 decoder bin",
"Ole André Vadla Ravnås <oravnas@cisco.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&vth264decbin_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&vth264decbin_src_template));
}
static void
gst_vt_h264_dec_bin_init (GstVTH264DecBin * self, GstVTH264DecBinClass * gclass)
{
GstVTH264DecBinPrivate *priv;
GstPad *ghost_pad;
self->priv = priv = TAA_VT_H264_DEC_BIN_GET_PRIVATE (self);
priv->parser = gst_element_factory_make ("h264parse", "parser");
priv->decoder = gst_element_factory_make ("vtdec_h264", "decoder");
gst_bin_add_many (GST_BIN_CAST (self), priv->parser, priv->decoder, NULL);
gst_element_link (priv->parser, priv->decoder);
priv->parser_sinkpad = gst_element_get_static_pad (priv->parser, "sink");
ghost_pad = gst_ghost_pad_new_from_template ("sink", priv->parser_sinkpad,
gst_static_pad_template_get (&vth264decbin_sink_template));
gst_pad_set_setcaps_function (ghost_pad, gst_vt_h264_dec_bin_sink_setcaps);
gst_pad_set_event_function (ghost_pad, gst_vt_h264_dec_bin_sink_event);
gst_pad_set_chain_function (ghost_pad, gst_vt_h264_dec_bin_sink_chain);
gst_element_add_pad (GST_ELEMENT_CAST (self), ghost_pad);
priv->decoder_srcpad = gst_element_get_static_pad (priv->decoder, "src");
ghost_pad = gst_ghost_pad_new_from_template ("src", priv->decoder_srcpad,
gst_static_pad_template_get (&vth264decbin_src_template));
gst_element_add_pad (GST_ELEMENT_CAST (self), ghost_pad);
priv->seen_output = FALSE;
priv->prev_input_ts = GST_CLOCK_TIME_NONE;
}
static void
gst_vt_h264_dec_bin_dispose (GObject * obj)
{
GstVTH264DecBin *self = GST_VT_H264_DEC_BIN_CAST (obj);
GstVTH264DecBinPrivate *priv = self->priv;
if (priv->parser_sinkpad != NULL) {
gst_object_unref (priv->parser_sinkpad);
priv->parser_sinkpad = NULL;
}
if (priv->decoder_srcpad != NULL) {
gst_object_unref (priv->decoder_srcpad);
priv->decoder_srcpad = NULL;
}
G_OBJECT_CLASS (parent_class)->dispose (obj);
}
static void
gst_vt_h264_dec_bin_get_property (GObject * obj, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstVTH264DecBin *self = GST_VT_H264_DEC_BIN_CAST (obj);
switch (prop_id) {
case PROP_ERROR_STATE:
GST_OBJECT_LOCK (self);
g_value_set_boolean (value, self->priv->error_state);
GST_OBJECT_UNLOCK (self);
break;
case PROP_HAPPY:
GST_OBJECT_LOCK (self);
g_value_set_boolean (value, !self->priv->error_state);
GST_OBJECT_UNLOCK (self);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (obj, prop_id, pspec);
break;
}
}
static void
gst_vt_h264_dec_bin_class_init (GstVTH264DecBinClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
gobject_class->dispose = gst_vt_h264_dec_bin_dispose;
gobject_class->get_property = gst_vt_h264_dec_bin_get_property;
g_type_class_add_private (klass, sizeof (GstVTH264DecBinPrivate));
g_object_class_install_property (gobject_class, PROP_ERROR_STATE,
g_param_spec_boolean ("error-state", "Error State",
"Whether the decoder is currently in an error state",
VT_H264_DEC_BIN_ERROR_STATE_DEFAULT,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_HAPPY,
g_param_spec_boolean ("happy", "Happy",
"Whether the decoder is currently not in an error state",
!VT_H264_DEC_BIN_ERROR_STATE_DEFAULT,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
GST_DEBUG_CATEGORY_INIT (gst_vt_h264_dec_bin_debug,
"vth264decbin", 0, "VideoToolbox H.264 decoder bin");
}

sys/applemedia/vth264decbin.h Normal file
@ -0,0 +1,61 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_VTH264DECBIN_H__
#define __GST_VTH264DECBIN_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_TYPE_VT_H264_DEC_BIN (gst_vt_h264_dec_bin_get_type())
#define GST_VT_H264_DEC_BIN(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_VT_H264_DEC_BIN, \
GstVTH264DecBin))
#define GST_VT_H264_DEC_BIN_CAST(obj) \
((GstVTH264DecBin *) (obj))
#define GST_VT_H264_DEC_BIN_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_VT_H264_DEC_BIN, \
GstVTH264DecBinClass))
#define GST_IS_VT_H264_DEC_BIN(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_VT_H264_DEC_BIN))
#define GST_IS_VT_H264_DEC_BIN_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_VT_H264_DEC_BIN))
typedef struct _GstVTH264DecBin GstVTH264DecBin;
typedef struct _GstVTH264DecBinPrivate GstVTH264DecBinPrivate;
typedef struct _GstVTH264DecBinClass GstVTH264DecBinClass;
struct _GstVTH264DecBin
{
GstBin parent;
GstVTH264DecBinPrivate * priv;
};
struct _GstVTH264DecBinClass
{
GstBinClass parent_class;
};
GType gst_vt_h264_dec_bin_get_type (void);
G_END_DECLS
#endif /* __GST_VTH264DECBIN_H__ */

sys/applemedia/vth264encbin.c Normal file
@ -0,0 +1,173 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include "vth264encbin.h"
#include <string.h>
#include <gst/video/video.h>
#define VT_H264_ENC_BIN_DEFAULT_BITRATE 768
GST_DEBUG_CATEGORY_STATIC (gst_vt_h264_enc_bin_debug);
#define GST_CAT_DEFAULT gst_vt_h264_enc_bin_debug
enum
{
PROP_0,
PROP_BITRATE
};
enum
{
H264PARSE_OUTPUT_FORMAT_AVC_SAMPLE = 0,
H264PARSE_OUTPUT_FORMAT_BYTE_STREAM = 1,
H264PARSE_OUTPUT_FORMAT_INPUT = 2
};
static GstStaticPadTemplate vth264encbin_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("YUY2"))
);
static GstStaticPadTemplate vth264encbin_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-h264, stream-format = (string) byte-stream"));
#define TAA_VT_H264_ENC_BIN_GET_PRIVATE(obj) \
(G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_VT_H264_ENC_BIN, \
GstVTH264EncBinPrivate))
struct _GstVTH264EncBinPrivate
{
GstElement *encoder;
GstElement *parser;
};
GST_BOILERPLATE (GstVTH264EncBin, gst_vt_h264_enc_bin, GstBin, GST_TYPE_BIN);
static void
gst_vt_h264_enc_bin_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"VTH264EncBin",
"Encoder/Video",
"VideoToolbox H.264 encoder bin",
"Ole André Vadla Ravnås <oravnas@cisco.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&vth264encbin_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&vth264encbin_src_template));
}
static void
gst_vt_h264_enc_bin_init (GstVTH264EncBin * self, GstVTH264EncBinClass * gclass)
{
GstVTH264EncBinPrivate *priv;
GstPad *encoder_sinkpad, *parser_srcpad, *ghost_pad;
self->priv = priv = TAA_VT_H264_ENC_BIN_GET_PRIVATE (self);
priv->encoder = gst_element_factory_make ("vtenc_h264", "encoder");
priv->parser = gst_element_factory_make ("h264parse", "parser");
gst_bin_add_many (GST_BIN_CAST (self), priv->encoder, priv->parser, NULL);
gst_element_link (priv->encoder, priv->parser);
encoder_sinkpad = gst_element_get_static_pad (priv->encoder, "sink");
ghost_pad = gst_ghost_pad_new_from_template ("sink", encoder_sinkpad,
gst_static_pad_template_get (&vth264encbin_sink_template));
gst_object_unref (encoder_sinkpad);
gst_element_add_pad (GST_ELEMENT_CAST (self), ghost_pad);
parser_srcpad = gst_element_get_static_pad (priv->parser, "src");
ghost_pad = gst_ghost_pad_new_from_template ("src", parser_srcpad,
gst_static_pad_template_get (&vth264encbin_src_template));
gst_object_unref (parser_srcpad);
gst_element_add_pad (GST_ELEMENT_CAST (self), ghost_pad);
g_object_set (priv->encoder, "usage", 6, NULL);
g_object_set (priv->parser,
"output-format", H264PARSE_OUTPUT_FORMAT_BYTE_STREAM,
"split-packetized", TRUE, NULL);
}
static void
gst_vt_h264_enc_bin_get_property (GObject * obj, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstVTH264EncBin *self = GST_VT_H264_ENC_BIN_CAST (obj);
switch (prop_id) {
case PROP_BITRATE:
g_object_get_property (G_OBJECT (self->priv->encoder),
g_param_spec_get_name (pspec), value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (obj, prop_id, pspec);
break;
}
}
static void
gst_vt_h264_enc_bin_set_property (GObject * obj, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstVTH264EncBin *self = GST_VT_H264_ENC_BIN_CAST (obj);
switch (prop_id) {
case PROP_BITRATE:
g_object_set_property (G_OBJECT (self->priv->encoder),
g_param_spec_get_name (pspec), value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (obj, prop_id, pspec);
break;
}
}
static void
gst_vt_h264_enc_bin_class_init (GstVTH264EncBinClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
gobject_class->get_property = gst_vt_h264_enc_bin_get_property;
gobject_class->set_property = gst_vt_h264_enc_bin_set_property;
g_type_class_add_private (klass, sizeof (GstVTH264EncBinPrivate));
g_object_class_install_property (gobject_class, PROP_BITRATE,
g_param_spec_uint ("bitrate", "Bitrate",
"Target video bitrate in kbps",
1, G_MAXUINT, VT_H264_ENC_BIN_DEFAULT_BITRATE,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
GST_DEBUG_CATEGORY_INIT (gst_vt_h264_enc_bin_debug,
"vth264encbin", 0, "VideoToolbox H.264 encoder bin");
}
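
To illustrate how the two wrapper bins are meant to be combined in a live path,
here is an untested sketch that encodes a test pattern with vth264encbin, decodes
it again with vth264decbin and watches the decoder bin's "happy" property. The
element and property names come from this commit; the pipeline layout, bitrate
and timing are assumptions made up for the example:

#include <gst/gst.h>

static void
on_happy_changed (GObject * obj, GParamSpec * pspec, gpointer user_data)
{
  gboolean happy;

  g_object_get (obj, "happy", &happy, NULL);
  g_print ("decoder bin is %s\n", happy ? "happy" : "in error state");
}

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *decbin;
  GError *error = NULL;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch ("videotestsrc ! ffmpegcolorspace ! "
      "video/x-raw-yuv, format=(fourcc)YUY2 ! vth264encbin bitrate=512 ! "
      "vth264decbin name=dec ! fakesink", &error);
  if (pipeline == NULL) {
    g_printerr ("failed to create pipeline: %s\n", error->message);
    g_clear_error (&error);
    return 1;
  }

  decbin = gst_bin_get_by_name (GST_BIN (pipeline), "dec");
  g_signal_connect (decbin, "notify::happy",
      G_CALLBACK (on_happy_changed), NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_usleep (10 * G_USEC_PER_SEC);
  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (decbin);
  gst_object_unref (pipeline);

  return 0;
}

Since "happy" is simply the inverse of "error-state", an application could
equally watch notify::error-state and, for example, ask the sender for a
keyframe over RTCP PLI whenever it turns on; vtenc_h264 reacts to such a
"rtcp-pli" custom upstream event by forcing the next frame to be a keyframe.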

sys/applemedia/vth264encbin.h Normal file
@ -0,0 +1,61 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_VTH264ENCBIN_H__
#define __GST_VTH264ENCBIN_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_TYPE_VT_H264_ENC_BIN (gst_vt_h264_enc_bin_get_type())
#define GST_VT_H264_ENC_BIN(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_VT_H264_ENC_BIN, \
GstVTH264EncBin))
#define GST_VT_H264_ENC_BIN_CAST(obj) \
((GstVTH264EncBin *) (obj))
#define GST_VT_H264_ENC_BIN_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_VT_H264_ENC_BIN, \
GstVTH264EncBinClass))
#define GST_IS_VT_H264_ENC_BIN(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_VT_H264_ENC_BIN))
#define GST_IS_VT_H264_ENC_BIN_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_VT_H264_ENC_BIN))
typedef struct _GstVTH264EncBin GstVTH264EncBin;
typedef struct _GstVTH264EncBinPrivate GstVTH264EncBinPrivate;
typedef struct _GstVTH264EncBinClass GstVTH264EncBinClass;
struct _GstVTH264EncBin
{
GstBin parent;
GstVTH264EncBinPrivate * priv;
};
struct _GstVTH264EncBinClass
{
GstBinClass parent_class;
};
GType gst_vt_h264_enc_bin_get_type (void);
G_END_DECLS
#endif /* __GST_VTH264ENCBIN_H__ */

sys/applemedia/vtutil.c Normal file
@ -0,0 +1,61 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "vtutil.h"
gchar *
gst_vtutil_object_to_string (CFTypeRef obj)
{
gchar *result;
CFStringRef s;
if (obj == NULL)
return g_strdup ("(null)");
s = CFCopyDescription (obj);
result = gst_vtutil_string_to_utf8 (s);
CFRelease (s);
return result;
}
gchar *
gst_vtutil_string_to_utf8 (CFStringRef s)
{
gchar *result;
CFIndex size;
size = CFStringGetMaximumSizeForEncoding (CFStringGetLength (s),
kCFStringEncodingUTF8);
result = g_malloc (size + 1);
CFStringGetCString (s, result, size + 1, kCFStringEncodingUTF8);
return result;
}
void
gst_vtutil_dict_set_i32 (CFMutableDictionaryRef dict, CFStringRef key,
gint32 value)
{
CFNumberRef number;
number = CFNumberCreate (NULL, kCFNumberSInt32Type, &value);
CFDictionarySetValue (dict, key, number);
CFRelease (number);
}

sys/applemedia/vtutil.h Normal file
@ -0,0 +1,35 @@
/*
* Copyright (C) 2010 Ole André Vadla Ravnås <oravnas@cisco.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_VTUTIL_H__
#define __GST_VTUTIL_H__
#include <glib.h>
#include <CoreFoundation/CoreFoundation.h>
G_BEGIN_DECLS
gchar * gst_vtutil_object_to_string (CFTypeRef obj);
gchar * gst_vtutil_string_to_utf8 (CFStringRef s);
void gst_vtutil_dict_set_i32 (CFMutableDictionaryRef dict,
CFStringRef key, gint32 value);
G_END_DECLS
#endif /* __GST_VTUTIL_H__ */