/*
 * Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@soundrop.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
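
/* Example pipeline (assumes avfvideosrc is registered by this plugin and that
 * videoconvert and osxvideosink are available on the system):
 *
 *   gst-launch-1.0 avfvideosrc device-index=0 ! videoconvert ! osxvideosink
 *
 * captures frames from the first capture device and renders them to a window.
 */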

#ifdef HAVE_CONFIG_H
# include "config.h"
#endif

#include "avfvideosrc.h"
#include "glcontexthelper.h"

#import <AVFoundation/AVFoundation.h>
#if !HAVE_IOS
#import <AppKit/AppKit.h>
#endif
#include <gst/video/video.h>
#include <gst/gl/gstglcontext.h>
#include "coremediabuffer.h"
#include "videotexturecache.h"

#define DEFAULT_DEVICE_INDEX -1
#define DEFAULT_DO_STATS FALSE

#define DEVICE_FPS_N 25
#define DEVICE_FPS_D 1

#define BUFFER_QUEUE_SIZE 2

GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
#define GST_CAT_DEFAULT gst_avf_video_src_debug

static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (
#if !HAVE_IOS
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
            "UYVY") ", "
        "texture-target = " GST_GL_TEXTURE_TARGET_RECTANGLE_STR ";"
#else
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
            "NV12") ", "
        "texture-target = " GST_GL_TEXTURE_TARGET_2D_STR "; "
#endif
        "video/x-raw, "
        "format = (string) { NV12, UYVY, YUY2 }, "
        "framerate = " GST_VIDEO_FPS_RANGE ", "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE "; "

        "video/x-raw, "
        "format = (string) BGRA, "
        "framerate = " GST_VIDEO_FPS_RANGE ", "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE "; "
));

typedef enum _QueueState {
  NO_BUFFERS = 1,
  HAS_BUFFER_OR_STOP_REQUEST,
} QueueState;
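
/* Buffers are handed from the capture callback (producer, workerQueue) to
 * create: (consumer, streaming thread) through bufQueue, guarded by
 * bufQueueLock, an NSConditionLock holding one of the QueueState values:
 * create: blocks until HAS_BUFFER_OR_STOP_REQUEST, raised either when a
 * buffer is queued or when unlock sets stopRequest. The queue is capped at
 * BUFFER_QUEUE_SIZE entries; the oldest frame is dropped when it is full. */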

#define gst_avf_video_src_parent_class parent_class
G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);

@interface GstAVFVideoSrcImpl : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
  GstElement *element;
  GstBaseSrc *baseSrc;
  GstPushSrc *pushSrc;

  gint deviceIndex;
  BOOL doStats;

  AVCaptureSession *session;
  AVCaptureInput *input;
  AVCaptureVideoDataOutput *output;
  AVCaptureDevice *device;
  AVCaptureConnection *connection;
  CMClockRef inputClock;

  dispatch_queue_t mainQueue;
  dispatch_queue_t workerQueue;
  NSConditionLock *bufQueueLock;
  NSMutableArray *bufQueue;
  BOOL stopRequest;

  GstCaps *caps;
  GstVideoFormat format;
  gint width, height;
  GstClockTime latency;
  guint64 offset;

  GstClockTime lastSampling;
  guint count;
  gint fps;
  BOOL captureScreen;
  BOOL captureScreenCursor;
  BOOL captureScreenMouseClicks;

  BOOL useVideoMeta;
  GstGLContextHelper *ctxh;
  GstVideoTextureCache *textureCache;
}

- (id)init;
- (id)initWithSrc:(GstPushSrc *)src;
- (void)finalize;

@property int deviceIndex;
@property BOOL doStats;
@property int fps;
@property BOOL captureScreen;
@property BOOL captureScreenCursor;
@property BOOL captureScreenMouseClicks;

- (BOOL)openScreenInput;
- (BOOL)openDeviceInput;
- (BOOL)openDevice;
- (void)closeDevice;
- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format;
#if !HAVE_IOS
- (CGDirectDisplayID)getDisplayIdFromDeviceIndex;
#endif
- (GstCaps *)getDeviceCaps;
- (BOOL)setDeviceCaps:(GstVideoInfo *)info;
- (BOOL)getSessionPresetCaps:(GstCaps *)result;
- (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
- (GstCaps *)getCaps;
- (BOOL)setCaps:(GstCaps *)new_caps;
- (BOOL)start;
- (BOOL)stop;
- (BOOL)unlock;
- (BOOL)unlockStop;
- (BOOL)query:(GstQuery *)query;
- (GstStateChangeReturn)changeState:(GstStateChange)transition;
- (GstFlowReturn)create:(GstBuffer **)buf;
- (GstCaps *)fixate:(GstCaps *)caps;
- (BOOL)decideAllocation:(GstQuery *)query;
- (void)updateStatistics;
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection;

@end

@implementation GstAVFVideoSrcImpl

@synthesize deviceIndex, doStats, fps, captureScreen,
            captureScreenCursor, captureScreenMouseClicks;

- (id)init
{
  return [self initWithSrc:NULL];
}

- (id)initWithSrc:(GstPushSrc *)src
{
  if ((self = [super init])) {
    element = GST_ELEMENT_CAST (src);
    baseSrc = GST_BASE_SRC_CAST (src);
    pushSrc = src;

    deviceIndex = DEFAULT_DEVICE_INDEX;
    captureScreen = NO;
    captureScreenCursor = NO;
    captureScreenMouseClicks = NO;
    useVideoMeta = NO;
    textureCache = NULL;
    ctxh = gst_gl_context_helper_new (element);
    mainQueue =
        dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.main", NULL);
    workerQueue =
        dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.output", NULL);

    gst_base_src_set_live (baseSrc, TRUE);
    gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
  }

  return self;
}

- (void)finalize
{
  dispatch_release (mainQueue);
  mainQueue = NULL;
  dispatch_release (workerQueue);
  workerQueue = NULL;

  [super finalize];
}

- (BOOL)openDeviceInput
{
  NSString *mediaType = AVMediaTypeVideo;
  NSError *err;

  if (deviceIndex == DEFAULT_DEVICE_INDEX) {
    device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
    if (device == nil) {
      GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                          ("No video capture devices found"), (NULL));
      return NO;
    }
  } else {
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
    if (deviceIndex >= [devices count]) {
      GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                          ("Invalid video capture device index"), (NULL));
      return NO;
    }
    device = [devices objectAtIndex:deviceIndex];
  }
  g_assert (device != nil);
  [device retain];

  GST_INFO ("Opening '%s'", [[device localizedName] UTF8String]);

  input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                error:&err];
  if (input == nil) {
    GST_ELEMENT_ERROR (element, RESOURCE, BUSY,
        ("Failed to open device: %s",
        [[err localizedDescription] UTF8String]),
        (NULL));
    [device release];
    device = nil;
    return NO;
  }
  [input retain];
  return YES;
}

- (BOOL)openScreenInput
{
#if HAVE_IOS
  return NO;
#else
  CGDirectDisplayID displayId;

  GST_DEBUG_OBJECT (element, "Opening screen input");

  displayId = [self getDisplayIdFromDeviceIndex];
  if (displayId == 0)
    return NO;

  AVCaptureScreenInput *screenInput =
      [[AVCaptureScreenInput alloc] initWithDisplayID:displayId];

  @try {
    [screenInput setValue:[NSNumber numberWithBool:captureScreenCursor]
                   forKey:@"capturesCursor"];

  } @catch (NSException *exception) {
    if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
      GST_WARNING ("An unexpected error occurred: %s",
                   [[exception reason] UTF8String]);
    }
    GST_WARNING ("Capturing cursor is only supported in OS X >= 10.8");
  }
  screenInput.capturesMouseClicks = captureScreenMouseClicks;
  input = screenInput;
  [input retain];
  return YES;
#endif
}

- (BOOL)openDevice
{
  BOOL success = NO, *successPtr = &success;

  GST_DEBUG_OBJECT (element, "Opening device");

  dispatch_sync (mainQueue, ^{
    BOOL ret;

    if (captureScreen)
      ret = [self openScreenInput];
    else
      ret = [self openDeviceInput];

    if (!ret)
      return;

    output = [[AVCaptureVideoDataOutput alloc] init];
    [output setSampleBufferDelegate:self
                              queue:workerQueue];
    output.alwaysDiscardsLateVideoFrames = YES;
    output.videoSettings = nil; /* device native format */

    session = [[AVCaptureSession alloc] init];
    [session addInput:input];
    [session addOutput:output];

    /* retained by session */
    connection = [[output connections] firstObject];
    inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;

    *successPtr = YES;
  });

  GST_DEBUG_OBJECT (element, "Opening device %s", success ? "succeeded" : "failed");

  return success;
}

- (void)closeDevice
{
  GST_DEBUG_OBJECT (element, "Closing device");

  dispatch_sync (mainQueue, ^{
    g_assert (![session isRunning]);

    connection = nil;
    inputClock = nil;

    [session removeInput:input];
    [session removeOutput:output];

    [session release];
    session = nil;

    [input release];
    input = nil;

    [output release];
    output = nil;

    if (!captureScreen) {
      [device release];
      device = nil;
    }

    if (caps)
      gst_caps_unref (caps);
    caps = NULL;
  });
}

#define GST_AVF_CAPS_NEW(format, w, h, fps_n, fps_d)                  \
    (gst_caps_new_simple ("video/x-raw",                              \
        "width", G_TYPE_INT, w,                                       \
        "height", G_TYPE_INT, h,                                      \
        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
        "framerate", GST_TYPE_FRACTION, (fps_n), (fps_d),             \
        NULL))

#define GST_AVF_FPS_RANGE_CAPS_NEW(format, w, h, min_fps_n, min_fps_d, max_fps_n, max_fps_d) \
    (gst_caps_new_simple ("video/x-raw",                              \
        "width", G_TYPE_INT, w,                                       \
        "height", G_TYPE_INT, h,                                      \
        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
        "framerate", GST_TYPE_FRACTION_RANGE, (min_fps_n), (min_fps_d), (max_fps_n), (max_fps_d), \
        NULL))

- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format
{
  GstVideoFormat gst_format = GST_VIDEO_FORMAT_UNKNOWN;

  switch ([pixel_format integerValue]) {
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
      gst_format = GST_VIDEO_FORMAT_NV12;
      break;
    case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
      gst_format = GST_VIDEO_FORMAT_UYVY;
      break;
    case kCVPixelFormatType_32BGRA: /* BGRA */
      gst_format = GST_VIDEO_FORMAT_BGRA;
      break;
    case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
      gst_format = GST_VIDEO_FORMAT_YUY2;
      break;
    default:
      GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
          [[pixel_format stringValue] UTF8String]);
      break;
  }

  return gst_format;
}

#if !HAVE_IOS
- (CGDirectDisplayID)getDisplayIdFromDeviceIndex
{
  NSDictionary *description;
  NSNumber *displayId;
  NSArray *screens = [NSScreen screens];

  if (deviceIndex == DEFAULT_DEVICE_INDEX)
    return kCGDirectMainDisplay;
  if (deviceIndex >= [screens count]) {
    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                        ("Invalid screen capture device index"), (NULL));
    return 0;
  }
  description = [[screens objectAtIndex:deviceIndex] deviceDescription];
  displayId = [description objectForKey:@"NSScreenNumber"];
  return [displayId unsignedIntegerValue];
}
#endif

- (GstCaps *)getDeviceCaps
{
  NSArray *formats = [device valueForKey:@"formats"];
  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
  GstCaps *result_caps, *result_gl_caps;
#if !HAVE_IOS
  GstVideoFormat gl_format = GST_VIDEO_FORMAT_UYVY;
#else
  GstVideoFormat gl_format = GST_VIDEO_FORMAT_NV12;
#endif

  GST_DEBUG_OBJECT (element, "Getting device caps");

  result_caps = gst_caps_new_empty ();
  result_gl_caps = gst_caps_new_empty ();

  /* Do not use AVCaptureDeviceFormat or AVFrameRateRange only
   * available in iOS >= 7.0. We use a dynamic approach with key-value
   * coding or performSelector */
  for (NSObject *f in [formats reverseObjectEnumerator]) {
    CMFormatDescriptionRef formatDescription;
    CMVideoDimensions dimensions;

    /* formatDescription can't be retrieved with valueForKey so use a selector here */
    formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
    dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
    for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
      int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
      gdouble min_fps, max_fps;

      [[rate valueForKey:@"minFrameRate"] getValue:&min_fps];
      gst_util_double_to_fraction (min_fps, &min_fps_n, &min_fps_d);

      [[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
      gst_util_double_to_fraction (max_fps, &max_fps_n, &max_fps_d);

      for (NSNumber *pixel_format in pixel_formats) {
        GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];

        if (gst_format != GST_VIDEO_FORMAT_UNKNOWN) {
          if (min_fps != max_fps)
            gst_caps_append (result_caps, GST_AVF_FPS_RANGE_CAPS_NEW (gst_format, dimensions.width, dimensions.height, min_fps_n, min_fps_d, max_fps_n, max_fps_d));
          else
            gst_caps_append (result_caps, GST_AVF_CAPS_NEW (gst_format, dimensions.width, dimensions.height, max_fps_n, max_fps_d));
        }

        if (gst_format == gl_format) {
          GstCaps *gl_caps;
          if (min_fps != max_fps) {
            gl_caps = GST_AVF_FPS_RANGE_CAPS_NEW (gl_format,
                    dimensions.width, dimensions.height,
                    min_fps_n, min_fps_d,
                    max_fps_n, max_fps_d);
          } else {
            gl_caps = GST_AVF_CAPS_NEW (gl_format,
                    dimensions.width, dimensions.height,
                    max_fps_n, max_fps_d);
          }
          gst_caps_set_features (gl_caps, 0,
                  gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
                      NULL));
          gst_caps_set_simple (gl_caps,
                  "texture-target", G_TYPE_STRING,
#if !HAVE_IOS
                  GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
#else
                  GST_GL_TEXTURE_TARGET_2D_STR,
#endif
                  NULL);
          gst_caps_append (result_gl_caps, gl_caps);
        }
      }
    }
  }

  result_gl_caps = gst_caps_simplify (gst_caps_merge (result_gl_caps, result_caps));

  GST_DEBUG_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT, result_gl_caps);

  return result_gl_caps;
}

- (BOOL)setDeviceCaps:(GstVideoInfo *)info
{
  double framerate;
  gboolean found_format = FALSE, found_framerate = FALSE;
  NSArray *formats = [device valueForKey:@"formats"];
  gst_util_fraction_to_double (info->fps_n, info->fps_d, &framerate);

  GST_DEBUG_OBJECT (element, "Setting device caps");

  if ([device lockForConfiguration:NULL] == YES) {
    for (NSObject *f in formats) {
      CMFormatDescriptionRef formatDescription;
      CMVideoDimensions dimensions;

      formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
      dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
      if (dimensions.width == info->width && dimensions.height == info->height) {
        found_format = TRUE;
        [device setValue:f forKey:@"activeFormat"];
        for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
          gdouble min_frame_rate, max_frame_rate;

          [[rate valueForKey:@"minFrameRate"] getValue:&min_frame_rate];
          [[rate valueForKey:@"maxFrameRate"] getValue:&max_frame_rate];
          if ((framerate >= min_frame_rate - 0.00001) &&
              (framerate <= max_frame_rate + 0.00001)) {
            NSValue *frame_duration_value;
            found_framerate = TRUE;
            if (min_frame_rate == max_frame_rate) {
              /* on mac we get tight ranges and an exception is raised if the
               * frame duration doesn't match the one reported in the range to
               * the last decimal point
               */
              frame_duration_value = [rate valueForKey:@"minFrameDuration"];
            } else {
              double frame_duration;

              gst_util_fraction_to_double (info->fps_d, info->fps_n, &frame_duration);
              frame_duration_value = [NSNumber numberWithDouble:frame_duration];
            }
            [device setValue:frame_duration_value forKey:@"activeVideoMinFrameDuration"];
            @try {
              /* Only available on OSX >= 10.8 and iOS >= 7.0 */
              [device setValue:frame_duration_value forKey:@"activeVideoMaxFrameDuration"];
            } @catch (NSException *exception) {
              if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
                GST_WARNING ("An unexpected error occurred: %s",
                              [exception.reason UTF8String]);
              }
            }
            break;
          }
        }
      }
    }
    if (!found_format) {
      GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
      return NO;
    }
    if (!found_framerate) {
      GST_WARNING ("Unsupported capture framerate %d/%d", info->fps_n, info->fps_d);
      return NO;
    }
  } else {
    GST_WARNING ("Couldn't lock device for configuration");
    return NO;
  }
  return YES;
}

- (BOOL)getSessionPresetCaps:(GstCaps *)result
{
  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
  for (NSNumber *pixel_format in pixel_formats) {
    GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
    if (gst_format == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

#if HAVE_IOS
    if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1920, 1080, DEVICE_FPS_N, DEVICE_FPS_D));
#endif
    if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1280, 720, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 640, 480, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 480, 360, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset352x288])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 352, 288, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetLow])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 192, 144, DEVICE_FPS_N, DEVICE_FPS_D));
  }

  GST_LOG_OBJECT (element, "Session presets returned the following caps %" GST_PTR_FORMAT, result);

  return YES;
}

- (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
{
  GST_DEBUG_OBJECT (element, "Setting session preset caps");

  if ([device lockForConfiguration:NULL] != YES) {
    GST_WARNING ("Couldn't lock device for configuration");
    return NO;
  }

  switch (info->width) {
  case 192:
    session.sessionPreset = AVCaptureSessionPresetLow;
    break;
  case 352:
    session.sessionPreset = AVCaptureSessionPreset352x288;
    break;
  case 480:
    session.sessionPreset = AVCaptureSessionPresetMedium;
    break;
  case 640:
    session.sessionPreset = AVCaptureSessionPreset640x480;
    break;
  case 1280:
    session.sessionPreset = AVCaptureSessionPreset1280x720;
    break;
#if HAVE_IOS
  case 1920:
    session.sessionPreset = AVCaptureSessionPreset1920x1080;
    break;
#endif
  default:
    GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
    return NO;
  }
  return YES;
}

- (GstCaps *)getCaps
{
  GstCaps *result;
  NSArray *pixel_formats;

  if (session == nil)
    return NULL; /* BaseSrc will return template caps */

  result = gst_caps_new_empty ();
  pixel_formats = output.availableVideoCVPixelFormatTypes;

  if (captureScreen) {
#if !HAVE_IOS
    CGRect rect = CGDisplayBounds ([self getDisplayIdFromDeviceIndex]);
    for (NSNumber *pixel_format in pixel_formats) {
      GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
      if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
        gst_caps_append (result, gst_caps_new_simple ("video/x-raw",
            "width", G_TYPE_INT, (int)rect.size.width,
            "height", G_TYPE_INT, (int)rect.size.height,
            "format", G_TYPE_STRING, gst_video_format_to_string (gst_format),
            NULL));
    }
#else
    GST_WARNING ("Screen capture is not supported by iOS");
#endif
    return result;
  }

  @try {
    result = gst_caps_merge (result, [self getDeviceCaps]);
  } @catch (NSException *exception) {
    if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
      GST_WARNING ("An unexpected error occurred: %s", [exception.reason UTF8String]);
      return result;
    }

    /* Fallback on session presets API for iOS < 7.0 */
    [self getSessionPresetCaps:result];
  }

  return result;
}

- (BOOL)setCaps:(GstCaps *)new_caps
{
  GstVideoInfo info;
  BOOL success = YES, *successPtr = &success;

  gst_video_info_init (&info);
  gst_video_info_from_caps (&info, new_caps);

  width = info.width;
  height = info.height;
  format = info.finfo->format;
  latency = gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);

  dispatch_sync (mainQueue, ^{
    int newformat;

    if (captureScreen) {
#if !HAVE_IOS
      AVCaptureScreenInput *screenInput = (AVCaptureScreenInput *)input;
      screenInput.minFrameDuration = CMTimeMake(info.fps_d, info.fps_n);
#else
      GST_WARNING ("Screen capture is not supported by iOS");
      *successPtr = NO;
      return;
#endif
    } else {
      @try {

        /* formats and activeFormat keys are only available on OSX >= 10.7 and iOS >= 7.0 */
        *successPtr = [self setDeviceCaps:(GstVideoInfo *)&info];
        if (*successPtr != YES)
          return;

      } @catch (NSException *exception) {

        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
          GST_WARNING ("An unexpected error occurred: %s", [exception.reason UTF8String]);
          *successPtr = NO;
          return;
        }

        /* Fallback on session presets API for iOS < 7.0 */
        *successPtr = [self setSessionPresetCaps:(GstVideoInfo *)&info];
        if (*successPtr != YES)
          return;
      }
    }

    switch (format) {
      case GST_VIDEO_FORMAT_NV12:
        newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
        break;
      case GST_VIDEO_FORMAT_UYVY:
        newformat = kCVPixelFormatType_422YpCbCr8;
        break;
      case GST_VIDEO_FORMAT_YUY2:
        newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
        break;
      case GST_VIDEO_FORMAT_BGRA:
        newformat = kCVPixelFormatType_32BGRA;
        break;
      default:
        *successPtr = NO;
        GST_WARNING ("Unsupported output format %s",
            gst_video_format_to_string (format));
        return;
    }

    GST_INFO_OBJECT (element,
        "width: %d height: %d format: %s", width, height,
        gst_video_format_to_string (format));

    output.videoSettings = [NSDictionary
        dictionaryWithObject:[NSNumber numberWithInt:newformat]
                      forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];

    gst_caps_replace (&caps, new_caps);
    GST_INFO_OBJECT (element, "configured caps %"GST_PTR_FORMAT, caps);

    if (![session isRunning])
      [session startRunning];

    /* Unlock device configuration only after session is started so the session
     * won't reset the capture formats */
    [device unlockForConfiguration];
  });

  return success;
}

- (BOOL)start
{
  bufQueueLock = [[NSConditionLock alloc] initWithCondition:NO_BUFFERS];
  bufQueue = [[NSMutableArray alloc] initWithCapacity:BUFFER_QUEUE_SIZE];
  stopRequest = NO;

  offset = 0;
  latency = GST_CLOCK_TIME_NONE;

  lastSampling = GST_CLOCK_TIME_NONE;
  count = 0;
  fps = -1;

  return YES;
}

- (BOOL)stop
{
  dispatch_sync (mainQueue, ^{ [session stopRunning]; });
  dispatch_sync (workerQueue, ^{});

  [bufQueueLock release];
  bufQueueLock = nil;
  [bufQueue release];
  bufQueue = nil;

  if (textureCache)
    gst_video_texture_cache_free (textureCache);
  textureCache = NULL;

  if (ctxh)
    gst_gl_context_helper_free (ctxh);
  ctxh = NULL;

  return YES;
}

- (BOOL)query:(GstQuery *)query
{
  BOOL result = NO;

  if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
    if (device != nil && caps != NULL) {
      GstClockTime min_latency, max_latency;

      min_latency = max_latency = latency;
      result = YES;

      GST_DEBUG_OBJECT (element, "reporting latency of min %" GST_TIME_FORMAT
          " max %" GST_TIME_FORMAT,
          GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
      gst_query_set_latency (query, TRUE, min_latency, max_latency);
    }
  } else {
    result = GST_BASE_SRC_CLASS (parent_class)->query (baseSrc, query);
  }

  return result;
}

- (BOOL)unlock
{
  [bufQueueLock lock];
  stopRequest = YES;
  [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];

  return YES;
}

- (BOOL)unlockStop
{
  [bufQueueLock lock];
  stopRequest = NO;
  [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];

  return YES;
}

- (GstStateChangeReturn)changeState:(GstStateChange)transition
{
  GstStateChangeReturn ret;

  if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
    if (![self openDevice])
      return GST_STATE_CHANGE_FAILURE;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  if (transition == GST_STATE_CHANGE_READY_TO_NULL)
    [self closeDevice];

  return ret;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)aConnection
{
  GstClockTime timestamp, duration;

  [bufQueueLock lock];

  if (stopRequest) {
    [bufQueueLock unlock];
    return;
  }

  [self getSampleBuffer:sampleBuffer timestamp:&timestamp duration:&duration];

  if (timestamp == GST_CLOCK_TIME_NONE) {
    [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
    return;
  }

  if ([bufQueue count] == BUFFER_QUEUE_SIZE)
    [bufQueue removeLastObject];

  [bufQueue insertObject:@{@"sbuf": (id)sampleBuffer,
                           @"timestamp": @(timestamp),
                           @"duration": @(duration)}
                 atIndex:0];

  [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
}

- (GstFlowReturn)create:(GstBuffer **)buf
{
  CMSampleBufferRef sbuf;
  CVImageBufferRef image_buf;
  CVPixelBufferRef pixel_buf;
  size_t cur_width, cur_height;
  GstClockTime timestamp, duration;

  [bufQueueLock lockWhenCondition:HAS_BUFFER_OR_STOP_REQUEST];
  if (stopRequest) {
    [bufQueueLock unlock];
    return GST_FLOW_FLUSHING;
  }

  NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
  sbuf = (CMSampleBufferRef) dic[@"sbuf"];
  timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
  duration = (GstClockTime) [dic[@"duration"] longLongValue];
  CFRetain (sbuf);
  [bufQueue removeLastObject];
  [bufQueueLock unlockWithCondition:
      ([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];

  /* Check output frame size dimensions */
  image_buf = CMSampleBufferGetImageBuffer (sbuf);
  if (image_buf) {
    pixel_buf = (CVPixelBufferRef) image_buf;
    cur_width = CVPixelBufferGetWidth (pixel_buf);
    cur_height = CVPixelBufferGetHeight (pixel_buf);

    if (width != cur_width || height != cur_height) {
      /* Set new caps according to current frame dimensions */
      GST_WARNING ("Output frame size has changed %dx%d -> %dx%d, updating caps",
          width, height, (int)cur_width, (int)cur_height);
      width = cur_width;
      height = cur_height;
      gst_caps_set_simple (caps,
        "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height,
        NULL);
      gst_pad_push_event (GST_BASE_SRC_PAD (baseSrc), gst_event_new_caps (caps));
    }
  }

  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta);
  if (*buf == NULL) {
    CFRelease (sbuf);
    return GST_FLOW_ERROR;
  }
  CFRelease (sbuf);

  if (textureCache != NULL) {
    *buf = gst_video_texture_cache_get_gl_buffer (textureCache, *buf);
    if (*buf == NULL)
      return GST_FLOW_ERROR;
  }

  GST_BUFFER_OFFSET (*buf) = offset++;
  GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (*buf) + 1;
  GST_BUFFER_TIMESTAMP (*buf) = timestamp;
  GST_BUFFER_DURATION (*buf) = duration;

  if (doStats)
    [self updateStatistics];

  return GST_FLOW_OK;
}

- (GstCaps *)fixate:(GstCaps *)new_caps
{
  GstStructure *structure;

  new_caps = gst_caps_make_writable (new_caps);
  new_caps = gst_caps_truncate (new_caps);
  structure = gst_caps_get_structure (new_caps, 0);
  /* crank up to 11. This is what the presets do, but we don't use the presets
   * in ios >= 7.0 */
  gst_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);
  gst_structure_fixate_field_nearest_fraction (structure, "framerate", G_MAXINT, 1);

  return gst_caps_fixate (new_caps);
}

- (BOOL)decideAllocation:(GstQuery *)query
{
  GstCaps *alloc_caps;
  GstCapsFeatures *features;
  gboolean ret;

  ret = GST_BASE_SRC_CLASS (parent_class)->decide_allocation (baseSrc, query);
  if (!ret)
    return ret;

  gst_query_parse_allocation (query, &alloc_caps, NULL);
  features = gst_caps_get_features (alloc_caps, 0);
  if (gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
    /* Downstream negotiated GLMemory: make sure we have a GL context and a
     * texture cache bound to it, recreating the cache if the context changed. */
    gst_gl_context_helper_ensure_context (ctxh);
    GST_INFO_OBJECT (element, "pushing textures, context %p old context %p",
        ctxh->context, textureCache ? textureCache->ctx : NULL);
    if (textureCache && textureCache->ctx != ctxh->context) {
      gst_video_texture_cache_free (textureCache);
      textureCache = NULL;
    }
    textureCache = gst_video_texture_cache_new (ctxh->context);
    gst_video_texture_cache_set_format (textureCache, format, alloc_caps);
  }

  return TRUE;
}

- (void)getSampleBuffer:(CMSampleBufferRef)sbuf
              timestamp:(GstClockTime *)outTimestamp
               duration:(GstClockTime *)outDuration
{
  CMSampleTimingInfo time_info;
  GstClockTime timestamp, avf_timestamp, duration, input_clock_now, input_clock_diff, running_time;
  CMItemCount num_timings;
  GstClock *clock;
  CMTime now;

  timestamp = GST_CLOCK_TIME_NONE;
  duration = GST_CLOCK_TIME_NONE;
  if (CMSampleBufferGetOutputSampleTimingInfoArray(sbuf, 1, &time_info, &num_timings) == noErr) {
    avf_timestamp = gst_util_uint64_scale (GST_SECOND,
            time_info.presentationTimeStamp.value, time_info.presentationTimeStamp.timescale);

    if (CMTIME_IS_VALID (time_info.duration) && time_info.duration.timescale != 0)
      duration = gst_util_uint64_scale (GST_SECOND,
          time_info.duration.value, time_info.duration.timescale);

    now = CMClockGetTime(inputClock);
    input_clock_now = gst_util_uint64_scale (GST_SECOND,
        now.value, now.timescale);
    input_clock_diff = input_clock_now - avf_timestamp;

    GST_OBJECT_LOCK (element);
    clock = GST_ELEMENT_CLOCK (element);
    if (clock) {
      running_time = gst_clock_get_time (clock) - element->base_time;
      /* We use presentationTimeStamp to determine how much time it took
       * between capturing and receiving the frame in our delegate
       * (e.g. how long it spent in AVF queues), then we subtract that time
       * from our running time to get the actual timestamp.
       */
      if (running_time >= input_clock_diff)
        timestamp = running_time - input_clock_diff;
      else
        timestamp = running_time;

      GST_DEBUG_OBJECT (element, "AVF clock: %"GST_TIME_FORMAT ", AVF PTS: %"GST_TIME_FORMAT
          ", AVF clock diff: %"GST_TIME_FORMAT
          ", running time: %"GST_TIME_FORMAT ", out PTS: %"GST_TIME_FORMAT,
          GST_TIME_ARGS (input_clock_now), GST_TIME_ARGS (avf_timestamp),
          GST_TIME_ARGS (input_clock_diff),
          GST_TIME_ARGS (running_time), GST_TIME_ARGS (timestamp));
    } else {
      /* no clock, can't set timestamps */
      timestamp = GST_CLOCK_TIME_NONE;
    }
    GST_OBJECT_UNLOCK (element);
  }

  *outTimestamp = timestamp;
  *outDuration = duration;
}

- (void)updateStatistics
{
  GstClock *clock;

  GST_OBJECT_LOCK (element);
  clock = GST_ELEMENT_CLOCK (element);
  if (clock != NULL)
    gst_object_ref (clock);
  GST_OBJECT_UNLOCK (element);

  if (clock != NULL) {
    GstClockTime now = gst_clock_get_time (clock);
    gst_object_unref (clock);

    count++;

    if (GST_CLOCK_TIME_IS_VALID (lastSampling)) {
      if (now - lastSampling >= GST_SECOND) {
        GST_OBJECT_LOCK (element);
        fps = count;
        GST_OBJECT_UNLOCK (element);

        g_object_notify (G_OBJECT (element), "fps");

        lastSampling = now;
        count = 0;
      }
    } else {
      lastSampling = now;
    }
  }
}

@end

/*
 * Glue code
 */

enum
{
  PROP_0,
  PROP_DEVICE_INDEX,
  PROP_DO_STATS,
  PROP_FPS,
#if !HAVE_IOS
  PROP_CAPTURE_SCREEN,
  PROP_CAPTURE_SCREEN_CURSOR,
  PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
#endif
};

static void gst_avf_video_src_finalize (GObject * obj);
static void gst_avf_video_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
static void gst_avf_video_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_avf_video_src_change_state (
    GstElement * element, GstStateChange transition);
static GstCaps * gst_avf_video_src_get_caps (GstBaseSrc * basesrc,
    GstCaps * filter);
static gboolean gst_avf_video_src_set_caps (GstBaseSrc * basesrc,
    GstCaps * caps);
static gboolean gst_avf_video_src_start (GstBaseSrc * basesrc);
static gboolean gst_avf_video_src_stop (GstBaseSrc * basesrc);
static gboolean gst_avf_video_src_query (GstBaseSrc * basesrc,
    GstQuery * query);
static gboolean gst_avf_video_src_unlock (GstBaseSrc * basesrc);
static gboolean gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc);
static GstFlowReturn gst_avf_video_src_create (GstPushSrc * pushsrc,
    GstBuffer ** buf);
static GstCaps * gst_avf_video_src_fixate (GstBaseSrc * bsrc,
    GstCaps * caps);
static gboolean gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
    GstQuery * query);

static void
gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
  GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
  GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);

  gobject_class->finalize = gst_avf_video_src_finalize;
  gobject_class->get_property = gst_avf_video_src_get_property;
  gobject_class->set_property = gst_avf_video_src_set_property;

  gstelement_class->change_state = gst_avf_video_src_change_state;

  gstbasesrc_class->get_caps = gst_avf_video_src_get_caps;
  gstbasesrc_class->set_caps = gst_avf_video_src_set_caps;
  gstbasesrc_class->start = gst_avf_video_src_start;
  gstbasesrc_class->stop = gst_avf_video_src_stop;
  gstbasesrc_class->query = gst_avf_video_src_query;
  gstbasesrc_class->unlock = gst_avf_video_src_unlock;
  gstbasesrc_class->unlock_stop = gst_avf_video_src_unlock_stop;
  gstbasesrc_class->fixate = gst_avf_video_src_fixate;
  gstbasesrc_class->decide_allocation = gst_avf_video_src_decide_allocation;

  gstpushsrc_class->create = gst_avf_video_src_create;

  gst_element_class_set_metadata (gstelement_class,
      "Video Source (AVFoundation)", "Source/Video",
      "Reads frames from an iOS AVFoundation device",
      "Ole André Vadla Ravnås <oleavr@soundrop.com>");

  gst_element_class_add_pad_template (gstelement_class,
      gst_static_pad_template_get (&src_template));

  g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
      g_param_spec_int ("device-index", "Device Index",
          "The zero-based device index",
          -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DO_STATS,
      g_param_spec_boolean ("do-stats", "Enable statistics",
          "Enable logging of statistics", DEFAULT_DO_STATS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_FPS,
      g_param_spec_int ("fps", "Frames per second",
          "Last measured framerate, if statistics are enabled",
          -1, G_MAXINT, -1, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
#if !HAVE_IOS
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN,
      g_param_spec_boolean ("capture-screen", "Enable screen capture",
          "Enable screen capture functionality", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CURSOR,
      g_param_spec_boolean ("capture-screen-cursor", "Capture screen cursor",
          "Enable cursor capture while capturing screen", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
      g_param_spec_boolean ("capture-screen-mouse-clicks", "Enable mouse clicks capture",
          "Enable mouse clicks capture while capturing screen", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
#endif

  GST_DEBUG_CATEGORY_INIT (gst_avf_video_src_debug, "avfvideosrc",
      0, "iOS AVFoundation video source");
}

#define OBJC_CALLOUT_BEGIN() \
  NSAutoreleasePool *pool; \
  \
  pool = [[NSAutoreleasePool alloc] init]
#define OBJC_CALLOUT_END() \
  [pool release]
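
/* The C vmethod wrappers below bracket every Objective-C call with
 * OBJC_CALLOUT_BEGIN/END so objects autoreleased during the call are drained
 * right away, since GStreamer streaming threads do not run an autorelease
 * pool of their own. */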

static void
gst_avf_video_src_init (GstAVFVideoSrc * src)
{
  OBJC_CALLOUT_BEGIN ();
  src->impl = [[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
  OBJC_CALLOUT_END ();
}

static void
gst_avf_video_src_finalize (GObject * obj)
{
  OBJC_CALLOUT_BEGIN ();
  [GST_AVF_VIDEO_SRC_IMPL (obj) release];
  OBJC_CALLOUT_END ();

  G_OBJECT_CLASS (parent_class)->finalize (obj);
}

static void
gst_avf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);

  switch (prop_id) {
#if !HAVE_IOS
    case PROP_CAPTURE_SCREEN:
      g_value_set_boolean (value, impl.captureScreen);
      break;
    case PROP_CAPTURE_SCREEN_CURSOR:
      g_value_set_boolean (value, impl.captureScreenCursor);
      break;
    case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
      g_value_set_boolean (value, impl.captureScreenMouseClicks);
      break;
#endif
    case PROP_DEVICE_INDEX:
      g_value_set_int (value, impl.deviceIndex);
      break;
    case PROP_DO_STATS:
      g_value_set_boolean (value, impl.doStats);
      break;
    case PROP_FPS:
      GST_OBJECT_LOCK (object);
      g_value_set_int (value, impl.fps);
      GST_OBJECT_UNLOCK (object);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_avf_video_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);

  switch (prop_id) {
#if !HAVE_IOS
    case PROP_CAPTURE_SCREEN:
      impl.captureScreen = g_value_get_boolean (value);
      break;
    case PROP_CAPTURE_SCREEN_CURSOR:
      impl.captureScreenCursor = g_value_get_boolean (value);
      break;
    case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
      impl.captureScreenMouseClicks = g_value_get_boolean (value);
      break;
#endif
    case PROP_DEVICE_INDEX:
      impl.deviceIndex = g_value_get_int (value);
      break;
    case PROP_DO_STATS:
      impl.doStats = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static GstStateChangeReturn
gst_avf_video_src_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret;

  OBJC_CALLOUT_BEGIN ();
  ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState: transition];
  OBJC_CALLOUT_END ();

  return ret;
}

static GstCaps *
gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
{
  GstCaps *ret;

  OBJC_CALLOUT_BEGIN ();
  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
  OBJC_CALLOUT_END ();

  return ret;
}

static gboolean
gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
{
  gboolean ret;

  OBJC_CALLOUT_BEGIN ();
  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
  OBJC_CALLOUT_END ();

  return ret;
}

static gboolean
gst_avf_video_src_start (GstBaseSrc * basesrc)
{
  gboolean ret;

  OBJC_CALLOUT_BEGIN ();
  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
  OBJC_CALLOUT_END ();

  return ret;
}

static gboolean
gst_avf_video_src_stop (GstBaseSrc * basesrc)
{
  gboolean ret;

  OBJC_CALLOUT_BEGIN ();
  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
  OBJC_CALLOUT_END ();

  return ret;
}

static gboolean
gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
{
  gboolean ret;

  OBJC_CALLOUT_BEGIN ();
  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
  OBJC_CALLOUT_END ();

  return ret;
}

static gboolean
gst_avf_video_src_unlock (GstBaseSrc * basesrc)
{
  gboolean ret;

  OBJC_CALLOUT_BEGIN ();
  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
  OBJC_CALLOUT_END ();

  return ret;
}

static gboolean
gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
{
  gboolean ret;

  OBJC_CALLOUT_BEGIN ();
  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
  OBJC_CALLOUT_END ();

  return ret;
}

static GstFlowReturn
gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
{
  GstFlowReturn ret;

  OBJC_CALLOUT_BEGIN ();
  ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf];
  OBJC_CALLOUT_END ();

  return ret;
}

static GstCaps *
gst_avf_video_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstCaps *ret;

  OBJC_CALLOUT_BEGIN ();
  ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps];
  OBJC_CALLOUT_END ();

  return ret;
}

static gboolean
gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
    GstQuery * query)
{
  gboolean ret;

  OBJC_CALLOUT_BEGIN ();
  ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query];
  OBJC_CALLOUT_END ();

  return ret;
}