/*
 * Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@soundrop.com>
 * Copyright (C) 2016 Alessandro Decina <twi@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
2013-11-07 00:37:38 +00:00
# ifdef HAVE_CONFIG _H
# include "config.h"
# endif
2010-11-04 13:14:09 +00:00
# include "avfvideosrc.h"
2016-01-29 04:07:59 +00:00
# include "glcontexthelper.h"
2010-11-04 13:14:09 +00:00
# import < AVFoundation / AVFoundation . h >
2015-02-25 12:52:21 +00:00
# if ! HAVE_IOS
# import < AppKit / AppKit . h >
# endif
2010-11-04 13:14:09 +00:00
# include < gst / video / video . h >
2015-01-08 13:16:17 +00:00
# include < gst / gl / gstglcontext . h >
2013-04-24 20:22:28 +00:00
# include "coremediabuffer.h"
2019-08-28 08:59:35 +00:00
# include "videotexturecache-gl.h"
2010-11-04 13:14:09 +00:00
# define DEFAULT_DEVICE _INDEX -1
2017-02-08 11:23:09 +00:00
# define DEFAULT_POSITION GST_AVF _VIDEO _SOURCE _POSITION _DEFAULT
# define DEFAULT_ORIENTATION GST_AVF _VIDEO _SOURCE _ORIENTATION _DEFAULT
# define DEFAULT_DEVICE _TYPE GST_AVF _VIDEO _SOURCE _DEVICE _TYPE _DEFAULT
2010-11-04 13:14:09 +00:00
# define DEFAULT_DO _STATS FALSE
# define DEVICE_FPS _N 25
# define DEVICE_FPS _D 1
# define BUFFER_QUEUE _SIZE 2
GST_DEBUG _CATEGORY ( gst_avf _video _src _debug ) ;
# define GST_CAT _DEFAULT gst_avf _video _src _debug
2019-01-27 03:09:59 +00:00
/* Forward declarations for C helpers defined later in this file. */
static GstVideoFormat get_gst_video_format (NSNumber * pixel_format);
static CMVideoDimensions
get_oriented_dimensions (GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions);
2010-11-04 13:14:09 +00:00
/* Source pad template: GL memory caps (UYVY/rectangle textures on macOS,
 * NV12/2D textures on iOS) plus system-memory raw video in the formats the
 * element can produce. */
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (
#if !HAVE_IOS
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
            "UYVY") ", "
        "texture-target = " GST_GL_TEXTURE_TARGET_RECTANGLE_STR ";"
#else
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
            "NV12") ", "
        "texture-target = " GST_GL_TEXTURE_TARGET_2D_STR "; "
#endif
        "video/x-raw, "
        "format = (string) { NV12, UYVY, YUY2 }, "
        "framerate = " GST_VIDEO_FPS_RANGE ", "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE "; "
        "video/x-raw, "
        "format = (string) BGRA, "
        "framerate = " GST_VIDEO_FPS_RANGE ", "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE "; "
));
2010-11-04 13:14:09 +00:00
/* States for the buffer-queue condition lock: consumers block until either a
 * captured buffer or a stop request is available. */
typedef enum _QueueState {
  NO_BUFFERS = 1,
  HAS_BUFFER_OR_STOP_REQUEST,
} QueueState;

#define gst_avf_video_src_parent_class parent_class
G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
2010-11-04 13:14:09 +00:00
2017-02-08 11:23:09 +00:00
#define GST_TYPE_AVF_VIDEO_SOURCE_POSITION (gst_avf_video_source_position_get_type ())

/* Register (once) and return the GType for the "position" property enum. */
static GType
gst_avf_video_source_position_get_type (void)
{
  static GType gtype = 0;

  if (gtype == 0) {
    static GEnumValue values[] = {
      {GST_AVF_VIDEO_SOURCE_POSITION_FRONT, "Front-facing camera", "front"},
      {GST_AVF_VIDEO_SOURCE_POSITION_BACK, "Back-facing camera", "back"},
      {GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT, "Default", "default"},
      {0, NULL, NULL},
    };

    gtype = g_enum_register_static ("GstAVFVideoSourcePosition", values);
  }

  return gtype;
}
#define GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION (gst_avf_video_source_orientation_get_type ())

/* Register (once) and return the GType for the "orientation" property enum.
 * NOTE(review): the nick "portrat-upside-down" looks like a typo for
 * "portrait-upside-down", but it is the registered public nick — changing it
 * would break existing pipelines/serialized properties, so it is preserved. */
static GType
gst_avf_video_source_orientation_get_type (void)
{
  static GType gtype = 0;

  if (gtype == 0) {
    static GEnumValue values[] = {
      {GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT,
          "Indicates that video should be oriented vertically, top at the top.",
          "portrait"},
      {GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN,
          "Indicates that video should be oriented vertically, top at the bottom.",
          "portrat-upside-down"},
      {GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT,
          "Indicates that video should be oriented horizontally, top on the left.",
          "landscape-right"},
      {GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT,
          "Indicates that video should be oriented horizontally, top on the right.",
          "landscape-left"},
      {GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT, "Default", "default"},
      {0, NULL, NULL},
    };

    gtype = g_enum_register_static ("GstAVFVideoSourceOrientation", values);
  }

  return gtype;
}
#define GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE (gst_avf_video_source_device_type_get_type ())

/* Register (once) and return the GType for the "device-type" property enum. */
static GType
gst_avf_video_source_device_type_get_type (void)
{
  static GType gtype = 0;

  if (gtype == 0) {
    static GEnumValue values[] = {
      {GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA,
          "A built-in wide angle camera. These devices are suitable for general purpose use.",
          "wide-angle"},
      {GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA,
          "A built-in camera device with a longer focal length than a wide-angle camera.",
          "telephoto"},
      {GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA,
          "A dual camera device, combining built-in wide-angle and telephoto cameras that work together as a single capture device.",
          "dual"},
      {GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT, "Default", "default"},
      {0, NULL, NULL},
    };

    gtype = g_enum_register_static ("GstAVFVideoSourceDeviceType", values);
  }

  return gtype;
}
2010-11-04 13:14:09 +00:00
/* GstAVFVideoSrcImpl: the Objective-C object backing the GstAVFVideoSrc
 * element. It owns the AVFoundation capture session and acts as the
 * sample-buffer delegate for the video data output. */
@interface GstAVFVideoSrcImpl : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
  /* Back-pointers to the owning element (same object, different casts). */
  GstElement *element;
  GstBaseSrc *baseSrc;
  GstPushSrc *pushSrc;

  /* Device-selection properties. */
  gint deviceIndex;
  const gchar *deviceName;
  GstAVFVideoSourcePosition position;
  GstAVFVideoSourceOrientation orientation;
  GstAVFVideoSourceDeviceType deviceType;

  BOOL doStats;

  /* AVFoundation capture pipeline objects (set up in openDevice). */
  AVCaptureSession *session;
  AVCaptureInput *input;
  AVCaptureVideoDataOutput *output;
  AVCaptureDevice *device;
  AVCaptureConnection *connection;
  CMClockRef inputClock;

  /* mainQueue serializes session setup/teardown; workerQueue receives the
   * sample-buffer delegate callbacks (see openDevice). */
  dispatch_queue_t mainQueue;
  dispatch_queue_t workerQueue;

  /* Captured-buffer hand-off queue; presumably coordinated through
   * bufQueueLock using the QueueState values above — usage is outside this
   * chunk, confirm against the create/captureOutput implementations. */
  NSConditionLock *bufQueueLock;
  NSMutableArray *bufQueue;
  BOOL stopRequest;

  /* Negotiated stream parameters. */
  GstCaps *caps;
  GstVideoFormat format;
  gint width, height;
  GstClockTime latency;
  guint64 offset;

  /* Frame-rate statistics (do-stats). */
  GstClockTime lastSampling;
  guint count;
  gint fps;

  /* Screen-capture mode configuration. */
  BOOL captureScreen;
  BOOL captureScreenCursor;
  BOOL captureScreenMouseClicks;

  BOOL useVideoMeta;

  /* GL context discovery and texture-upload cache. */
  GstGLContextHelper *ctxh;
  GstVideoTextureCache *textureCache;
}

- (id)init;
- (id)initWithSrc:(GstPushSrc *)src;
- (void)finalize;

@property int deviceIndex;
@property const gchar *deviceName;
@property GstAVFVideoSourcePosition position;
@property GstAVFVideoSourceOrientation orientation;
@property GstAVFVideoSourceDeviceType deviceType;
@property BOOL doStats;
@property int fps;
@property BOOL captureScreen;
@property BOOL captureScreenCursor;
@property BOOL captureScreenMouseClicks;

- (BOOL)openScreenInput;
- (BOOL)openDeviceInput;
- (BOOL)openDevice;
- (void)closeDevice;
- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format;
#if !HAVE_IOS
- (CGDirectDisplayID)getDisplayIdFromDeviceIndex;
- (float)getScaleFactorFromDeviceIndex;
#endif
- (GstCaps *)getDeviceCaps;
- (BOOL)setDeviceCaps:(GstVideoInfo *)info;
- (BOOL)getSessionPresetCaps:(GstCaps *)result;
- (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
- (GstCaps *)getCaps;
- (BOOL)setCaps:(GstCaps *)new_caps;
- (BOOL)start;
- (BOOL)stop;
- (BOOL)unlock;
- (BOOL)unlockStop;
- (BOOL)query:(GstQuery *)query;
- (void)setContext:(GstContext *)context;
- (GstStateChangeReturn)changeState:(GstStateChange)transition;
- (GstFlowReturn)create:(GstBuffer **)buf;
- (GstCaps *)fixate:(GstCaps *)caps;
- (BOOL)decideAllocation:(GstQuery *)query;
- (void)updateStatistics;
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection;
@end
2017-03-08 13:16:43 +00:00
# if HAVE_IOS
2017-02-08 11:23:09 +00:00
/* Map the element's device-type enum onto the AVFoundation
 * AVCaptureDeviceType constant. DEFAULT is not a valid input; callers must
 * filter it out before calling (see openDeviceInput). */
static AVCaptureDeviceType GstAVFVideoSourceDeviceType2AVCaptureDeviceType (GstAVFVideoSourceDeviceType deviceType) {
  switch (deviceType) {
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA:
      return AVCaptureDeviceTypeBuiltInWideAngleCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA:
      return AVCaptureDeviceTypeBuiltInTelephotoCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA:
      return AVCaptureDeviceTypeBuiltInDuoCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT:
      g_assert_not_reached ();
  }

  /* Fix: control previously fell off the end of this non-void function
   * (undefined behavior) for out-of-range values or when assertions are
   * compiled out (G_DISABLE_ASSERT). */
  g_assert_not_reached ();
  return AVCaptureDeviceTypeBuiltInWideAngleCamera;
}
/* Map the element's position enum onto AVCaptureDevicePosition. DEFAULT is
 * not a valid input; callers must filter it out first (see openDeviceInput). */
static AVCaptureDevicePosition GstAVFVideoSourcePosition2AVCaptureDevicePosition (GstAVFVideoSourcePosition position) {
  switch (position) {
    case GST_AVF_VIDEO_SOURCE_POSITION_FRONT:
      return AVCaptureDevicePositionFront;
    case GST_AVF_VIDEO_SOURCE_POSITION_BACK:
      return AVCaptureDevicePositionBack;
    case GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT:
      g_assert_not_reached ();
  }

  /* Fix: control previously fell off the end of this non-void function
   * (undefined behavior) for out-of-range values or when assertions are
   * compiled out (G_DISABLE_ASSERT). */
  g_assert_not_reached ();
  return AVCaptureDevicePositionUnspecified;
}
/* Map the element's orientation enum onto AVCaptureVideoOrientation. DEFAULT
 * is not a valid input; callers must filter it out first (see openDevice). */
static AVCaptureVideoOrientation GstAVFVideoSourceOrientation2AVCaptureVideoOrientation (GstAVFVideoSourceOrientation orientation) {
  switch (orientation) {
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT:
      return AVCaptureVideoOrientationPortrait;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN:
      return AVCaptureVideoOrientationPortraitUpsideDown;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT:
      return AVCaptureVideoOrientationLandscapeLeft;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT:
      return AVCaptureVideoOrientationLandscapeRight;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT:
      g_assert_not_reached ();
  }

  /* Fix: control previously fell off the end of this non-void function
   * (undefined behavior) for out-of-range values or when assertions are
   * compiled out (G_DISABLE_ASSERT). */
  g_assert_not_reached ();
  return AVCaptureVideoOrientationPortrait;
}
2017-03-08 13:16:43 +00:00
# endif
2010-11-04 13:14:09 +00:00
@implementation GstAVFVideoSrcImpl

/* Generate ivar-backed accessors for the properties declared in the
 * interface above. */
@synthesize deviceIndex, deviceName, position, orientation, deviceType, doStats,
    fps, captureScreen, captureScreenCursor, captureScreenMouseClicks;
2013-11-06 17:50:46 +00:00
2010-11-04 13:14:09 +00:00
/* Plain -init delegates to the designated initializer with no backing
 * GstPushSrc element. */
- (id)init
{
  return [self initWithSrc:NULL];
}
/* Designated initializer: bind to the owning GstPushSrc (may be NULL),
 * initialize all properties to their defaults, create the dispatch queues
 * and mark the base source as a live, time-formatted source. */
- (id)initWithSrc:(GstPushSrc *)src
{
  if ((self = [super init])) {
    /* The same element under its three GStreamer base-class views. */
    element = GST_ELEMENT_CAST (src);
    baseSrc = GST_BASE_SRC_CAST (src);
    pushSrc = src;

    deviceIndex = DEFAULT_DEVICE_INDEX;
    deviceName = NULL;
    position = DEFAULT_POSITION;
    orientation = DEFAULT_ORIENTATION;
    deviceType = DEFAULT_DEVICE_TYPE;
    captureScreen = NO;
    captureScreenCursor = NO;
    captureScreenMouseClicks = NO;
    useVideoMeta = NO;
    textureCache = NULL;
    ctxh = gst_gl_context_helper_new (element);
    /* mainQueue: session setup/teardown; workerQueue: delegate callbacks. */
    mainQueue =
        dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.main", NULL);
    workerQueue =
        dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.output", NULL);

    /* A capture device is a live source producing timestamped buffers. */
    gst_base_src_set_live (baseSrc, TRUE);
    gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
  }

  return self;
}
/* Drop the references to both dispatch queues on finalization. */
- (void)finalize
{
  workerQueue = NULL;
  mainQueue = NULL;
}
2013-11-04 11:14:35 +00:00
/* Select a capture device and create an AVCaptureDeviceInput for it.
 * With the default device index, the device-type/position properties pick
 * the device (iOS only); otherwise deviceIndex selects from the enumerated
 * device list. Returns NO (with a posted element error) on failure. */
- (BOOL)openDeviceInput
{
  NSString *mediaType = AVMediaTypeVideo;
  NSError *err;

  if (deviceIndex == DEFAULT_DEVICE_INDEX) {
    /* NOTE(review): other conditionals in this file use `#if !HAVE_IOS`,
     * which implies HAVE_IOS is always defined (as 0 or 1); `#ifdef` here
     * would then also be true on macOS — confirm intended semantics. */
#ifdef HAVE_IOS
    if (deviceType != DEFAULT_DEVICE_TYPE && position != DEFAULT_POSITION) {
      device = [AVCaptureDevice
          defaultDeviceWithDeviceType:GstAVFVideoSourceDeviceType2AVCaptureDeviceType(deviceType)
          mediaType:mediaType
          position:GstAVFVideoSourcePosition2AVCaptureDevicePosition(position)];
    } else {
      device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
    }
#else
    device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
#endif
    if (device == nil) {
      GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                         ("No video capture devices found"), (NULL));
      return NO;
    }
  } else { // deviceIndex takes priority over position and deviceType
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
    if (deviceIndex >= [devices count]) {
      GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                         ("Invalid video capture device index"), (NULL));
      return NO;
    }
    device = [devices objectAtIndex:deviceIndex];
  }
  g_assert (device != nil);

  deviceName = [[device localizedName] UTF8String];
  GST_INFO ("Opening '%s'", deviceName);

  input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                error:&err];
  if (input == nil) {
    GST_ELEMENT_ERROR (element, RESOURCE, BUSY,
        ("Failed to open device: %s",
         [[err localizedDescription] UTF8String]),
        (NULL));
    device = nil;
    return NO;
  }

  return YES;
}
/* Create an AVCaptureScreenInput for the display selected by deviceIndex.
 * Unsupported on iOS (returns NO). The cursor-capture setting is applied
 * via KVC so the code still runs on OS X < 10.8, where the key is absent. */
- (BOOL)openScreenInput
{
#if HAVE_IOS
  return NO;
#else
  CGDirectDisplayID displayId;

  GST_DEBUG_OBJECT (element, "Opening screen input");

  displayId = [self getDisplayIdFromDeviceIndex];
  if (displayId == 0)
    return NO;

  AVCaptureScreenInput *screenInput =
      [[AVCaptureScreenInput alloc] initWithDisplayID:displayId];

  @try {
    [screenInput setValue:[NSNumber numberWithBool:captureScreenCursor]
                   forKey:@"capturesCursor"];
  } @catch (NSException *exception) {
    /* NSUndefinedKeyException means the OS predates the key; anything else
     * is unexpected and worth logging. */
    if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
      GST_WARNING ("An unexpected error occurred: %s",
                   [[exception reason] UTF8String]);
    }
    GST_WARNING ("Capturing cursor is only supported in OS X >= 10.8");
  }
  screenInput.capturesMouseClicks = captureScreenMouseClicks;
  input = screenInput;
  return YES;
#endif
}
2010-11-04 13:14:09 +00:00
/* Open the configured input (device or screen), build the capture session
 * and wire up the video data output. On macOS >= 10.14 this first walks the
 * TCC camera-permission state machine, synchronously waiting for the user's
 * answer when permission has not been determined yet. Returns NO on any
 * failure (permission denied, no device, etc.). */
- (BOOL)openDevice
{
  BOOL success = NO, *successPtr = &success;

  GST_DEBUG_OBJECT (element, "Opening device");

  // Since Mojave, permissions are now supposed to be explicitly granted
  // before performing anything on a device
  if (@available(macOS 10.14, *)) {
    /* Screen capture is not gated by the camera permission. */
    if (captureScreen)
      goto checked;

    // Check if permission has already been granted (or denied)
    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    switch (authStatus) {
      case AVAuthorizationStatusDenied:
        // The user has explicitly denied permission for media capture.
        GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
          ("Device video access permission has been explicitly denied before"), ("Authorization status: %d", (int)authStatus));
        return success;
      case AVAuthorizationStatusRestricted:
        // The user is not allowed to access media capture devices.
        GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
          ("Device video access permission cannot be granted by the user"), ("Authorization status: %d", (int)authStatus));
        return success;
      case AVAuthorizationStatusAuthorized:
        // The user has explicitly granted permission for media capture,
        // or explicit user permission is not necessary for the media type in question.
        GST_DEBUG_OBJECT (element, "Device video access permission has already been granted");
        break;
      case AVAuthorizationStatusNotDetermined:
        ; /* empty statement so a declaration can follow the case label */
        // Explicit user permission is required for media capture,
        // but the user has not yet granted or denied such permission.
        dispatch_semaphore_t sema = dispatch_semaphore_create (0);

        dispatch_sync (mainQueue, ^{
          [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            GST_DEBUG_OBJECT (element, "Device video access permission %s", granted ? "granted" : "not granted");
            dispatch_semaphore_signal (sema);
          }];
        });

        // Block on dialog being answered
        if (![NSThread isMainThread]) {
          dispatch_semaphore_wait (sema, DISPATCH_TIME_FOREVER);
        } else {
          /* On the main thread we must keep pumping the run loop so the
           * permission dialog can be displayed and answered. */
          while (dispatch_semaphore_wait (sema, DISPATCH_TIME_NOW)) {
            [[NSRunLoop currentRunLoop] runMode:NSDefaultRunLoopMode beforeDate:[NSDate dateWithTimeIntervalSinceNow:0]];
          }
        }

        // Check if permission has been granted
        AVAuthorizationStatus videoAuthorizationStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
        if (videoAuthorizationStatus != AVAuthorizationStatusAuthorized) {
          GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
            ("Device video access permission has just been denied"), ("Authorization status: %d", (int)videoAuthorizationStatus));
          return success;
        }
    }
  }

checked:
  /* Session construction is serialized on mainQueue. */
  dispatch_sync (mainQueue, ^{
    BOOL ret;

    if (captureScreen)
      ret = [self openScreenInput];
    else
      ret = [self openDeviceInput];
    if (!ret)
      return;

    output = [[AVCaptureVideoDataOutput alloc] init];
    [output setSampleBufferDelegate:self
                              queue:workerQueue];
    output.alwaysDiscardsLateVideoFrames = YES;
    output.videoSettings = nil; /* device native format */

    session = [[AVCaptureSession alloc] init];
    [session addInput:input];
    [session addOutput:output];

    /* retained by session */
    connection = [[output connections] firstObject];
#ifdef HAVE_IOS
    if (orientation != DEFAULT_ORIENTATION)
      connection.videoOrientation = GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(orientation);
#endif
    inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;

    *successPtr = YES;
  });

  GST_DEBUG_OBJECT (element, "Opening device %s", success ? "succeed" : "failed");

  return success;
}
/* Tear down the capture session (must already be stopped) and release all
 * AVFoundation objects and the negotiated caps. Serialized on mainQueue to
 * mirror openDevice. */
- (void)closeDevice
{
  GST_DEBUG_OBJECT (element, "Closing device");

  dispatch_sync (mainQueue, ^{
    g_assert (![session isRunning]);

    connection = nil;
    inputClock = nil;

    [session removeInput:input];
    [session removeOutput:output];
    session = nil;
    input = nil;
    output = nil;

    /* In screen-capture mode no AVCaptureDevice was selected, so there is
     * nothing to release. */
    if (!captureScreen) {
      device = nil;
    }

    if (caps)
      gst_caps_unref (caps);
    caps = NULL;
  });
}
2013-10-31 13:03:58 +00:00
/* Build a raw-video caps structure with a fixed framerate. */
#define GST_AVF_CAPS_NEW(format, w, h, fps_n, fps_d)                  \
    (gst_caps_new_simple ("video/x-raw",                              \
        "width", G_TYPE_INT, w,                                       \
        "height", G_TYPE_INT, h,                                      \
        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
        "framerate", GST_TYPE_FRACTION, (fps_n), (fps_d),             \
        NULL))

/* Build a raw-video caps structure with a framerate range. */
#define GST_AVF_FPS_RANGE_CAPS_NEW(format, w, h, min_fps_n, min_fps_d, max_fps_n, max_fps_d) \
    (gst_caps_new_simple ("video/x-raw",                              \
        "width", G_TYPE_INT, w,                                       \
        "height", G_TYPE_INT, h,                                      \
        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
        "framerate", GST_TYPE_FRACTION_RANGE, (min_fps_n), (min_fps_d), (max_fps_n), (max_fps_d), \
        NULL))
2013-10-31 13:03:58 +00:00
/* Translate a CoreVideo pixel-format code into the corresponding GStreamer
 * video format, logging formats this element does not handle. */
- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format
{
  GstVideoFormat fmt = get_gst_video_format (pixel_format);

  if (fmt == GST_VIDEO_FORMAT_UNKNOWN)
    GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
        [[pixel_format stringValue] UTF8String]);

  return fmt;
}
2013-10-29 17:33:11 +00:00
2015-02-25 12:52:21 +00:00
# if ! HAVE_IOS
/* Resolve deviceIndex to a CGDirectDisplayID for screen capture. The default
 * index selects the main display; an out-of-range index posts an element
 * error and returns 0. */
- (CGDirectDisplayID)getDisplayIdFromDeviceIndex
{
  NSArray *screens = [NSScreen screens];

  if (deviceIndex == DEFAULT_DEVICE_INDEX)
    return kCGDirectMainDisplay;

  if (deviceIndex >= [screens count]) {
    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
        ("Invalid screen capture device index"), (NULL));
    return 0;
  }

  /* The display ID is exposed through the screen's device description. */
  NSNumber *screenNumber = [[[screens objectAtIndex:deviceIndex]
      deviceDescription] objectForKey:@"NSScreenNumber"];
  return [screenNumber unsignedIntegerValue];
}
2016-08-19 09:46:24 +00:00
/* Return the backing scale factor (Retina multiplier) of the screen selected
 * by deviceIndex; falls back to 1.0 on an invalid index (after posting an
 * element error). */
- (float)getScaleFactorFromDeviceIndex
{
  NSArray *screens = [NSScreen screens];

  if (deviceIndex == DEFAULT_DEVICE_INDEX)
    return [[NSScreen mainScreen] backingScaleFactor];

  if (deviceIndex >= [screens count]) {
    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
        ("Invalid screen capture device index"), (NULL));
    return 1.0;
  }

  return [[screens objectAtIndex:deviceIndex] backingScaleFactor];
}
2015-02-25 12:52:21 +00:00
# endif
2017-02-08 11:23:09 +00:00
/* Adjust raw device dimensions for the configured orientation by delegating
 * to the file-level helper (defined later in this file). */
- (CMVideoDimensions)orientedDimensions:(CMVideoDimensions)dimensions
{
  return get_oriented_dimensions (orientation, dimensions);
}
2015-12-16 02:47:00 +00:00
/* Query the caps the selected capture device supports, adjusted for the
 * configured orientation. */
- (GstCaps *)getDeviceCaps
{
  GstCaps *result;

  GST_DEBUG_OBJECT (element, "Getting device caps");

  result = gst_av_capture_device_get_caps (device, output, orientation);
  GST_DEBUG_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT,
      result);

  return result;
}
/* Configure the device's active format and frame duration to match the
 * negotiated GstVideoInfo. Uses key-value coding for the format/frame-rate
 * APIs — presumably to stay buildable against older SDKs where these
 * selectors are unavailable (TODO confirm). Returns NO if no matching
 * format/framerate is found or the device cannot be locked.
 * NOTE(review): lockForConfiguration: is never paired with an unlock here —
 * confirm whether that is intentional (keeping the configuration pinned). */
- (BOOL)setDeviceCaps:(GstVideoInfo *)info
{
  double framerate;
  gboolean found_format = FALSE, found_framerate = FALSE;
  NSArray *formats = [device valueForKey:@"formats"];

  gst_util_fraction_to_double (info->fps_n, info->fps_d, &framerate);

  GST_DEBUG_OBJECT (element, "Setting device caps");

  if ([device lockForConfiguration:NULL] == YES) {
    for (NSObject *f in formats) {
      CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef)[f performSelector:@selector(formatDescription)];
      CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
      /* Compare against orientation-adjusted dimensions, matching getCaps. */
      dimensions = [self orientedDimensions:dimensions];
      if (dimensions.width == info->width && dimensions.height == info->height) {
        found_format = TRUE;
        [device setValue:f forKey:@"activeFormat"];
        for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
          gdouble min_frame_rate, max_frame_rate;

          [[rate valueForKey:@"minFrameRate"] getValue:&min_frame_rate];
          [[rate valueForKey:@"maxFrameRate"] getValue:&max_frame_rate];
          /* Small epsilon absorbs float rounding in the reported range. */
          if ((framerate >= min_frame_rate - 0.00001) &&
              (framerate <= max_frame_rate + 0.00001)) {
            NSValue *frame_duration_value;
            found_framerate = TRUE;
            if (min_frame_rate == max_frame_rate) {
              /* on mac we get tight ranges and an exception is raised if the
               * frame duration doesn't match the one reported in the range to
               * the last decimal point
               */
              frame_duration_value = [rate valueForKey:@"minFrameDuration"];
            } else {
              // Invert fps_n and fps_d to get frame duration value and timescale (or numerator and denominator)
              frame_duration_value = [NSValue valueWithCMTime:CMTimeMake (info->fps_d, info->fps_n)];
            }
            [device setValue:frame_duration_value forKey:@"activeVideoMinFrameDuration"];
            @try {
              /* Only available on OSX >= 10.8 and iOS >= 7.0 */
              [device setValue:frame_duration_value forKey:@"activeVideoMaxFrameDuration"];
            } @catch (NSException *exception) {
              /* NSUndefinedKeyException just means an older OS. */
              if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
                GST_WARNING ("An unexcepted error occurred: %s",
                              [exception.reason UTF8String]);
              }
            }
            break;
          }
        }

        if (found_framerate) {
          break;
        }
      }
    }
    if (!found_format) {
      GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
      return NO;
    }
    if (!found_framerate) {
      GST_WARNING ("Unsupported capture framerate %d/%d", info->fps_n, info->fps_d);
      return NO;
    }
  } else {
    GST_WARNING ("Couldn't lock device for configuration");
    return NO;
  }
  return YES;
}
/* Append fixed-size caps for every session preset the current session
 * supports, for each pixel format the output can produce. Used as a
 * fallback when the per-format device API is unavailable (see getCaps:
 * "iOS < 7.0"). Framerate is the fixed DEVICE_FPS_N/DEVICE_FPS_D.
 * Always returns YES. */
- (BOOL)getSessionPresetCaps:(GstCaps *)result
{
  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
  for (NSNumber *pixel_format in pixel_formats) {
    GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
    if (gst_format == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

#if HAVE_IOS
    if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1920, 1080, DEVICE_FPS_N, DEVICE_FPS_D));
#endif
    if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1280, 720, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 640, 480, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 480, 360, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset352x288])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 352, 288, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetLow])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 192, 144, DEVICE_FPS_N, DEVICE_FPS_D));
  }

  GST_LOG_OBJECT (element, "Session presets returned the following caps %" GST_PTR_FORMAT, result);

  return YES;
}
/* Select the AVCaptureSession preset matching the requested width. Only the
 * width is inspected; each preset implies its own height. Returns NO for an
 * unsupported size or if the device cannot be locked for configuration.
 * Fixes: removed the stray semicolon after the method signature in the
 * definition, and corrected the "presset" typo in the debug message. */
- (BOOL)setSessionPresetCaps:(GstVideoInfo *)info
{
  GST_DEBUG_OBJECT (element, "Setting session preset caps");

  if ([device lockForConfiguration:NULL] != YES) {
    GST_WARNING ("Couldn't lock device for configuration");
    return NO;
  }

  switch (info->width) {
    case 192:
      session.sessionPreset = AVCaptureSessionPresetLow;
      break;
    case 352:
      session.sessionPreset = AVCaptureSessionPreset352x288;
      break;
    case 480:
      session.sessionPreset = AVCaptureSessionPresetMedium;
      break;
    case 640:
      session.sessionPreset = AVCaptureSessionPreset640x480;
      break;
    case 1280:
      session.sessionPreset = AVCaptureSessionPreset1280x720;
      break;
#if HAVE_IOS
    case 1920:
      session.sessionPreset = AVCaptureSessionPreset1920x1080;
      break;
#endif
    default:
      GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
      return NO;
  }
  return YES;
}
/* Report the caps the source can currently produce. Returns NULL before the
 * session exists (BaseSrc then falls back to the template caps). In screen
 * capture mode the caps are derived from the display bounds scaled by the
 * backing factor; otherwise the device formats API is tried first, with the
 * session-preset API as fallback on OSes where that API is missing. */
- (GstCaps *)getCaps
{
  GstCaps *result;
  NSArray *pixel_formats;

  if (session == nil)
    return NULL; /* BaseSrc will return template caps */

  result = gst_caps_new_empty ();
  pixel_formats = output.availableVideoCVPixelFormatTypes;

  if (captureScreen) {
#if !HAVE_IOS
    CGRect rect = CGDisplayBounds ([self getDisplayIdFromDeviceIndex]);
    float scale = [self getScaleFactorFromDeviceIndex];
    for (NSNumber *pixel_format in pixel_formats) {
      GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
      if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
        gst_caps_append (result, gst_caps_new_simple ("video/x-raw",
            "width", G_TYPE_INT, (int)(rect.size.width * scale),
            "height", G_TYPE_INT, (int)(rect.size.height * scale),
            "format", G_TYPE_STRING, gst_video_format_to_string (gst_format),
            NULL));
    }
#else
    GST_WARNING ("Screen capture is not supported by iOS");
#endif
    return result;
  }

  @try {
    result = gst_caps_merge (result, [self getDeviceCaps]);
  } @catch (NSException *exception) {
    /* Any exception other than NSUndefinedKeyException is unexpected;
     * NSUndefinedKeyException means the device-formats API is missing. */
    if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
      GST_WARNING ("An unexcepted error occurred: %s", [exception.reason UTF8String]);
      return result;
    }

    /* Fallback on session presets API for iOS < 7.0 */
    [self getSessionPresetCaps:result];
  }

  return result;
}
2013-10-29 15:37:16 +00:00
/* GstBaseSrc::set_caps() implementation. Configures the device (or the
 * screen input), picks the matching CoreVideo pixel format for the
 * output, stores the negotiated caps and starts the capture session.
 * All session/device mutation happens synchronously on mainQueue.
 * Returns NO when the requested format cannot be configured. */
- (BOOL)setCaps:(GstCaps *)new_caps
{
  GstVideoInfo info;
  BOOL success = YES, *successPtr = &success;

  gst_video_info_init (&info);
  gst_video_info_from_caps (&info, new_caps);

  width = info.width;
  height = info.height;
  format = info.finfo->format;
  /* Variable-framerate caps have fps_n == 0: guard against dividing by
   * zero and report an unknown latency instead. */
  if (info.fps_n != 0)
    latency = gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);
  else
    latency = GST_CLOCK_TIME_NONE;

  dispatch_sync (mainQueue, ^{
    int newformat;

    if (captureScreen) {
#if !HAVE_IOS
      AVCaptureScreenInput *screenInput = (AVCaptureScreenInput *)input;
      screenInput.minFrameDuration = CMTimeMake (info.fps_d, info.fps_n);
#else
      GST_WARNING ("Screen capture is not supported by iOS");
      *successPtr = NO;
      return;
#endif
    } else {
      @try {
        /* formats and activeFormat keys are only available on OSX >= 10.7 and iOS >= 7.0 */
        *successPtr = [self setDeviceCaps:(GstVideoInfo *)&info];
        if (*successPtr != YES)
          return;
      } @catch (NSException *exception) {
        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
          /* Fixed typo: "unexcepted" -> "unexpected". */
          GST_WARNING ("An unexpected error occurred: %s", [exception.reason UTF8String]);
          *successPtr = NO;
          return;
        }

        /* Fallback on session presets API for iOS < 7.0 */
        *successPtr = [self setSessionPresetCaps:(GstVideoInfo *)&info];
        if (*successPtr != YES)
          return;
      }
    }

    /* Map the negotiated GStreamer format to its CoreVideo counterpart. */
    switch (format) {
      case GST_VIDEO_FORMAT_NV12:
        newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
        break;
      case GST_VIDEO_FORMAT_UYVY:
        newformat = kCVPixelFormatType_422YpCbCr8;
        break;
      case GST_VIDEO_FORMAT_YUY2:
        newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
        break;
      case GST_VIDEO_FORMAT_BGRA:
        newformat = kCVPixelFormatType_32BGRA;
        break;
      default:
        *successPtr = NO;
        GST_WARNING ("Unsupported output format %s",
            gst_video_format_to_string (format));
        return;
    }

    GST_INFO_OBJECT (element,
        "width: %d height: %d format: %s", width, height,
        gst_video_format_to_string (format));

    output.videoSettings = [NSDictionary
        dictionaryWithObject:[NSNumber numberWithInt:newformat]
        forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];

    gst_caps_replace (&caps, new_caps);
    GST_INFO_OBJECT (element, "configured caps %" GST_PTR_FORMAT, caps);

    if (![session isRunning])
      [session startRunning];

    /* Unlock device configuration only after session is started so the session
     * won't reset the capture formats */
    [device unlockForConfiguration];
  });

  return success;
}
/* GstBaseSrc::start() implementation: reset all streaming state and set
 * up the bounded sample queue used between the capture callback and
 * create:. Always succeeds. */
- (BOOL)start
{
  /* Fresh queue plus its condition lock (condition: queue is empty). */
  bufQueueLock = [[NSConditionLock alloc] initWithCondition:NO_BUFFERS];
  bufQueue = [[NSMutableArray alloc] initWithCapacity:BUFFER_QUEUE_SIZE];
  stopRequest = NO;

  /* Reset buffer accounting and statistics. */
  offset = 0;
  latency = GST_CLOCK_TIME_NONE;
  lastSampling = GST_CLOCK_TIME_NONE;
  count = 0;
  fps = -1;

  return YES;
}
/* GstBaseSrc::stop() implementation: stop the capture session, drain any
 * in-flight delegate callbacks, and drop the queue and GL resources. */
- (BOOL)stop
{
  /* Stop capturing on mainQueue, then flush workerQueue so no delegate
   * callback can still be touching our state. */
  dispatch_sync (mainQueue, ^{ [session stopRunning]; });
  dispatch_sync (workerQueue, ^{});

  bufQueueLock = nil;
  bufQueue = nil;

  /* g_clear_object / g_clear_pointer unref/free and NULL-out in one step
   * (both are NULL-safe), replacing the manual if/unref/NULL dance. */
  g_clear_object (&textureCache);
  g_clear_pointer (&ctxh, gst_gl_context_helper_free);

  return YES;
}
/* GstBaseSrc::query() implementation: answer LATENCY queries from the
 * frame-duration-derived latency; delegate everything else to the base
 * class. Latency is only reported once a device is open and caps are
 * configured. */
- (BOOL)query:(GstQuery *)query
{
  if (GST_QUERY_TYPE (query) != GST_QUERY_LATENCY)
    return GST_BASE_SRC_CLASS (parent_class)->query (baseSrc, query);

  if (device == nil || caps == NULL)
    return NO;

  GstClockTime min_latency = latency;
  GstClockTime max_latency = latency;

  GST_DEBUG_OBJECT (element, "reporting latency of min %" GST_TIME_FORMAT
      " max %" GST_TIME_FORMAT,
      GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
  gst_query_set_latency (query, TRUE, min_latency, max_latency);
  return YES;
}
/* GstBaseSrc::unlock() implementation: make a create: call that is
 * blocked on the queue return GST_FLOW_FLUSHING, by raising stopRequest
 * and waking any waiter via the condition. */
- (BOOL)unlock
{
  [bufQueueLock lock];
  stopRequest = YES;
  /* Signal HAS_BUFFER_OR_STOP_REQUEST even with an empty queue so the
   * blocked consumer wakes up and observes stopRequest. */
  [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];

  return YES;
}
/* GstBaseSrc::unlock_stop() implementation: clear the flushing flag and
 * restore the lock condition to match the actual queue contents. */
- (BOOL)unlockStop
{
  [bufQueueLock lock];
  stopRequest = NO;
  [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];

  return YES;
}
/* GstElement::change_state() implementation: open the capture device on
 * NULL->READY (failing the transition if that fails), chain up, and
 * close the device again on READY->NULL. */
- (GstStateChangeReturn)changeState:(GstStateChange)transition
{
  if (transition == GST_STATE_CHANGE_NULL_TO_READY && ![self openDevice])
    return GST_STATE_CHANGE_FAILURE;

  GstStateChangeReturn ret =
      GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  if (transition == GST_STATE_CHANGE_READY_TO_NULL)
    [self closeDevice];

  return ret;
}
/* AVCaptureVideoDataOutputSampleBufferDelegate callback, invoked for
 * every captured frame. Timestamps the sample and inserts it at the
 * front of the bounded buffer queue consumed by create:. */
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)aConnection
{
  GstClockTime timestamp, duration;

  [bufQueueLock lock];
  if (stopRequest) {
    /* Source is flushing/stopping: drop the frame. */
    [bufQueueLock unlock];
    return;
  }

  [self getSampleBuffer:sampleBuffer timestamp:&timestamp duration:&duration];

  if (timestamp == GST_CLOCK_TIME_NONE) {
    /* No usable timestamp (see getSampleBuffer:): drop the frame and keep
     * the lock condition consistent with the queue contents. */
    [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
    return;
  }

  /* Queue is newest-first and bounded: when full, drop the oldest entry. */
  if ([bufQueue count] == BUFFER_QUEUE_SIZE)
    [bufQueue removeLastObject];

  [bufQueue insertObject:@{ @"sbuf": (__bridge id)sampleBuffer,
                            @"timestamp": @(timestamp),
                            @"duration": @(duration) }
                 atIndex:0];
  [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
}
/* GstPushSrc::create() implementation. Blocks until the capture callback
 * queues a sample (or unlock is requested), wraps the CMSampleBuffer in
 * a GstBuffer, updates caps if the device changed frame size behind our
 * back, and applies timestamp/duration/offset metadata. */
- (GstFlowReturn)create:(GstBuffer **)buf
{
  CMSampleBufferRef sbuf;
  CVImageBufferRef image_buf;
  CVPixelBufferRef pixel_buf;
  size_t cur_width, cur_height;
  GstClockTime timestamp, duration;

  [bufQueueLock lockWhenCondition:HAS_BUFFER_OR_STOP_REQUEST];
  if (stopRequest) {
    [bufQueueLock unlock];
    return GST_FLOW_FLUSHING;
  }

  /* The queue is newest-first, so the last object is the oldest sample. */
  NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
  sbuf = (__bridge CMSampleBufferRef) dic[@"sbuf"];
  timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
  duration = (GstClockTime) [dic[@"duration"] longLongValue];
  CFRetain (sbuf);
  [bufQueue removeLastObject];
  [bufQueueLock unlockWithCondition:
      ([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];

  /* Check output frame size dimensions */
  image_buf = CMSampleBufferGetImageBuffer (sbuf);
  if (image_buf) {
    pixel_buf = (CVPixelBufferRef) image_buf;
    cur_width = CVPixelBufferGetWidth (pixel_buf);
    cur_height = CVPixelBufferGetHeight (pixel_buf);
    if (width != cur_width || height != cur_height) {
      /* Set new caps according to current frame dimensions */
      GST_WARNING ("Output frame size has changed %dx%d -> %dx%d, updating caps",
          width, height, (int) cur_width, (int) cur_height);
      width = cur_width;
      height = cur_height;
      gst_caps_set_simple (caps,
          "width", G_TYPE_INT, width,
          "height", G_TYPE_INT, height,
          NULL);
      /* This element is a source: push the caps event on the src pad.
       * The previous GST_BASE_SINK_PAD only worked by accident of struct
       * layout (both pads sit right after the GstElement member). */
      gst_pad_push_event (GST_BASE_SRC_PAD (baseSrc), gst_event_new_caps (caps));
    }
  }

  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache);
  if (*buf == NULL) {
    CFRelease (sbuf);
    return GST_FLOW_ERROR;
  }
  CFRelease (sbuf);

  GST_BUFFER_OFFSET (*buf) = offset++;
  GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (*buf) + 1;
  GST_BUFFER_TIMESTAMP (*buf) = timestamp;
  GST_BUFFER_DURATION (*buf) = duration;

  if (doStats)
    [self updateStatistics];

  return GST_FLOW_OK;
}
2015-12-16 03:38:44 +00:00
/* GstBaseSrc::fixate() implementation: keep only the first structure,
 * prefer the largest available height and a framerate near 30/1, then
 * let the core fixate any remaining fields. */
- (GstCaps *)fixate:(GstCaps *)new_caps
{
  new_caps = gst_caps_truncate (gst_caps_make_writable (new_caps));

  GstStructure *s = gst_caps_get_structure (new_caps, 0);
  /* crank up to 11. This is what the presets do, but we don't use the presets
   * in ios >= 7.0 */
  gst_structure_fixate_field_nearest_int (s, "height", G_MAXINT);
  gst_structure_fixate_field_nearest_fraction (s, "framerate", 30, 1);

  return gst_caps_fixate (new_caps);
}
2016-01-29 04:07:59 +00:00
/* GstBaseSrc::decide_allocation() implementation. After the base class
 * has handled the allocation query, set up (or refresh) the GL texture
 * cache when downstream negotiated GLMemory caps, so captured
 * CVPixelBuffers can be handed out as GL textures. */
- (BOOL)decideAllocation:(GstQuery *)query
{
  GstCaps *alloc_caps;
  GstCapsFeatures *features;
  gboolean ret;

  ret = GST_BASE_SRC_CLASS (parent_class)->decide_allocation (baseSrc, query);
  if (!ret)
    return ret;

  gst_query_parse_allocation (query, &alloc_caps, NULL);
  features = gst_caps_get_features (alloc_caps, 0);
  if (gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
    GstVideoTextureCacheGL *cache_gl;

    cache_gl = textureCache ? GST_VIDEO_TEXTURE_CACHE_GL (textureCache) : NULL;

    gst_gl_context_helper_ensure_context (ctxh);
    GST_INFO_OBJECT (element, "pushing textures, context %p old context %p",
        ctxh->context, cache_gl ? cache_gl->ctx : NULL);
    /* The cache is bound to one GL context: rebuild it if the negotiated
     * context differs from the one it was created with. */
    if (cache_gl && cache_gl->ctx != ctxh->context) {
      g_object_unref (textureCache);
      textureCache = NULL;
    }
    if (!textureCache)
      textureCache = gst_video_texture_cache_gl_new (ctxh->context);
    gst_video_texture_cache_set_format (textureCache, format, alloc_caps);
  }

  return TRUE;
}
2016-02-09 02:42:48 +00:00
/* GstElement::set_context() implementation: pick up a GL display or an
 * application-provided GL context from the pipeline, then chain up. */
- (void)setContext:(GstContext *)context
{
  GST_INFO_OBJECT (element, "setting context %s",
      gst_context_get_context_type (context));
  gst_gl_handle_set_context (element, context,
      &ctxh->display, &ctxh->other_context);
  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
2014-12-26 11:45:41 +00:00
/* Compute a GStreamer timestamp and duration for @sbuf.
 * The AVF presentation timestamp (on the capture input clock) tells how
 * long the frame spent inside AVFoundation before reaching our delegate;
 * that delay is subtracted from the element's running time to produce
 * the output PTS. Both outputs are GST_CLOCK_TIME_NONE when timing info
 * is unavailable; the timestamp is also NONE without a pipeline clock. */
- (void)getSampleBuffer:(CMSampleBufferRef)sbuf
              timestamp:(GstClockTime *)outTimestamp
               duration:(GstClockTime *)outDuration
{
  CMSampleTimingInfo time_info;
  GstClockTime timestamp, avf_timestamp, duration, input_clock_now, input_clock_diff, running_time;
  CMItemCount num_timings;
  GstClock *clock;
  CMTime now;

  timestamp = GST_CLOCK_TIME_NONE;
  duration = GST_CLOCK_TIME_NONE;
  if (CMSampleBufferGetOutputSampleTimingInfoArray (sbuf, 1, &time_info, &num_timings) == noErr) {
    /* Presentation timestamp on the AVF input clock, scaled to ns. */
    avf_timestamp = gst_util_uint64_scale (GST_SECOND,
        time_info.presentationTimeStamp.value, time_info.presentationTimeStamp.timescale);

    /* Guard the timescale: an invalid/zero-timescale duration stays NONE. */
    if (CMTIME_IS_VALID (time_info.duration) && time_info.duration.timescale != 0)
      duration = gst_util_uint64_scale (GST_SECOND,
          time_info.duration.value, time_info.duration.timescale);

    now = CMClockGetTime (inputClock);
    input_clock_now = gst_util_uint64_scale (GST_SECOND,
        now.value, now.timescale);
    /* How long the frame sat in AVF queues before we received it. */
    input_clock_diff = input_clock_now - avf_timestamp;

    GST_OBJECT_LOCK (element);
    clock = GST_ELEMENT_CLOCK (element);
    if (clock) {
      running_time = gst_clock_get_time (clock) - element->base_time;
      /* We use presentationTimeStamp to determine how much time it took
       * between capturing and receiving the frame in our delegate
       * (e.g. how long it spent in AVF queues), then we subtract that time
       * from our running time to get the actual timestamp.
       */
      if (running_time >= input_clock_diff)
        timestamp = running_time - input_clock_diff;
      else
        timestamp = running_time;

      GST_DEBUG_OBJECT (element, "AVF clock: %" GST_TIME_FORMAT ", AVF PTS: %" GST_TIME_FORMAT
          ", AVF clock diff: %" GST_TIME_FORMAT
          ", running time: %" GST_TIME_FORMAT ", out PTS: %" GST_TIME_FORMAT,
          GST_TIME_ARGS (input_clock_now), GST_TIME_ARGS (avf_timestamp),
          GST_TIME_ARGS (input_clock_diff),
          GST_TIME_ARGS (running_time), GST_TIME_ARGS (timestamp));
    } else {
      /* no clock, can't set timestamps */
      timestamp = GST_CLOCK_TIME_NONE;
    }
    GST_OBJECT_UNLOCK (element);
  }

  *outTimestamp = timestamp;
  *outDuration = duration;
}
/* Per-buffer statistics: count frames and, once per second of clock
 * time, publish the measured framerate via the "fps" property (notify
 * is emitted outside the object lock). */
- (void)updateStatistics
{
  GstClock *clock;

  GST_OBJECT_LOCK (element);
  clock = GST_ELEMENT_CLOCK (element);
  if (clock != NULL)
    gst_object_ref (clock);
  GST_OBJECT_UNLOCK (element);

  if (clock == NULL)
    return;

  GstClockTime now = gst_clock_get_time (clock);
  gst_object_unref (clock);

  count++;

  if (!GST_CLOCK_TIME_IS_VALID (lastSampling)) {
    /* First sample: just start the measurement window. */
    lastSampling = now;
    return;
  }

  if (now - lastSampling < GST_SECOND)
    return;

  GST_OBJECT_LOCK (element);
  fps = count;
  GST_OBJECT_UNLOCK (element);

  g_object_notify (G_OBJECT (element), "fps");
  lastSampling = now;
  count = 0;
}
@ end
/ *
* Glue code
* /
/* Element property IDs (see the g_object_class_install_property calls
 * in gst_avf_video_src_class_init). */
enum
{
  PROP_0,
  PROP_DEVICE_INDEX,
  PROP_DEVICE_NAME,
  PROP_POSITION,
  PROP_ORIENTATION,
  PROP_DEVICE_TYPE,
  PROP_DO_STATS,
  PROP_FPS,
#if !HAVE_IOS
  /* Screen-capture properties exist only in the macOS build. */
  PROP_CAPTURE_SCREEN,
  PROP_CAPTURE_SCREEN_CURSOR,
  PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
#endif
};
static void gst_avf _video _src _finalize ( GObject * obj ) ;
static void gst_avf _video _src _get _property ( GObject * object , guint prop_id ,
GValue * value , GParamSpec * pspec ) ;
static void gst_avf _video _src _set _property ( GObject * object , guint prop_id ,
const GValue * value , GParamSpec * pspec ) ;
static GstStateChangeReturn gst_avf _video _src _change _state (
GstElement * element , GstStateChange transition ) ;
2013-10-28 11:53:26 +00:00
static GstCaps * gst_avf _video _src _get _caps ( GstBaseSrc * basesrc ,
GstCaps * filter ) ;
2010-11-04 13:14:09 +00:00
static gboolean gst_avf _video _src _set _caps ( GstBaseSrc * basesrc ,
GstCaps * caps ) ;
static gboolean gst_avf _video _src _start ( GstBaseSrc * basesrc ) ;
static gboolean gst_avf _video _src _stop ( GstBaseSrc * basesrc ) ;
static gboolean gst_avf _video _src _query ( GstBaseSrc * basesrc ,
GstQuery * query ) ;
static gboolean gst_avf _video _src _unlock ( GstBaseSrc * basesrc ) ;
static gboolean gst_avf _video _src _unlock _stop ( GstBaseSrc * basesrc ) ;
static GstFlowReturn gst_avf _video _src _create ( GstPushSrc * pushsrc ,
GstBuffer * * buf ) ;
2015-12-17 03:49:13 +00:00
static GstCaps * gst_avf _video _src _fixate ( GstBaseSrc * bsrc ,
GstCaps * caps ) ;
2016-01-29 04:07:59 +00:00
static gboolean gst_avf _video _src _decide _allocation ( GstBaseSrc * bsrc ,
GstQuery * query ) ;
2016-02-09 02:42:48 +00:00
static void gst_avf _video _src _set _context ( GstElement * element ,
GstContext * context ) ;
2010-11-04 13:14:09 +00:00
/* Class init: hook up the GObject property machinery, the GstElement
 * state/context vfuncs and the GstBaseSrc/GstPushSrc vfuncs, then
 * install the element metadata, pad template and properties. */
static void
gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
  GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
  GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);

  gobject_class->finalize = gst_avf_video_src_finalize;
  gobject_class->get_property = gst_avf_video_src_get_property;
  gobject_class->set_property = gst_avf_video_src_set_property;

  gstelement_class->change_state = gst_avf_video_src_change_state;
  gstelement_class->set_context = gst_avf_video_src_set_context;

  gstbasesrc_class->get_caps = gst_avf_video_src_get_caps;
  gstbasesrc_class->set_caps = gst_avf_video_src_set_caps;
  gstbasesrc_class->start = gst_avf_video_src_start;
  gstbasesrc_class->stop = gst_avf_video_src_stop;
  gstbasesrc_class->query = gst_avf_video_src_query;
  gstbasesrc_class->unlock = gst_avf_video_src_unlock;
  gstbasesrc_class->unlock_stop = gst_avf_video_src_unlock_stop;
  gstbasesrc_class->fixate = gst_avf_video_src_fixate;
  gstbasesrc_class->decide_allocation = gst_avf_video_src_decide_allocation;

  gstpushsrc_class->create = gst_avf_video_src_create;

  gst_element_class_set_metadata (gstelement_class,
      "Video Source (AVFoundation)", "Source/Video/Hardware",
      "Reads frames from an iOS AVFoundation device",
      "Ole André Vadla Ravnås <oleavr@soundrop.com>");

  gst_element_class_add_static_pad_template (gstelement_class, &src_template);

  g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
      g_param_spec_int ("device-index", "Device Index",
          "The zero-based device index",
          -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
      g_param_spec_string ("device-name", "Device Name",
          "The name of the currently opened capture device",
          NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_POSITION,
      g_param_spec_enum ("position", "Position",
          "The position of the capture device (front or back-facing)",
          GST_TYPE_AVF_VIDEO_SOURCE_POSITION, DEFAULT_POSITION,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_ORIENTATION,
      g_param_spec_enum ("orientation", "Orientation",
          "The orientation of the video",
          GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION, DEFAULT_ORIENTATION,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DEVICE_TYPE,
      g_param_spec_enum ("device-type", "Device Type",
          "The general type of a video capture device",
          GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE, DEFAULT_DEVICE_TYPE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DO_STATS,
      g_param_spec_boolean ("do-stats", "Enable statistics",
          "Enable logging of statistics", DEFAULT_DO_STATS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_FPS,
      g_param_spec_int ("fps", "Frames per second",
          "Last measured framerate, if statistics are enabled",
          -1, G_MAXINT, -1, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
#if !HAVE_IOS
  /* Screen-capture properties are only available in the macOS build. */
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN,
      g_param_spec_boolean ("capture-screen", "Enable screen capture",
          "Enable screen capture functionality", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CURSOR,
      g_param_spec_boolean ("capture-screen-cursor", "Capture screen cursor",
          "Enable cursor capture while capturing screen", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
      g_param_spec_boolean ("capture-screen-mouse-clicks", "Enable mouse clicks capture",
          "Enable mouse clicks capture while capturing screen", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
#endif

  GST_DEBUG_CATEGORY_INIT (gst_avf_video_src_debug, "avfvideosrc",
      0, "iOS AVFoundation video source");

  /* Register the enum types as plugin API so they show up in docs. */
  gst_type_mark_as_plugin_api (GST_TYPE_AVF_VIDEO_SOURCE_POSITION, 0);
  gst_type_mark_as_plugin_api (GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION, 0);
  gst_type_mark_as_plugin_api (GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE, 0);
}
/* Instance init: create the Objective-C implementation object and store
 * a retained reference in the C struct. __bridge_retained transfers
 * ownership out of ARC; it is balanced by CFBridgingRelease in
 * gst_avf_video_src_finalize. */
static void
gst_avf_video_src_init (GstAVFVideoSrc * src)
{
  src->impl = (__bridge_retained gpointer) [[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
}
/* Finalize: release the impl reference retained in init, then chain up. */
static void
gst_avf_video_src_finalize (GObject * obj)
{
  CFBridgingRelease (GST_AVF_VIDEO_SRC_CAST (obj)->impl);

  G_OBJECT_CLASS (parent_class)->finalize (obj);
}
/* GObject::get_property: forward property reads to the impl object.
 * PROP_FPS is read under the object lock because it is written from the
 * streaming thread (see updateStatistics). */
static void
gst_avf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);

  switch (prop_id) {
#if !HAVE_IOS
    case PROP_CAPTURE_SCREEN:
      g_value_set_boolean (value, impl.captureScreen);
      break;
    case PROP_CAPTURE_SCREEN_CURSOR:
      g_value_set_boolean (value, impl.captureScreenCursor);
      break;
    case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
      g_value_set_boolean (value, impl.captureScreenMouseClicks);
      break;
#endif
    case PROP_DEVICE_INDEX:
      g_value_set_int (value, impl.deviceIndex);
      break;
    case PROP_DEVICE_NAME:
      g_value_set_string (value, impl.deviceName);
      break;
    case PROP_POSITION:
      g_value_set_enum (value, impl.position);
      break;
    case PROP_ORIENTATION:
      g_value_set_enum (value, impl.orientation);
      break;
    case PROP_DEVICE_TYPE:
      g_value_set_enum (value, impl.deviceType);
      break;
    case PROP_DO_STATS:
      g_value_set_boolean (value, impl.doStats);
      break;
    case PROP_FPS:
      GST_OBJECT_LOCK (object);
      g_value_set_int (value, impl.fps);
      GST_OBJECT_UNLOCK (object);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject::set_property: forward property writes to the impl object.
 * Read-only properties (device-name, fps) have no case here. */
static void
gst_avf_video_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);

  switch (prop_id) {
#if !HAVE_IOS
    case PROP_CAPTURE_SCREEN:
      impl.captureScreen = g_value_get_boolean (value);
      break;
    case PROP_CAPTURE_SCREEN_CURSOR:
      impl.captureScreenCursor = g_value_get_boolean (value);
      break;
    case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
      impl.captureScreenMouseClicks = g_value_get_boolean (value);
      break;
#endif
    case PROP_DEVICE_INDEX:
      impl.deviceIndex = g_value_get_int (value);
      break;
    case PROP_POSITION:
      impl.position = g_value_get_enum (value);
      break;
    case PROP_ORIENTATION:
      impl.orientation = g_value_get_enum (value);
      break;
    case PROP_DEVICE_TYPE:
      impl.deviceType = g_value_get_enum (value);
      break;
    case PROP_DO_STATS:
      impl.doStats = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GstElement::change_state vfunc: delegate to the ObjC impl object. */
static GstStateChangeReturn
gst_avf_video_src_change_state (GstElement * element, GstStateChange transition)
{
  return [GST_AVF_VIDEO_SRC_IMPL (element) changeState:transition];
}
/* GstBaseSrc::get_caps vfunc: delegate to the ObjC impl object.
 * Note: the filter caps are not applied here. */
static GstCaps *
gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
}
/* GstBaseSrc::set_caps vfunc: delegate to the ObjC impl object. */
static gboolean
gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
}
/* GstBaseSrc::start vfunc: delegate to the ObjC impl object. */
static gboolean
gst_avf_video_src_start (GstBaseSrc * basesrc)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
}
/* GstBaseSrc::stop vfunc: delegate to the ObjC impl object. */
static gboolean
gst_avf_video_src_stop (GstBaseSrc * basesrc)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
}
/* GstBaseSrc::query vfunc: delegate to the ObjC impl object. */
static gboolean
gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
}
/* GstBaseSrc::unlock vfunc: delegate to the ObjC impl object. */
static gboolean
gst_avf_video_src_unlock (GstBaseSrc * basesrc)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
}
/* GstBaseSrc::unlock_stop vfunc: delegate to the ObjC impl object. */
static gboolean
gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
{
  return [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
}
/* GstPushSrc::create vfunc: delegate to the ObjC impl object. */
static GstFlowReturn
gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
{
  return [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create:buf];
}
2014-11-09 09:25:25 +00:00
2015-12-16 03:38:44 +00:00
/* GstBaseSrc::fixate vfunc: delegate to the ObjC impl object. */
static GstCaps *
gst_avf_video_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
  return [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps];
}
2016-01-29 04:07:59 +00:00
/* GstBaseSrc::decide_allocation vfunc: delegate to the ObjC impl object. */
static gboolean
gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
    GstQuery * query)
{
  return [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query];
}
2016-02-09 02:42:48 +00:00
/* GstElement::set_context vfunc: delegate to the ObjC impl object. */
static void
gst_avf_video_src_set_context (GstElement * element, GstContext * context)
{
  [GST_AVF_VIDEO_SRC_IMPL (element) setContext:context];
}
2019-01-27 03:09:59 +00:00
/* Enumerate the caps supported by @device with @output, taking the
 * requested @orientation into account (portrait swaps width/height).
 * For each device format x framerate range x output pixel format, a raw
 * caps entry is appended; formats matching the platform's preferred GL
 * upload format additionally get a GLMemory-featured entry with the
 * appropriate texture target. GL caps are merged first so they are
 * preferred during negotiation. Returns a newly allocated GstCaps. */
GstCaps *
gst_av_capture_device_get_caps (AVCaptureDevice * device, AVCaptureVideoDataOutput * output, GstAVFVideoSourceOrientation orientation)
{
  NSArray *formats = [device valueForKey:@"formats"];
  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
  GstCaps *result_caps, *result_gl_caps;
#if !HAVE_IOS
  GstVideoFormat gl_format = GST_VIDEO_FORMAT_UYVY;
#else
  GstVideoFormat gl_format = GST_VIDEO_FORMAT_NV12;
#endif

  result_caps = gst_caps_new_empty ();
  result_gl_caps = gst_caps_new_empty ();

  /* Do not use AVCaptureDeviceFormat or AVFrameRateRange only
   * available in iOS >= 7.0. We use a dynamic approach with key-value
   * coding or performSelector */
  for (NSObject *f in [formats reverseObjectEnumerator]) {
    /* formatDescription can't be retrieved with valueForKey so use a selector here */
    CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions (formatDescription);
    dimensions = get_oriented_dimensions (orientation, dimensions);

    for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
      int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
      gdouble min_fps, max_fps;

      /* Frame rates come back as doubles; convert to exact fractions. */
      [[rate valueForKey:@"minFrameRate"] getValue:&min_fps];
      gst_util_double_to_fraction (min_fps, &min_fps_n, &min_fps_d);

      [[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
      gst_util_double_to_fraction (max_fps, &max_fps_n, &max_fps_d);

      for (NSNumber *pixel_format in pixel_formats) {
        GstVideoFormat gst_format = get_gst_video_format (pixel_format);

        if (gst_format != GST_VIDEO_FORMAT_UNKNOWN) {
          if (min_fps != max_fps)
            gst_caps_append (result_caps, GST_AVF_FPS_RANGE_CAPS_NEW (gst_format, dimensions.width, dimensions.height, min_fps_n, min_fps_d, max_fps_n, max_fps_d));
          else
            gst_caps_append (result_caps, GST_AVF_CAPS_NEW (gst_format, dimensions.width, dimensions.height, max_fps_n, max_fps_d));
        }

        if (gst_format == gl_format) {
          GstCaps *gl_caps;
          if (min_fps != max_fps) {
            gl_caps = GST_AVF_FPS_RANGE_CAPS_NEW (gl_format,
                dimensions.width, dimensions.height,
                min_fps_n, min_fps_d,
                max_fps_n, max_fps_d);
          } else {
            gl_caps = GST_AVF_CAPS_NEW (gl_format,
                dimensions.width, dimensions.height,
                max_fps_n, max_fps_d);
          }
          gst_caps_set_features (gl_caps, 0,
              gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
                  NULL));
          /* macOS IOSurface textures are rectangle targets; iOS uses 2D. */
          gst_caps_set_simple (gl_caps,
              "texture-target", G_TYPE_STRING,
#if !HAVE_IOS
              GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
#else
              GST_GL_TEXTURE_TARGET_2D_STR,
#endif
              NULL);
          gst_caps_append (result_gl_caps, gl_caps);
        }
      }
    }
  }

  result_gl_caps = gst_caps_simplify (gst_caps_merge (result_gl_caps, result_caps));

  return result_gl_caps;
}
/* Map a CoreVideo pixel format code (wrapped in an NSNumber) to the
 * corresponding GstVideoFormat; unknown codes map to
 * GST_VIDEO_FORMAT_UNKNOWN. */
static GstVideoFormat
get_gst_video_format (NSNumber * pixel_format)
{
  switch ([pixel_format integerValue]) {
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
      return GST_VIDEO_FORMAT_NV12;
    case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
      return GST_VIDEO_FORMAT_UYVY;
    case kCVPixelFormatType_32BGRA: /* BGRA */
      return GST_VIDEO_FORMAT_BGRA;
    case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
      return GST_VIDEO_FORMAT_YUY2;
    default:
      return GST_VIDEO_FORMAT_UNKNOWN;
  }
}
/* Return @dimensions with width and height swapped when @orientation is
 * a portrait mode; otherwise return them unchanged. */
static CMVideoDimensions
get_oriented_dimensions (GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions)
{
  BOOL is_portrait =
      orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT ||
      orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN;

  if (!is_portrait)
    return dimensions;

  CMVideoDimensions swapped;
  swapped.width = dimensions.height;
  swapped.height = dimensions.width;
  return swapped;
}