applemedia: avfvideosrc supports position, device-type, and orientation

avfvideosrc represents an iPhone camera or, on Mac, a screen capture session.
The old API only allowed selecting an input device by device index. The new
API adds the ability to select the position (front- or back-facing camera) and
device-type (wide-angle, telephoto, etc.). Furthermore, you can now specify
the orientation (portrait, landscape, etc.) of the video stream.
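
A minimal sketch (not part of this commit) of how an application might use
the new properties: the enum nicks ("back", "telephoto", "portrait") are the
ones registered by the GEnumValue tables in the diff below, while the
autovideosink-based pipeline around them is purely illustrative.

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *src, *sink;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new ("capture");
  src = gst_element_factory_make ("avfvideosrc", NULL);
  sink = gst_element_factory_make ("autovideosink", NULL);

  /* Select the back-facing telephoto camera and rotate the stream to
   * portrait; properties left unset keep their DEFAULT behaviour. */
  gst_util_set_object_arg (G_OBJECT (src), "position", "back");
  gst_util_set_object_arg (G_OBJECT (src), "device-type", "telephoto");
  gst_util_set_object_arg (G_OBJECT (src), "orientation", "portrait");

  gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
  gst_element_link (src, sink);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Block until an error or EOS is posted on the bus, then shut down. */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}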

https://bugzilla.gnome.org/show_bug.cgi?id=778333
Nick Kallen 2017-02-08 12:23:09 +01:00 committed by Sebastian Dröge
parent 46bbc60d24
commit 0968bad8a4
2 changed files with 215 additions and 14 deletions

avfvideosrc.h

@@ -42,6 +42,30 @@ G_BEGIN_DECLS
typedef struct _GstAVFVideoSrc GstAVFVideoSrc;
typedef struct _GstAVFVideoSrcClass GstAVFVideoSrcClass;
typedef enum
{
GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT,
GST_AVF_VIDEO_SOURCE_POSITION_FRONT,
GST_AVF_VIDEO_SOURCE_POSITION_BACK,
} GstAVFVideoSourcePosition;
typedef enum
{
GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT,
GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT,
GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN,
GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT,
GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT,
} GstAVFVideoSourceOrientation;
typedef enum
{
GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT,
GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA,
GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA,
GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA,
} GstAVFVideoSourceDeviceType;
struct _GstAVFVideoSrc
{
GstPushSrc push_src;

avfvideosrc.m

@@ -35,6 +35,9 @@
#include "videotexturecache.h"
#define DEFAULT_DEVICE_INDEX -1
#define DEFAULT_POSITION GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT
#define DEFAULT_ORIENTATION GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT
#define DEFAULT_DEVICE_TYPE GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT
#define DEFAULT_DO_STATS FALSE
#define DEVICE_FPS_N 25
@@ -81,12 +84,84 @@ typedef enum _QueueState {
#define gst_avf_video_src_parent_class parent_class
G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
#define GST_TYPE_AVF_VIDEO_SOURCE_POSITION (gst_avf_video_source_position_get_type ())
static GType
gst_avf_video_source_position_get_type (void)
{
static GType avf_video_source_position_type = 0;
if (!avf_video_source_position_type) {
static GEnumValue position_types[] = {
{ GST_AVF_VIDEO_SOURCE_POSITION_FRONT, "Front-facing camera", "front" },
{ GST_AVF_VIDEO_SOURCE_POSITION_BACK, "Back-facing camera", "back" },
{ GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT, "Default", "default" },
{ 0, NULL, NULL },
};
avf_video_source_position_type =
g_enum_register_static ("GstAVFVideoSourcePosition",
position_types);
}
return avf_video_source_position_type;
}
#define GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION (gst_avf_video_source_orientation_get_type ())
static GType
gst_avf_video_source_orientation_get_type (void)
{
static GType avf_video_source_orientation_type = 0;
if (!avf_video_source_orientation_type) {
static GEnumValue orientation_types[] = {
{ GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT, "Indicates that video should be oriented vertically, top at the top.", "portrait" },
{ GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN, "Indicates that video should be oriented vertically, top at the bottom.", "portrait-upside-down" },
{ GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT, "Indicates that video should be oriented horizontally, top on the left.", "landscape-right" },
{ GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT, "Indicates that video should be oriented horizontally, top on the right.", "landscape-left" },
{ GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT, "Default", "default" },
{ 0, NULL, NULL },
};
avf_video_source_orientation_type =
g_enum_register_static ("GstAVFVideoSourceOrientation",
orientation_types);
}
return avf_video_source_orientation_type;
}
#define GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE (gst_avf_video_source_device_type_get_type ())
static GType
gst_avf_video_source_device_type_get_type (void)
{
static GType avf_video_source_device_type_type = 0;
if (!avf_video_source_device_type_type) {
static GEnumValue device_type_types[] = {
{ GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA, "A built-in wide angle camera. These devices are suitable for general purpose use.", "wide-angle" },
{ GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA, "A built-in camera device with a longer focal length than a wide-angle camera.", "telephoto" },
{ GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA, "A dual camera device, combining built-in wide-angle and telephoto cameras that work together as a single capture device.", "dual" },
{ GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT, "Default", "default" },
{ 0, NULL, NULL },
};
avf_video_source_device_type_type =
g_enum_register_static ("GstAVFVideoSourceDeviceType",
device_type_types);
}
return avf_video_source_device_type_type;
}
@interface GstAVFVideoSrcImpl : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
GstElement *element;
GstBaseSrc *baseSrc;
GstPushSrc *pushSrc;
gint deviceIndex;
GstAVFVideoSourcePosition position;
GstAVFVideoSourceOrientation orientation;
GstAVFVideoSourceDeviceType deviceType;
BOOL doStats;
AVCaptureSession *session;
@@ -125,6 +200,9 @@ G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
- (void)finalize;
@property int deviceIndex;
@property GstAVFVideoSourcePosition position;
@property GstAVFVideoSourceOrientation orientation;
@property GstAVFVideoSourceDeviceType deviceType;
@property BOOL doStats;
@property int fps;
@property BOOL captureScreen;
@@ -163,10 +241,50 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
@end
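/* Helpers mapping the GStreamer enums onto their AVFoundation counterparts.
 * The DEFAULT values are deliberately not mapped: callers only convert a
 * property that has been set to something other than its default, so
 * reaching a DEFAULT case here is a programming error. */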
static AVCaptureDeviceType GstAVFVideoSourceDeviceType2AVCaptureDeviceType(GstAVFVideoSourceDeviceType deviceType) {
switch (deviceType) {
case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA:
return AVCaptureDeviceTypeBuiltInWideAngleCamera;
case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA:
return AVCaptureDeviceTypeBuiltInTelephotoCamera;
case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA:
return AVCaptureDeviceTypeBuiltInDualCamera;
case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT:
g_assert_not_reached();
}
}
static AVCaptureDevicePosition GstAVFVideoSourcePosition2AVCaptureDevicePosition(GstAVFVideoSourcePosition position) {
switch (position) {
case GST_AVF_VIDEO_SOURCE_POSITION_FRONT:
return AVCaptureDevicePositionFront;
case GST_AVF_VIDEO_SOURCE_POSITION_BACK:
return AVCaptureDevicePositionBack;
case GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT:
g_assert_not_reached();
}
}
static AVCaptureVideoOrientation GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(GstAVFVideoSourceOrientation orientation) {
switch (orientation) {
case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT:
return AVCaptureVideoOrientationPortrait;
case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN:
return AVCaptureVideoOrientationPortraitUpsideDown;
case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT:
return AVCaptureVideoOrientationLandscapeLeft;
case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT:
return AVCaptureVideoOrientationLandscapeRight;
case GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT:
g_assert_not_reached();
}
}
@implementation GstAVFVideoSrcImpl
@synthesize deviceIndex, doStats, fps, captureScreen,
captureScreenCursor, captureScreenMouseClicks;
@synthesize deviceIndex, position, orientation, deviceType, doStats,
fps, captureScreen, captureScreenCursor, captureScreenMouseClicks;
- (id)init
{
@@ -181,6 +299,9 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
pushSrc = src;
deviceIndex = DEFAULT_DEVICE_INDEX;
position = DEFAULT_POSITION;
orientation = DEFAULT_ORIENTATION;
deviceType = DEFAULT_DEVICE_TYPE;
captureScreen = NO;
captureScreenCursor = NO;
captureScreenMouseClicks = NO;
@@ -211,13 +332,20 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
NSError *err;
if (deviceIndex == DEFAULT_DEVICE_INDEX) {
device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
if (deviceType != DEFAULT_DEVICE_TYPE && position != DEFAULT_POSITION) {
device = [AVCaptureDevice
defaultDeviceWithDeviceType:GstAVFVideoSourceDeviceType2AVCaptureDeviceType(deviceType)
mediaType:mediaType
position:GstAVFVideoSourcePosition2AVCaptureDevicePosition(position)];
} else {
device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
}
if (device == nil) {
GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
("No video capture devices found"), (NULL));
return NO;
}
} else {
} else { // deviceIndex takes priority over position and deviceType
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
if (deviceIndex >= [devices count]) {
GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
@@ -306,6 +434,8 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
/* retained by session */
connection = [[output connections] firstObject];
if (orientation != DEFAULT_ORIENTATION)
connection.videoOrientation = GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(orientation);
inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;
*successPtr = YES;
@@ -421,6 +551,20 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
}
#endif
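/* Capture formats report their dimensions in the sensor's native landscape
 * layout; when a portrait orientation is selected the buffers are rotated,
 * so the width and height advertised in the caps have to be swapped. */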
- (CMVideoDimensions)orientedDimensions:(CMVideoDimensions)dimensions
{
CMVideoDimensions orientedDimensions;
if (orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN ||
orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT) {
orientedDimensions.width = dimensions.height;
orientedDimensions.height = dimensions.width;
} else {
orientedDimensions = dimensions;
}
return orientedDimensions;
}
- (GstCaps *)getDeviceCaps
{
NSArray *formats = [device valueForKey:@"formats"];
@@ -441,12 +585,11 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
* available in iOS >= 7.0. We use a dynamic approach with key-value
* coding or performSelector */
for (NSObject *f in [formats reverseObjectEnumerator]) {
CMFormatDescriptionRef formatDescription;
CMVideoDimensions dimensions;
/* formatDescription can't be retrieved with valueForKey so use a selector here */
formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
dimensions = [self orientedDimensions:dimensions];
for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
gdouble min_fps, max_fps;
@@ -514,11 +657,9 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
if ([device lockForConfiguration:NULL] == YES) {
for (NSObject *f in formats) {
CMFormatDescriptionRef formatDescription;
CMVideoDimensions dimensions;
formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
dimensions = [self orientedDimensions:dimensions];
if (dimensions.width == info->width && dimensions.height == info->height) {
found_format = TRUE;
[device setValue:f forKey:@"activeFormat"];
@@ -1108,6 +1249,9 @@ enum
{
PROP_0,
PROP_DEVICE_INDEX,
PROP_POSITION,
PROP_ORIENTATION,
PROP_DEVICE_TYPE,
PROP_DO_STATS,
PROP_FPS,
#if !HAVE_IOS
@@ -1183,6 +1327,21 @@ gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
"The zero-based device index",
-1, G_MAXINT, DEFAULT_DEVICE_INDEX,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_POSITION,
g_param_spec_enum ("position", "Position",
"The position of the capture device (front or back-facing)",
GST_TYPE_AVF_VIDEO_SOURCE_POSITION, DEFAULT_POSITION,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_ORIENTATION,
g_param_spec_enum ("orientation", "Orientation",
"The orientation of the video",
GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION, DEFAULT_ORIENTATION,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_DEVICE_TYPE,
g_param_spec_enum ("device-type", "Device Type",
"The general type of a video capture device",
GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE, DEFAULT_DEVICE_TYPE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_DO_STATS,
g_param_spec_boolean ("do-stats", "Enable statistics",
"Enable logging of statistics", DEFAULT_DO_STATS,
@@ -1245,6 +1404,15 @@ gst_avf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
case PROP_DEVICE_INDEX:
g_value_set_int (value, impl.deviceIndex);
break;
case PROP_POSITION:
g_value_set_enum(value, impl.position);
break;
case PROP_ORIENTATION:
g_value_set_enum(value, impl.orientation);
break;
case PROP_DEVICE_TYPE:
g_value_set_enum(value, impl.deviceType);
break;
case PROP_DO_STATS:
g_value_set_boolean (value, impl.doStats);
break;
@@ -1280,6 +1448,15 @@ gst_avf_video_src_set_property (GObject * object, guint prop_id,
case PROP_DEVICE_INDEX:
impl.deviceIndex = g_value_get_int (value);
break;
case PROP_POSITION:
impl.position = g_value_get_enum(value);
break;
case PROP_ORIENTATION:
impl.orientation = g_value_get_enum(value);
break;
case PROP_DEVICE_TYPE:
impl.deviceType = g_value_get_enum(value);
break;
case PROP_DO_STATS:
impl.doStats = g_value_get_boolean (value);
break;