avfvideosrc: Negotiate format, also support more resolutions

This should enable support for old and new iPhones with the same code.
This commit is contained in:
Olivier Crête 2012-10-30 19:30:38 +01:00
parent 701fd9b336
commit 9a328a8f71

View file

@ -27,8 +27,6 @@
#define DEFAULT_DEVICE_INDEX -1 #define DEFAULT_DEVICE_INDEX -1
#define DEFAULT_DO_STATS FALSE #define DEFAULT_DO_STATS FALSE
#define DEVICE_VIDEO_FORMAT GST_VIDEO_FORMAT_YUY2
#define DEVICE_YUV_FOURCC "YUY2"
#define DEVICE_FPS_N 25 #define DEVICE_FPS_N 25
#define DEVICE_FPS_D 1 #define DEVICE_FPS_D 1
@ -37,11 +35,36 @@
GST_DEBUG_CATEGORY (gst_avf_video_src_debug); GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
#define GST_CAT_DEFAULT gst_avf_video_src_debug #define GST_CAT_DEFAULT gst_avf_video_src_debug
#define VIDEO_CAPS_YUV(width, height) "video/x-raw-yuv, " \
"format = (fourcc) { NV12, UYVY, YUY2 }, " \
"framerate = " GST_VIDEO_FPS_RANGE ", " \
"width = (int) " G_STRINGIFY (width) ", height = (int) " G_STRINGIFY (height)
#define VIDEO_CAPS_BGRA(width, height) "video/x-raw-rgb, " \
"bpp = (int) 32, " \
"depth = (int) 32, " \
"endianness = (int) BIG_ENDIAN, " \
"red_mask = (int) " GST_VIDEO_BYTE3_MASK_32 ", " \
"green_mask = (int) " GST_VIDEO_BYTE2_MASK_32 ", " \
"blue_mask = (int) " GST_VIDEO_BYTE1_MASK_32 ", " \
"alpha_mask = (int) " GST_VIDEO_BYTE4_MASK_32 ", " \
"width = (int) " G_STRINGIFY (width) ", height = (int) " G_STRINGIFY (height)
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src", static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC, GST_PAD_SRC,
GST_PAD_ALWAYS, GST_PAD_ALWAYS,
GST_STATIC_CAPS ( GST_STATIC_CAPS (VIDEO_CAPS_YUV (192, 144) ";"
GST_VIDEO_CAPS_YUV (DEVICE_YUV_FOURCC)) VIDEO_CAPS_YUV (480, 360) ";"
VIDEO_CAPS_YUV (352, 288) ";"
VIDEO_CAPS_YUV (640, 480) ";"
VIDEO_CAPS_YUV (1280, 720) ";"
VIDEO_CAPS_YUV (1920, 1080) ";"
VIDEO_CAPS_BGRA (192, 144) ";"
VIDEO_CAPS_BGRA (480, 360) ";"
VIDEO_CAPS_BGRA (352, 288) ";"
VIDEO_CAPS_BGRA (640, 480) ";"
VIDEO_CAPS_BGRA (1280, 720) ";"
VIDEO_CAPS_BGRA (1920, 1080))
); );
typedef enum _QueueState { typedef enum _QueueState {
@ -71,6 +94,7 @@ static GstPushSrcClass * parent_class;
NSMutableArray *bufQueue; NSMutableArray *bufQueue;
BOOL stopRequest; BOOL stopRequest;
GstVideoFormat format;
gint width, height; gint width, height;
GstClockTime duration; GstClockTime duration;
guint64 offset; guint64 offset;
@ -249,43 +273,74 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
bufferFactory = nil; bufferFactory = nil;
} }
#define GST_AVF_CAPS_NEW(w, h) \ #define GST_AVF_CAPS_NEW(format, w, h) \
(gst_video_format_new_caps (DEVICE_VIDEO_FORMAT, w, h, \ (gst_video_format_new_caps (format, w, h, \
DEVICE_FPS_N, DEVICE_FPS_D, 1, 1)) DEVICE_FPS_N, DEVICE_FPS_D, 1, 1))
- (GstCaps *)getCaps - (GstCaps *)getCaps
{ {
GstCaps *result; GstCaps *result;
NSArray *formats;
if (session == nil) if (session == nil)
return NULL; /* BaseSrc will return template caps */ return NULL; /* BaseSrc will return template caps */
result = gst_caps_new_empty ();
result = GST_AVF_CAPS_NEW (192, 144); formats = output.availableVideoCVPixelFormatTypes;
if ([session canSetSessionPreset:AVCaptureSessionPresetMedium]) for (id object in formats) {
gst_caps_merge (result, GST_AVF_CAPS_NEW (480, 360)); NSNumber *nsformat = object;
if ([session canSetSessionPreset:AVCaptureSessionPreset640x480]) GstVideoFormat gstformat = GST_VIDEO_FORMAT_UNKNOWN;
gst_caps_merge (result, GST_AVF_CAPS_NEW (640, 480));
if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720]) switch ([nsformat integerValue]) {
gst_caps_merge (result, GST_AVF_CAPS_NEW (1280, 720)); case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
gstformat = GST_VIDEO_FORMAT_NV12;
break;
case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
gstformat = GST_VIDEO_FORMAT_UYVY;
break;
case kCVPixelFormatType_32BGRA: /* BGRA */
gstformat = GST_VIDEO_FORMAT_BGRA;
break;
case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
gstformat = GST_VIDEO_FORMAT_YUY2;
break;
default:
continue;
}
gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 192, 144));
if ([session canSetSessionPreset:AVCaptureSessionPreset352x288])
gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 352, 288));
if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 480, 360));
if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 640, 480));
if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 1280, 720));
if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 1920, 1080));
}
return result; return result;
} }
- (BOOL)setCaps:(GstCaps *)caps - (BOOL)setCaps:(GstCaps *)caps
{ {
GstStructure *s; gst_video_format_parse_caps (caps, &format, &width, &height);
s = gst_caps_get_structure (caps, 0);
gst_structure_get_int (s, "width", &width);
gst_structure_get_int (s, "height", &height);
dispatch_async (mainQueue, ^{ dispatch_async (mainQueue, ^{
int newformat;
g_assert (![session isRunning]); g_assert (![session isRunning]);
switch (width) { switch (width) {
case 192: case 192:
session.sessionPreset = AVCaptureSessionPresetLow; session.sessionPreset = AVCaptureSessionPresetLow;
break; break;
case 352:
session.sessionPreset = AVCaptureSessionPreset352x288;
break;
case 480: case 480:
session.sessionPreset = AVCaptureSessionPresetMedium; session.sessionPreset = AVCaptureSessionPresetMedium;
break; break;
@ -295,10 +350,38 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
case 1280: case 1280:
session.sessionPreset = AVCaptureSessionPreset1280x720; session.sessionPreset = AVCaptureSessionPreset1280x720;
break; break;
case 1920:
session.sessionPreset = AVCaptureSessionPreset1920x1080;
break;
default: default:
g_assert_not_reached (); g_assert_not_reached ();
} }
switch (format) {
case GST_VIDEO_FORMAT_NV12:
newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
break;
case GST_VIDEO_FORMAT_UYVY:
newformat = kCVPixelFormatType_422YpCbCr8;
break;
case GST_VIDEO_FORMAT_YUY2:
newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
break;
case GST_VIDEO_FORMAT_BGRA:
newformat = kCVPixelFormatType_32BGRA;
break;
default:
g_assert_not_reached ();
}
GST_DEBUG_OBJECT(element,
"Width: %d Height: %d Format: %" GST_FOURCC_FORMAT,
width, height,
GST_FOURCC_ARGS (gst_video_format_to_fourcc (format)));
output.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:newformat] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[session startRunning]; [session startRunning];
}); });
[self waitForMainQueueToDrain]; [self waitForMainQueueToDrain];