applemedia: avoid implicit color conversions

Rework the GL texture code a little to avoid implicit color conversion inside
AVF/VT on both iOS and OSX.
Alessandro Decina 2015-04-20 17:08:23 +10:00
parent c98eb6f2f5
commit edf9035d02
4 changed files with 95 additions and 50 deletions
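The gist of the change, pulled together from the hunks below: the negotiated GstVideoFormat no longer has to be the format AVF/VT produces internally. avfvideosrc (and vtdec) pick a capture/output format that CoreVideo can hand straight to the GL texture cache (UYVY on OSX, BGRA or NV12 on iOS) and record it as internalFormat, so no implicit colour conversion happens inside the framework. The following is a minimal sketch of that mapping only, not part of the patch: the helper name choose_capture_format() and its signature are invented for illustration, while the constants and the HAVE_IOS split mirror the switch in the avfvideosrc hunk (HAVE_IOS is the tree's config macro).

/* Sketch only: maps the negotiated format to the CoreVideo pixel format
 * actually requested from AVF, plus the format the buffers really carry. */
#include <gst/video/video.h>
#include <CoreVideo/CoreVideo.h>

static gboolean
choose_capture_format (GstVideoFormat negotiated, OSType * cv_format,
    GstVideoFormat * internal_format)
{
  /* by default the internal format is the negotiated one */
  *internal_format = negotiated;

  switch (negotiated) {
    case GST_VIDEO_FORMAT_NV12:
      *cv_format = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
      break;
    case GST_VIDEO_FORMAT_YUY2:
      *cv_format = kCVPixelFormatType_422YpCbCr8_yuvs;
      break;
    case GST_VIDEO_FORMAT_RGBA:
      /* RGBA only exists as GL textures: capture UYVY (OSX) or BGRA (iOS)
       * and let the texture cache hand out RGBA textures */
#if !HAVE_IOS
      *cv_format = kCVPixelFormatType_422YpCbCr8;
      *internal_format = GST_VIDEO_FORMAT_UYVY;
#else
      *cv_format = kCVPixelFormatType_32BGRA;
      *internal_format = GST_VIDEO_FORMAT_BGRA;
#endif
      break;
    case GST_VIDEO_FORMAT_BGRA:
      *cv_format = kCVPixelFormatType_32BGRA;
      break;
    default:
      return FALSE;
  }
  return TRUE;
}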

View file

@@ -93,6 +93,7 @@ G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);
BOOL stopRequest;
GstCaps *caps;
GstVideoFormat internalFormat;
GstVideoFormat format;
gint width, height;
GstClockTime latency;
@@ -625,6 +626,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
width = info.width;
height = info.height;
format = info.finfo->format;
internalFormat = GST_VIDEO_FORMAT_UNKNOWN;
latency = gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);
dispatch_sync (mainQueue, ^{
@@ -664,6 +666,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
}
}
internalFormat = format;
switch (format) {
case GST_VIDEO_FORMAT_NV12:
newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
@@ -675,9 +678,14 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
break;
case GST_VIDEO_FORMAT_RGBA:
/* In order to do RGBA, we negotiate BGRA (since RGBA is not supported
* if not in textures) and then we get RGBA textures via
* CVOpenGL*TextureCacheCreateTextureFromImage. Computers. */
#if !HAVE_IOS
newformat = kCVPixelFormatType_422YpCbCr8;
internalFormat = GST_VIDEO_FORMAT_UYVY;
#else
newformat = kCVPixelFormatType_32BGRA;
internalFormat = GST_VIDEO_FORMAT_BGRA;
#endif
break;
case GST_VIDEO_FORMAT_BGRA:
newformat = kCVPixelFormatType_32BGRA;
break;
@@ -688,10 +696,10 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
return;
}
GST_DEBUG_OBJECT(element,
"Width: %d Height: %d Format: %" GST_FOURCC_FORMAT,
width, height,
GST_FOURCC_ARGS (gst_video_format_to_fourcc (format)));
GST_INFO_OBJECT(element,
"width: %d height: %d format: %s internalFormat: %s", width, height,
gst_video_format_to_string (format),
gst_video_format_to_string (internalFormat));
output.videoSettings = [NSDictionary
dictionaryWithObject:[NSNumber numberWithInt:newformat]
@@ -790,10 +798,10 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
gst_query_unref (query);
if (glContext) {
GST_INFO_OBJECT (element, "pushing textures. GL context %p", glContext);
GST_INFO_OBJECT (element, "pushing textures. Internal format %s, context %p",
gst_video_format_to_string (internalFormat), glContext);
textureCache = gst_core_video_texture_cache_new (glContext);
gst_core_video_texture_cache_set_format (textureCache,
"NV12", caps);
gst_core_video_texture_cache_set_format (textureCache, internalFormat, caps);
gst_object_unref (glContext);
} else {
GST_WARNING_OBJECT (element, "got memory:GLMemory caps but not GL context from downstream element");

View file

@@ -42,7 +42,7 @@ typedef struct _GstCoreVideoTextureCache
GstCoreVideoTextureCache *gst_core_video_texture_cache_new (GstGLContext * ctx);
void gst_core_video_texture_cache_free (GstCoreVideoTextureCache * cache);
void gst_core_video_texture_cache_set_format (GstCoreVideoTextureCache * cache,
const gchar * input_format, GstCaps * out_caps);
GstVideoFormat in_format, GstCaps * out_caps);
gboolean gst_core_video_texture_cache_upload (GstVideoGLTextureUploadMeta * meta, guint texture_id[4]);
GstBuffer * gst_core_video_texture_cache_get_gl_buffer (GstCoreVideoTextureCache * cache,
GstBuffer * cv_buffer);

View file

@@ -84,7 +84,7 @@ gst_core_video_texture_cache_free (GstCoreVideoTextureCache * cache)
void
gst_core_video_texture_cache_set_format (GstCoreVideoTextureCache * cache,
const gchar * input_format, GstCaps * out_caps)
GstVideoFormat in_format, GstCaps * out_caps)
{
GstCaps *in_caps;
GstCapsFeatures *features;
@@ -97,7 +97,8 @@ gst_core_video_texture_cache_set_format (GstCoreVideoTextureCache * cache,
gst_video_info_from_caps (&cache->output_info, out_caps);
in_caps = gst_caps_copy (out_caps);
gst_caps_set_simple (in_caps, "format", G_TYPE_STRING, input_format, NULL);
gst_caps_set_simple (in_caps, "format",
G_TYPE_STRING, gst_video_format_to_string (in_format), NULL);
features = gst_caps_get_features (in_caps, 0);
gst_caps_features_add (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
gst_video_info_from_caps (&cache->input_info, in_caps);
@@ -127,6 +128,7 @@ static gboolean
gl_mem_from_buffer (GstCoreVideoTextureCache * cache,
GstBuffer * buffer, GstMemory **mem1, GstMemory **mem2)
{
gboolean ret = TRUE;
#if !HAVE_IOS
CVOpenGLTextureRef texture = NULL;
#else
@@ -139,7 +141,15 @@ gl_mem_from_buffer (GstCoreVideoTextureCache * cache,
#if !HAVE_IOS
CVOpenGLTextureCacheFlush (cache->cache, 0);
#else
CVOpenGLESTextureCacheFlush (cache->cache, 0);
#endif
switch (GST_VIDEO_INFO_FORMAT (&cache->input_info)) {
#if !HAVE_IOS
case GST_VIDEO_FORMAT_UYVY:
/* both avfvideosrc and vtdec on OSX when doing GLMemory negotiate UYVY
* under the hood, which means a single output texture. */
if (CVOpenGLTextureCacheCreateTextureFromImage (kCFAllocatorDefault,
cache->cache, pixel_buf, NULL, &texture) != kCVReturnSuccess)
goto error;
@@ -147,9 +157,23 @@ gl_mem_from_buffer (GstCoreVideoTextureCache * cache,
*mem1 = (GstMemory *) gst_gl_memory_wrapped_texture (cache->ctx,
CVOpenGLTextureGetName (texture), CVOpenGLTextureGetTarget (texture),
&cache->input_info, 0, NULL, texture, (GDestroyNotify) CFRelease);
break;
#else
CVOpenGLESTextureCacheFlush (cache->cache, 0);
case GST_VIDEO_FORMAT_BGRA:
/* avfvideosrc does BGRA on iOS when doing GLMemory */
if (CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault,
cache->cache, pixel_buf, NULL, GL_TEXTURE_2D, GL_RGBA,
GST_VIDEO_INFO_WIDTH (&cache->input_info),
GST_VIDEO_INFO_HEIGHT (&cache->input_info),
GL_RGBA, GL_UNSIGNED_BYTE, 0, &texture) != kCVReturnSuccess)
goto error;
*mem1 = (GstMemory *) gst_gl_memory_wrapped_texture (cache->ctx,
CVOpenGLESTextureGetName (texture), CVOpenGLESTextureGetTarget (texture),
&cache->input_info, 0, NULL, texture, (GDestroyNotify) CFRelease);
break;
case GST_VIDEO_FORMAT_NV12:
/* vtdec does NV12 on iOS when doing GLMemory */
if (CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault,
cache->cache, pixel_buf, NULL, GL_TEXTURE_2D, GL_LUMINANCE,
GST_VIDEO_INFO_WIDTH (&cache->input_info),
@@ -171,17 +195,24 @@ gl_mem_from_buffer (GstCoreVideoTextureCache * cache,
*mem2 = (GstMemory *) gst_gl_memory_wrapped_texture (cache->ctx,
CVOpenGLESTextureGetName (texture), CVOpenGLESTextureGetTarget (texture),
&cache->input_info, 0, NULL, texture, (GDestroyNotify) CFRelease);
break;
#endif
default:
g_warn_if_reached ();
ret = FALSE;
}
return TRUE;
return ret;
error:
ret = FALSE;
if (*mem1)
gst_memory_unref (*mem1);
if (*mem2)
gst_memory_unref (*mem2);
return FALSE;
return ret;
}
static void

View file

@@ -313,7 +313,6 @@ gst_vtdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
vtdec->format_description = format_description;
output_format = gst_vtdec_negotiate_output_format (vtdec);
if (!gst_vtdec_create_session (vtdec, output_format))
return FALSE;
@@ -710,10 +709,17 @@ gst_vtdec_push_frames_if_needed (GstVtdec * vtdec, gboolean drain,
if (gst_pad_check_reconfigure (decoder->srcpad)) {
gst_video_decoder_negotiate (decoder);
if (vtdec->texture_cache) {
GstVideoFormat internal_format;
GstVideoCodecState *output_state =
gst_video_decoder_get_output_state (decoder);
#ifdef HAVE_IOS
internal_format = GST_VIDEO_FORMAT_NV12;
#else
internal_format = GST_VIDEO_FORMAT_UYVY;
#endif
gst_core_video_texture_cache_set_format (vtdec->texture_cache,
GST_VTDEC_VIDEO_FORMAT_STR, output_state->caps);
internal_format, output_state->caps);
gst_video_codec_state_unref (output_state);
}
}
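For reference, a hedged usage sketch of the reworked texture-cache API as declared in the header hunk above: the cache is created from the downstream GL context, told the internal format with a GstVideoFormat value (no longer a string), and then wraps CVPixelBuffer-backed buffers as GL buffers. The wrapper upload_to_gl() and its arguments are invented for illustration; only the gst_core_video_texture_cache_* calls are from the patch.

/* Usage sketch only. Needs the texture-cache header shown above plus
 * <gst/gl/gl.h> for GstGLContext. */
static GstBuffer *
upload_to_gl (GstGLContext * gl_context, GstCaps * out_caps,
    GstVideoFormat internal_format, GstBuffer * cv_buffer)
{
  GstCoreVideoTextureCache *cache;
  GstBuffer *gl_buffer;

  cache = gst_core_video_texture_cache_new (gl_context);
  /* in_format is now a GstVideoFormat: UYVY on OSX, BGRA (avfvideosrc)
   * or NV12 (vtdec) on iOS */
  gst_core_video_texture_cache_set_format (cache, internal_format, out_caps);

  /* wraps the CVPixelBuffer as GL texture memory, no conversion in AVF/VT */
  gl_buffer = gst_core_video_texture_cache_get_gl_buffer (cache, cv_buffer);

  gst_core_video_texture_cache_free (cache);
  return gl_buffer;
}

In the elements themselves the cache of course persists across buffers (see the avfvideosrc hunk where it is created once the GL context is known); the sketch only shows the call order with the new signature.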