applemedia: make GL memory CPU-mappable on iOS

This commit introduces IOSGLMemory, a GLMemory that falls back to
GstAppleCoreVideoMemory for CPU access. This is a temporary solution until
IOSurface is exposed as a public framework on iOS, at which point
IOSurfaceMemory can be used on both macOS and iOS.

https://bugzilla.gnome.org/show_bug.cgi?id=769210
Alessandro Decina 2016-08-26 17:37:54 +10:00
parent 91fea30ff4
commit 7898bc5810
18 changed files with 473 additions and 179 deletions
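The sketch below is not part of the diff; it only illustrates what the change enables from a consumer's point of view, assuming a buffer whose planes are backed by the new IOSGLMemory (for example the output of avfvideosrc or vtdec on iOS). A plain CPU map is routed to the wrapped GstAppleCoreVideoMemory, while a GST_MAP_GL map still yields the GL texture id, mirroring _ios_gl_memory_allocator_map further down. The inspect_buffer helper is purely hypothetical.

#include <gst/gst.h>
#include <gst/gl/gl.h>

static void
inspect_buffer (GstBuffer * buffer)
{
  /* Look at the first plane only; each plane is a separate GstMemory. */
  GstMemory *mem = gst_buffer_peek_memory (buffer, 0);
  GstMapInfo map;

  /* CPU path: with IOSGLMemory this falls back to the CVPixelBuffer-backed
   * GstAppleCoreVideoMemory instead of requiring a GL download. */
  if (gst_memory_map (mem, &map, GST_MAP_READ)) {
    GST_INFO ("CPU mapped %" G_GSIZE_FORMAT " bytes, first byte %u",
        map.size, (guint) map.data[0]);
    gst_memory_unmap (mem, &map);
  }

  /* GL path: mapping with GST_MAP_GL returns a pointer to the texture id
   * (this should run on the GL thread with the context current). */
  if (gst_memory_map (mem, &map, GST_MAP_READ | GST_MAP_GL)) {
    guint tex_id = *(guint *) map.data;
    GST_INFO ("GL texture id %u", tex_id);
    gst_memory_unmap (mem, &map);
  }
}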

View file

@ -78,12 +78,14 @@ noinst_HEADERS = \
iosassetsrc.h \
iosurfacememory.h \
avfassetsrc.h \
glcontexthelper.h
glcontexthelper.h \
iosglmemory.h
if HAVE_IOS
libgstapplemedia_la_SOURCES += \
iosassetsrc.m
iosassetsrc.m \
iosglmemory.c
libgstapplemedia_la_LDFLAGS += \
-Wl,-framework -Wl,Foundation \
@ -92,7 +94,7 @@ libgstapplemedia_la_LDFLAGS += \
else
libgstapplemedia_la_SOURCES += \
qtkitvideosrc.m \
qtkitvideosrc.m \
iosurfacememory.c
libgstapplemedia_la_LDFLAGS += \

View file

@ -1071,7 +1071,7 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
return NULL;
}
buf = gst_core_media_buffer_new (cmbuf, FALSE);
buf = gst_core_media_buffer_new (cmbuf, FALSE, NULL);
CFRelease (cmbuf);
if (buf == NULL)
return NULL;

View file

@ -954,19 +954,13 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
}
}
*buf = gst_core_media_buffer_new (sbuf, useVideoMeta);
*buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache);
if (*buf == NULL) {
CFRelease (sbuf);
return GST_FLOW_ERROR;
}
CFRelease (sbuf);
if (textureCache != NULL) {
*buf = gst_video_texture_cache_get_gl_buffer (textureCache, *buf);
if (*buf == NULL)
return GST_FLOW_ERROR;
}
GST_BUFFER_OFFSET (*buf) = offset++;
GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (*buf) + 1;
GST_BUFFER_TIMESTAMP (*buf) = timestamp;

View file

@ -240,7 +240,7 @@ gst_video_info_init_from_pixel_buffer (GstVideoInfo * info,
GstBuffer *
gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
gboolean use_video_meta)
gboolean use_video_meta, GstVideoTextureCache * cache)
{
CVImageBufferRef image_buf;
CMBlockBufferRef block_buf;
@ -262,7 +262,8 @@ gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
goto error;
}
gst_core_video_wrap_pixel_buffer (buf, &info, pixel_buf, &has_padding);
gst_core_video_wrap_pixel_buffer (buf, &info, pixel_buf, cache,
&has_padding);
/* If the video meta API is not supported, remove padding by
* copying the core media buffer to a system memory buffer */

View file

@ -22,6 +22,7 @@
#include <gst/gst.h>
#include <gst/video/gstvideometa.h>
#include "videotexturecache.h"
#include "CoreMedia/CoreMedia.h"
@ -43,7 +44,8 @@ typedef struct _GstCoreMediaMeta
GstBuffer * gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
gboolean use_video_meta);
gboolean use_video_meta,
GstVideoTextureCache *cache);
CVPixelBufferRef gst_core_media_buffer_get_pixel_buffer
(GstBuffer * buf);
GType gst_core_media_meta_api_get_type (void);

View file

@ -17,8 +17,14 @@
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include "corevideobuffer.h"
#include "corevideomemory.h"
#if !HAVE_IOS
#include "iosurfacememory.h"
#endif
static const GstMetaInfo *gst_core_video_meta_get_info (void);
@ -93,18 +99,46 @@ gst_core_video_meta_get_info (void)
return core_video_meta_info;
}
static GstMemory *
_create_glmem (GstAppleCoreVideoPixelBuffer * gpixbuf,
GstVideoInfo * info, guint plane, gsize size, GstVideoTextureCache * cache)
{
#if HAVE_IOS
return gst_video_texture_cache_create_memory (cache, gpixbuf, plane, size);
#else
GstIOSurfaceMemory *mem;
GstVideoGLTextureType tex_type =
gst_gl_texture_type_from_format (cache->ctx, GST_VIDEO_INFO_FORMAT (info),
plane);
CVPixelBufferRef pixel_buf = gpixbuf->buf;
IOSurfaceRef surface = CVPixelBufferGetIOSurface (pixel_buf);
CFRetain (pixel_buf);
mem = gst_io_surface_memory_wrapped (cache->ctx,
surface, GST_GL_TEXTURE_TARGET_RECTANGLE, tex_type,
info, plane, NULL, pixel_buf, (GDestroyNotify) CFRelease);
return GST_MEMORY_CAST (mem);
#endif
}
void
gst_core_video_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
CVPixelBufferRef pixel_buf, gboolean * has_padding)
gst_core_video_wrap_pixel_buffer (GstBuffer * buf,
GstVideoInfo * info,
CVPixelBufferRef pixel_buf,
GstVideoTextureCache * cache, gboolean * has_padding)
{
guint n_planes;
gsize offset[GST_VIDEO_MAX_PLANES] = { 0 };
gint stride[GST_VIDEO_MAX_PLANES] = { 0 };
UInt32 size;
GstAppleCoreVideoPixelBuffer *gpixbuf;
GstMemory *mem = NULL;
gboolean do_gl = cache != NULL;
gpixbuf = gst_apple_core_video_pixel_buffer_new (pixel_buf);
*has_padding = FALSE;
if (has_padding)
*has_padding = FALSE;
if (CVPixelBufferIsPlanar (pixel_buf)) {
gint i, size = 0, plane_offset = 0;
@ -113,16 +147,20 @@ gst_core_video_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
for (i = 0; i < n_planes; i++) {
stride[i] = CVPixelBufferGetBytesPerRowOfPlane (pixel_buf, i);
if (stride[i] != GST_VIDEO_INFO_PLANE_STRIDE (info, i)) {
if (stride[i] != GST_VIDEO_INFO_PLANE_STRIDE (info, i) && has_padding)
*has_padding = TRUE;
}
size = stride[i] * CVPixelBufferGetHeightOfPlane (pixel_buf, i);
offset[i] = plane_offset;
plane_offset += size;
gst_buffer_append_memory (buf,
gst_apple_core_video_memory_new_wrapped (gpixbuf, i, size));
if (do_gl)
mem = _create_glmem (gpixbuf, info, i, size, cache);
else
mem =
GST_MEMORY_CAST (gst_apple_core_video_memory_new_wrapped (gpixbuf,
i, size));
gst_buffer_append_memory (buf, mem);
}
} else {
n_planes = 1;
@ -130,9 +168,13 @@ gst_core_video_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
offset[0] = 0;
size = stride[0] * CVPixelBufferGetHeight (pixel_buf);
gst_buffer_append_memory (buf,
gst_apple_core_video_memory_new_wrapped (gpixbuf,
GST_APPLE_CORE_VIDEO_NO_PLANE, size));
if (do_gl)
mem = _create_glmem (gpixbuf, info, 0, size, cache);
else
mem =
GST_MEMORY_CAST (gst_apple_core_video_memory_new_wrapped (gpixbuf, 0,
size));
gst_buffer_append_memory (buf, mem);
}
gst_apple_core_video_pixel_buffer_unref (gpixbuf);
@ -147,13 +189,58 @@ gst_core_video_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
}
}
static GstVideoFormat
gst_core_video_get_video_format (OSType format)
{
switch (format) {
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
return GST_VIDEO_FORMAT_NV12;
case kCVPixelFormatType_422YpCbCr8_yuvs:
return GST_VIDEO_FORMAT_YUY2;
case kCVPixelFormatType_422YpCbCr8:
return GST_VIDEO_FORMAT_UYVY;
case kCVPixelFormatType_32BGRA:
return GST_VIDEO_FORMAT_BGRA;
case kCVPixelFormatType_32RGBA:
return GST_VIDEO_FORMAT_RGBA;
default:
GST_WARNING ("Unknown OSType format: %d", (gint) format);
return GST_VIDEO_FORMAT_UNKNOWN;
}
}
gboolean
gst_core_video_info_init_from_pixel_buffer (GstVideoInfo * info,
CVPixelBufferRef pixel_buf)
{
size_t width, height;
OSType format_type;
GstVideoFormat video_format;
width = CVPixelBufferGetWidth (pixel_buf);
height = CVPixelBufferGetHeight (pixel_buf);
format_type = CVPixelBufferGetPixelFormatType (pixel_buf);
video_format = gst_core_video_get_video_format (format_type);
if (video_format == GST_VIDEO_FORMAT_UNKNOWN) {
return FALSE;
}
gst_video_info_init (info);
gst_video_info_set_format (info, video_format, width, height);
return TRUE;
}
GstBuffer *
gst_core_video_buffer_new (CVBufferRef cvbuf, GstVideoInfo * vinfo)
gst_core_video_buffer_new (CVBufferRef cvbuf, GstVideoInfo * vinfo,
GstVideoTextureCache * cache)
{
CVPixelBufferRef pixbuf = NULL;
GstBuffer *buf;
GstCoreVideoMeta *meta;
gboolean has_padding; /* not used for now */
if (CFGetTypeID (cvbuf) != CVPixelBufferGetTypeID ())
/* TODO: Do we need to handle other buffer types? */
@ -169,7 +256,7 @@ gst_core_video_buffer_new (CVBufferRef cvbuf, GstVideoInfo * vinfo)
meta->cvbuf = CVBufferRetain (cvbuf);
meta->pixbuf = pixbuf;
gst_core_video_wrap_pixel_buffer (buf, vinfo, pixbuf, &has_padding);
gst_core_video_wrap_pixel_buffer (buf, vinfo, pixbuf, cache, NULL);
return buf;
}

View file

@ -23,6 +23,7 @@
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>
#include "videotexturecache.h"
#include "CoreVideo/CoreVideo.h"
@ -41,10 +42,14 @@ typedef struct _GstCoreVideoMeta
} GstCoreVideoMeta;
GstBuffer * gst_core_video_buffer_new (CVBufferRef cvbuf,
GstVideoInfo *info);
GstVideoInfo *info,
GstVideoTextureCache *cache);
gboolean gst_core_video_info_init_from_pixel_buffer (GstVideoInfo * info,
CVPixelBufferRef pixel_buf);
void gst_core_video_wrap_pixel_buffer (GstBuffer * buf,
GstVideoInfo * info,
CVPixelBufferRef pixel_buf,
GstVideoTextureCache *cache,
gboolean * has_padding);
GType gst_core_video_meta_api_get_type (void);

View file

@ -300,7 +300,7 @@ gst_is_apple_core_video_memory (GstMemory * mem)
* Helper function for gst_apple_core_video_mem_share().
* Users should call gst_apple_core_video_memory_new_wrapped() instead.
*/
static GstMemory *
static GstAppleCoreVideoMemory *
gst_apple_core_video_memory_new (GstMemoryFlags flags, GstMemory * parent,
GstAppleCoreVideoPixelBuffer * gpixbuf, gsize plane, gsize maxsize,
gsize align, gsize offset, gsize size)
@ -326,12 +326,12 @@ gst_apple_core_video_memory_new (GstMemoryFlags flags, GstMemory * parent,
/**
* gst_apple_core_video_memory_new_wrapped:
* @gpixbuf: the backing #GstAppleCoreVideoPixelBuffer
* @plane: the plane this memory will represent, or #GST_APPLE_CORE_VIDEO_NO_PLANE for non-planar buffer
* @plane: the plane this memory will represent, or 0 for non-planar buffer
* @size: the size of the buffer or specific plane
*
* Returns: a newly allocated #GstAppleCoreVideoMemory
*/
GstMemory *
GstAppleCoreVideoMemory *
gst_apple_core_video_memory_new_wrapped (GstAppleCoreVideoPixelBuffer * gpixbuf,
gsize plane, gsize size)
{
@ -349,7 +349,7 @@ gst_apple_core_video_mem_map (GstMemory * gmem, gsize maxsize,
if (!gst_apple_core_video_pixel_buffer_lock (mem->gpixbuf, flags))
return NULL;
if (mem->plane != GST_APPLE_CORE_VIDEO_NO_PLANE) {
if (CVPixelBufferIsPlanar (mem->gpixbuf->buf)) {
ret = CVPixelBufferGetBaseAddressOfPlane (mem->gpixbuf->buf, mem->plane);
if (ret != NULL)
@ -378,11 +378,8 @@ gst_apple_core_video_mem_unmap (GstMemory * gmem)
{
GstAppleCoreVideoMemory *mem = (GstAppleCoreVideoMemory *) gmem;
(void) gst_apple_core_video_pixel_buffer_unlock (mem->gpixbuf);
if (mem->plane != GST_APPLE_CORE_VIDEO_NO_PLANE)
GST_DEBUG ("%p: pixbuf %p plane %" G_GSIZE_FORMAT, mem,
mem->gpixbuf->buf, mem->plane);
else
GST_DEBUG ("%p: pixbuf %p", mem, mem->gpixbuf->buf);
GST_DEBUG ("%p: pixbuf %p plane %" G_GSIZE_FORMAT, mem,
mem->gpixbuf->buf, mem->plane);
}
static GstMemory *
@ -403,9 +400,9 @@ gst_apple_core_video_mem_share (GstMemory * gmem, gssize offset, gssize size)
/* the shared memory is always readonly */
sub =
gst_apple_core_video_memory_new (GST_MINI_OBJECT_FLAGS (parent) |
GST_MINI_OBJECT_FLAG_LOCK_READONLY, parent, mem->gpixbuf, mem->plane,
gmem->maxsize, gmem->align, gmem->offset + offset, size);
GST_MEMORY_CAST (gst_apple_core_video_memory_new (GST_MINI_OBJECT_FLAGS
(parent) | GST_MINI_OBJECT_FLAG_LOCK_READONLY, parent, mem->gpixbuf,
mem->plane, gmem->maxsize, gmem->align, gmem->offset + offset, size));
return sub;
}

View file

@ -62,13 +62,6 @@ typedef struct
guint lock_count;
} GstAppleCoreVideoPixelBuffer;
/**
* GST_APPLE_CORE_VIDEO_NO_PLANE:
*
* Indicates a non-planar pixel buffer.
*/
#define GST_APPLE_CORE_VIDEO_NO_PLANE ((size_t)-1)
/**
* GstAppleCoreVideoMemory:
*
@ -101,7 +94,7 @@ gst_apple_core_video_pixel_buffer_unref (GstAppleCoreVideoPixelBuffer * shared);
gboolean
gst_is_apple_core_video_memory (GstMemory * mem);
GstMemory *
GstAppleCoreVideoMemory *
gst_apple_core_video_memory_new_wrapped (GstAppleCoreVideoPixelBuffer * shared, gsize plane, gsize size);
G_END_DECLS

View file

@ -0,0 +1,166 @@
/*
* GStreamer
* Copyright (C) 2015 Alessandro Decina <twi@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "iosglmemory.h"
GST_DEBUG_CATEGORY_STATIC (GST_CAT_IOS_GL_MEMORY);
#define GST_CAT_DEFAULT GST_CAT_IOS_GL_MEMORY
G_DEFINE_TYPE (GstIOSGLMemoryAllocator, gst_ios_gl_memory_allocator,
GST_TYPE_GL_MEMORY_ALLOCATOR);
typedef struct
{
GstIOSGLMemory *memory;
} ContextThreadData;
static GstAllocator *_ios_gl_memory_allocator;
static void
_ios_gl_memory_destroy (GstGLBaseMemory * gl_mem)
{
GstIOSGLMemory *mem = (GstIOSGLMemory *) gl_mem;
gst_memory_unref (GST_MEMORY_CAST (mem->cv_mem));
GST_GL_BASE_MEMORY_ALLOCATOR_CLASS
(gst_ios_gl_memory_allocator_parent_class)->destroy (gl_mem);
}
static gpointer
_ios_gl_memory_allocator_map (GstGLBaseMemory * bmem,
GstMapInfo * info, gsize size)
{
GstGLMemory *gl_mem = (GstGLMemory *) bmem;
GstIOSGLMemory *mem = (GstIOSGLMemory *) gl_mem;
if (info->flags & GST_MAP_GL)
return &gl_mem->tex_id;
return GST_MEMORY_CAST (mem->cv_mem)->allocator->
mem_map (GST_MEMORY_CAST (mem->cv_mem), size, info->flags);
}
static void
_ios_gl_memory_allocator_unmap (GstGLBaseMemory * bmem, GstMapInfo * info)
{
GstIOSGLMemory *mem = (GstIOSGLMemory *) bmem;
if (!(info->flags & GST_MAP_GL))
GST_MEMORY_CAST (mem->cv_mem)->allocator->
mem_unmap (GST_MEMORY_CAST (mem->cv_mem));
}
static GstMemory *
_mem_alloc (GstAllocator * allocator, gsize size, GstAllocationParams * params)
{
g_warning ("use gst_ios_gl_memory_new_wrapped () to allocate from this "
"IOSGL allocator");
return NULL;
}
static void
gst_ios_gl_memory_allocator_class_init (GstIOSGLMemoryAllocatorClass * klass)
{
GstAllocatorClass *allocator_class = (GstAllocatorClass *) klass;
GstGLBaseMemoryAllocatorClass *gl_base_allocator_class =
(GstGLBaseMemoryAllocatorClass *) klass;
allocator_class->alloc = _mem_alloc;
gl_base_allocator_class->destroy = _ios_gl_memory_destroy;
gl_base_allocator_class->map = _ios_gl_memory_allocator_map;
gl_base_allocator_class->unmap = _ios_gl_memory_allocator_unmap;
}
static void
gst_ios_gl_memory_allocator_init (GstIOSGLMemoryAllocator * allocator)
{
GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator);
alloc->mem_type = GST_IOS_GL_MEMORY_ALLOCATOR_NAME;
GST_OBJECT_FLAG_SET (allocator, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC);
}
void
gst_ios_gl_memory_init (void)
{
static volatile gsize _init = 0;
if (g_once_init_enter (&_init)) {
GST_DEBUG_CATEGORY_INIT (GST_CAT_IOS_GL_MEMORY, "iosurface", 0,
"IOSGL Buffer");
_ios_gl_memory_allocator =
g_object_new (GST_TYPE_IOS_GL_MEMORY_ALLOCATOR, NULL);
gst_allocator_register (GST_IOS_GL_MEMORY_ALLOCATOR_NAME,
gst_object_ref (_ios_gl_memory_allocator));
g_once_init_leave (&_init, 1);
}
}
gboolean
gst_is_ios_gl_memory (GstMemory * mem)
{
return mem != NULL && mem->allocator != NULL &&
g_type_is_a (G_OBJECT_TYPE (mem->allocator),
GST_TYPE_IOS_GL_MEMORY_ALLOCATOR);
}
static GstIOSGLMemory *
_ios_gl_memory_new (GstGLContext * context,
GstAppleCoreVideoMemory * cv_mem,
GstGLTextureTarget target,
GstVideoGLTextureType tex_type,
guint tex_id,
GstVideoInfo * info,
guint plane,
GstVideoAlignment * valign, gpointer user_data, GDestroyNotify notify)
{
GstIOSGLMemory *mem;
mem = g_new0 (GstIOSGLMemory, 1);
mem->gl_mem.tex_id = tex_id;
mem->gl_mem.texture_wrapped = TRUE;
gst_gl_memory_init (&mem->gl_mem, _ios_gl_memory_allocator, NULL, context,
target, tex_type, NULL, info, plane, valign, user_data, notify);
mem->cv_mem = cv_mem;
GST_MINI_OBJECT_FLAG_SET (mem, GST_MEMORY_FLAG_READONLY);
return mem;
}
GstIOSGLMemory *
gst_ios_gl_memory_new_wrapped (GstGLContext * context,
GstAppleCoreVideoMemory * cv_mem,
GstGLTextureTarget target,
GstVideoGLTextureType tex_type,
guint tex_id,
GstVideoInfo * info,
guint plane,
GstVideoAlignment * valign, gpointer user_data, GDestroyNotify notify)
{
return _ios_gl_memory_new (context, cv_mem, target, tex_type, tex_id, info,
plane, valign, user_data, notify);
}

View file

@ -0,0 +1,78 @@
/*
* GStreamer
* Copyright (C) 2016 Alessandro Decina <twi@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_IOS_GL_MEMORY_H_
#define _GST_IOS_GL_MEMORY_H_
#include <gst/gst.h>
#include <gst/gstallocator.h>
#include <gst/video/video.h>
#include <gst/gl/gl.h>
#include "corevideomemory.h"
G_BEGIN_DECLS
#define GST_TYPE_IOS_GL_MEMORY_ALLOCATOR (gst_ios_gl_memory_allocator_get_type())
GType gst_ios_gl_memory_allocator_get_type(void);
#define GST_IS_IOS_GL_MEMORY_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_IOS_GL_MEMORY_ALLOCATOR))
#define GST_IS_IOS_GL_MEMORY_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_IOS_GL_MEMORY_ALLOCATOR))
#define GST_IOS_GL_MEMORY_ALLOCATOR_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_IOS_GL_MEMORY_ALLOCATOR, GstIOSGLMemoryAllocatorClass))
#define GST_IOS_GL_MEMORY_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_IOS_GL_MEMORY_ALLOCATOR, GstIOSGLMemoryAllocator))
#define GST_IOS_GL_MEMORY_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_IOS_GL_MEMORY_ALLOCATOR, GstIOSGLMemoryAllocatorClass))
#define GST_IOS_GL_MEMORY_ALLOCATOR_CAST(obj) ((GstIOSGLMemoryAllocator *)(obj))
typedef struct _GstIOSGLMemory
{
GstGLMemory gl_mem;
GstAppleCoreVideoMemory *cv_mem;
} GstIOSGLMemory;
#define GST_IOS_GL_MEMORY_ALLOCATOR_NAME "IOSGLMemory"
void gst_ios_gl_memory_init (void);
GstIOSGLMemory *
gst_ios_gl_memory_new_wrapped (GstGLContext * context,
GstAppleCoreVideoMemory *cv_mem,
GstGLTextureTarget target,
GstVideoGLTextureType tex_type,
guint tex_id,
GstVideoInfo * info,
guint plane,
GstVideoAlignment *valign,
gpointer user_data,
GDestroyNotify notify);
gboolean gst_is_ios_gl_memory (GstMemory * mem);
typedef struct _GstIOSGLMemoryAllocator
{
GstGLMemoryAllocator allocator;
} GstIOSGLMemoryAllocator;
typedef struct _GstIOSGLMemoryAllocatorClass
{
GstGLMemoryAllocatorClass parent_class;
} GstIOSGLMemoryAllocatorClass;
G_END_DECLS
#endif /* _GST_IOS_GL_MEMORY_H_ */

View file

@ -30,6 +30,12 @@ GST_DEBUG_CATEGORY_STATIC (GST_CAT_IO_SURFACE_MEMORY);
G_DEFINE_TYPE (GstIOSurfaceMemoryAllocator, gst_io_surface_memory_allocator,
GST_TYPE_GL_MEMORY_ALLOCATOR);
typedef struct
{
GstIOSurfaceMemory *memory;
IOSurfaceRef surface;
} ContextThreadData;
static void _io_surface_memory_set_surface (GstIOSurfaceMemory * memory,
IOSurfaceRef surface);
@ -176,7 +182,7 @@ _io_surface_memory_new (GstGLContext * context,
GST_MINI_OBJECT_FLAG_SET (mem, GST_MEMORY_FLAG_READONLY);
mem->surface = NULL;
_io_surface_memory_set_surface (mem, surface);
gst_io_surface_memory_set_surface (mem, surface);
return mem;
}
@ -232,12 +238,22 @@ _io_surface_memory_set_surface (GstIOSurfaceMemory * memory,
}
}
static void
_do_set_surface (GstGLContext * context, ContextThreadData * data)
{
_io_surface_memory_set_surface (data->memory, data->surface);
}
void
gst_io_surface_memory_set_surface (GstIOSurfaceMemory * memory,
IOSurfaceRef surface)
{
g_return_if_fail (gst_is_io_surface_memory ((GstMemory *) memory));
g_return_if_fail (memory->surface == NULL);
GstGLContext *context;
ContextThreadData data = { memory, surface };
_io_surface_memory_set_surface (memory, surface);
g_return_if_fail (gst_is_io_surface_memory ((GstMemory *) memory));
context = memory->gl_mem.mem.context;
gst_gl_context_thread_add (context,
(GstGLContextThreadFunc) _do_set_surface, &data);
}

View file

@ -67,6 +67,8 @@ plugin_init (GstPlugin * plugin)
gst_apple_core_video_memory_init ();
#ifdef HAVE_IOS
gst_ios_gl_memory_init ();
res &= gst_element_register (plugin, "iosassetsrc", GST_RANK_SECONDARY,
GST_TYPE_IOS_ASSET_SRC);
#else

View file

@ -18,8 +18,8 @@
*/
#include "qtkitvideosrc.h"
#import "corevideobuffer.h"
#include "glcontexthelper.h"
#import <QTKit/QTKit.h>
@ -100,9 +100,12 @@ G_DEFINE_TYPE (GstQTKitVideoSrc, gst_qtkit_video_src, GST_TYPE_PUSH_SRC);
BOOL stopRequest;
gint width, height;
gint fps_n, fps_d;
gint fps_n, fps_d;
GstClockTime duration;
guint64 offset;
GstGLContextHelper *ctxh;
GstVideoTextureCache *textureCache;
GstVideoInfo outputInfo;
}
- (id)init;
@ -149,6 +152,10 @@ G_DEFINE_TYPE (GstQTKitVideoSrc, gst_qtkit_video_src, GST_TYPE_PUSH_SRC);
gst_base_src_set_live (baseSrc, TRUE);
gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
self->ctxh = NULL;
textureCache = NULL;
gst_video_info_init (&outputInfo);
}
return self;
@ -272,6 +279,17 @@ openFailed:
error:nil];
g_assert (success);
gst_gl_context_helper_ensure_context (ctxh);
GST_INFO_OBJECT (element, "pushing textures, context %p old context %p",
ctxh->context, textureCache ? textureCache->ctx : NULL);
if (textureCache && textureCache->ctx != ctxh->context) {
gst_video_texture_cache_free (textureCache);
textureCache = NULL;
}
textureCache = gst_video_texture_cache_new (ctxh->context);
gst_video_texture_cache_set_format (textureCache, GST_VIDEO_FORMAT_UYVY, caps);
gst_video_info_set_format (&outputInfo, GST_VIDEO_FORMAT_UYVY, width, height);
[output setDelegate:self];
[session startRunning];
@ -291,6 +309,7 @@ openFailed:
fps_n = 0;
fps_d = 1;
duration = GST_CLOCK_TIME_NONE;
ctxh = gst_gl_context_helper_new (element);
/* this will trigger negotiation and open the device in setCaps */
gst_base_src_start_complete (baseSrc, GST_FLOW_OK);
@ -306,7 +325,6 @@ openFailed:
[[NSRunLoop mainRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:1]];
}
return YES;
}
@ -320,6 +338,12 @@ openFailed:
[queue release];
queue = nil;
gst_gl_context_helper_free (ctxh);
ctxh = NULL;
if (textureCache)
gst_video_texture_cache_free (textureCache);
textureCache = NULL;
return YES;
}
@ -434,7 +458,7 @@ openFailed:
[queueLock unlockWithCondition:
([queue count] == 0) ? NO_FRAMES : HAS_FRAME_OR_STOP_REQUEST];
*buf = gst_core_video_buffer_new ((CVBufferRef)frame, NULL);
*buf = gst_core_video_buffer_new ((CVBufferRef)frame, &outputInfo, textureCache);
CVBufferRelease (frame);
[self timestampBuffer:*buf];

View file

@ -22,7 +22,7 @@
#include <gst/video/gstvideometa.h>
#include <gst/gl/gstglcontext.h>
#include <CoreVideo/CoreVideo.h>
#include "corevideomemory.h"
G_BEGIN_DECLS
@ -47,8 +47,8 @@ void gst_video_texture_cache_free (GstVideoTextureCache * cache);
void gst_video_texture_cache_set_format (GstVideoTextureCache * cache,
GstVideoFormat in_format, GstCaps * out_caps);
gboolean gst_video_texture_cache_upload (GstVideoGLTextureUploadMeta * meta, guint texture_id[4]);
GstBuffer * gst_video_texture_cache_get_gl_buffer (GstVideoTextureCache * cache,
GstBuffer * cv_buffer);
GstMemory *gst_video_texture_cache_create_memory (GstVideoTextureCache * cache,
GstAppleCoreVideoPixelBuffer *gpixbuf, guint plane, gsize size);
G_END_DECLS

View file

@ -27,6 +27,7 @@
#include <gst/gl/gstglbufferpool.h>
#include "iosurfacememory.h"
#endif
#include "iosglmemory.h"
#include "videotexturecache.h"
#include "coremediabuffer.h"
#include "corevideobuffer.h"
@ -35,8 +36,10 @@
typedef struct _ContextThreadData
{
GstVideoTextureCache *cache;
GstBuffer *input_buffer;
GstBuffer *output_buffer;
GstAppleCoreVideoPixelBuffer *gpixbuf;
guint plane;
gsize size;
GstMemory *memory;
} ContextThreadData;
GstVideoTextureCache *
@ -97,14 +100,12 @@ gst_video_texture_cache_set_format (GstVideoTextureCache * cache,
out_caps = gst_caps_copy (out_caps);
features = gst_caps_get_features (out_caps, 0);
gst_caps_features_add (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
gst_video_info_from_caps (&cache->output_info, out_caps);
in_caps = gst_caps_copy (out_caps);
gst_caps_set_simple (in_caps, "format",
G_TYPE_STRING, gst_video_format_to_string (in_format), NULL);
features = gst_caps_get_features (in_caps, 0);
gst_caps_features_add (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
gst_video_info_from_caps (&cache->input_info, in_caps);
if (cache->in_caps)
@ -127,40 +128,22 @@ gst_video_texture_cache_set_format (GstVideoTextureCache * cache,
#endif
}
static CVPixelBufferRef
cv_pixel_buffer_from_gst_buffer (GstBuffer * buffer)
{
GstCoreMediaMeta *cm_meta =
(GstCoreMediaMeta *) gst_buffer_get_meta (buffer,
gst_core_media_meta_api_get_type ());
GstCoreVideoMeta *cv_meta =
(GstCoreVideoMeta *) gst_buffer_get_meta (buffer,
gst_core_video_meta_api_get_type ());
g_return_val_if_fail (cm_meta || cv_meta, NULL);
return cm_meta ? cm_meta->pixel_buf : cv_meta->pixbuf;
}
#if HAVE_IOS
static void
_do_get_gl_buffer (GstGLContext * context, ContextThreadData * data)
_do_create_memory (GstGLContext * context, ContextThreadData * data)
{
CVOpenGLESTextureRef texture = NULL;
GstVideoTextureCache *cache = data->cache;
CVPixelBufferRef pixel_buf = cv_pixel_buffer_from_gst_buffer (data->input_buffer);
GstAppleCoreVideoPixelBuffer *gpixbuf = data->gpixbuf;
CVPixelBufferRef pixel_buf = gpixbuf->buf;
guint plane = data->plane;
gssize size = data->size;
GstGLTextureTarget gl_target;
GstGLBaseMemoryAllocator *base_mem_alloc;
GstGLVideoAllocationParams *params;
GstBuffer *output_buffer;
base_mem_alloc = GST_GL_BASE_MEMORY_ALLOCATOR (gst_gl_memory_allocator_get_default (cache->ctx));
output_buffer = gst_buffer_new ();
gst_buffer_copy_into (output_buffer, data->input_buffer, GST_BUFFER_COPY_ALL, 0, -1);
GstAppleCoreVideoMemory *memory;
GstIOSGLMemory *gl_memory;
switch (GST_VIDEO_INFO_FORMAT (&cache->input_info)) {
case GST_VIDEO_FORMAT_BGRA:
/* avfvideosrc does BGRA on iOS when doing GLMemory */
if (CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault,
cache->cache, pixel_buf, NULL, GL_TEXTURE_2D, GL_RGBA,
GST_VIDEO_INFO_WIDTH (&cache->input_info),
@ -169,64 +152,36 @@ _do_get_gl_buffer (GstGLContext * context, ContextThreadData * data)
goto error;
gl_target = gst_gl_texture_target_from_gl (CVOpenGLESTextureGetTarget (texture));
params = gst_gl_video_allocation_params_new_wrapped_texture (cache->ctx,
NULL, &cache->input_info, 0, NULL, gl_target,
GST_VIDEO_GL_TEXTURE_TYPE_RGBA, CVOpenGLESTextureGetName (texture),
texture, (GDestroyNotify) CFRelease);
gst_buffer_replace_memory (output_buffer, 0,
(GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
(GstGLAllocationParams *) params));
gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
memory = gst_apple_core_video_memory_new_wrapped (gpixbuf, plane, size);
gl_memory = gst_ios_gl_memory_new_wrapped (context, memory,
gl_target, GST_VIDEO_GL_TEXTURE_TYPE_RGBA,
CVOpenGLESTextureGetName (texture),
&cache->input_info,
0, NULL, texture, (GDestroyNotify) CFRelease);
break;
case GST_VIDEO_FORMAT_NV12: {
GstVideoGLTextureType textype;
GLenum texifmt, texfmt;
textype = GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE;
texifmt = gst_gl_format_from_gl_texture_type (textype);
texfmt = gst_gl_sized_gl_format_from_gl_format_type (cache->ctx, texifmt, GL_UNSIGNED_BYTE);
/* vtdec does NV12 on iOS when doing GLMemory */
if (CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault,
cache->cache, pixel_buf, NULL, GL_TEXTURE_2D, texifmt,
GST_VIDEO_INFO_WIDTH (&cache->input_info),
GST_VIDEO_INFO_HEIGHT (&cache->input_info),
texfmt, GL_UNSIGNED_BYTE, 0, &texture) != kCVReturnSuccess)
goto error;
gl_target = gst_gl_texture_target_from_gl (CVOpenGLESTextureGetTarget (texture));
params = gst_gl_video_allocation_params_new_wrapped_texture (cache->ctx,
NULL, &cache->input_info, 0, NULL, gl_target, textype,
CVOpenGLESTextureGetName (texture), texture,
(GDestroyNotify) CFRelease);
gst_buffer_replace_memory (output_buffer, 0,
(GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
(GstGLAllocationParams *) params));
gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
textype = GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE_ALPHA;
if (plane == 0)
textype = GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE;
else
textype = GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE_ALPHA;
texifmt = gst_gl_format_from_gl_texture_type (textype);
texfmt = gst_gl_sized_gl_format_from_gl_format_type (cache->ctx, texifmt, GL_UNSIGNED_BYTE);
if (CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault,
cache->cache, pixel_buf, NULL, GL_TEXTURE_2D, texifmt,
GST_VIDEO_INFO_WIDTH (&cache->input_info) / 2,
GST_VIDEO_INFO_HEIGHT (&cache->input_info) / 2,
texfmt, GL_UNSIGNED_BYTE, 1, &texture) != kCVReturnSuccess)
GST_VIDEO_INFO_COMP_WIDTH (&cache->input_info, plane),
GST_VIDEO_INFO_COMP_HEIGHT (&cache->input_info, plane),
texfmt, GL_UNSIGNED_BYTE, plane, &texture) != kCVReturnSuccess)
goto error;
gl_target = gst_gl_texture_target_from_gl (CVOpenGLESTextureGetTarget (texture));
params = gst_gl_video_allocation_params_new_wrapped_texture (cache->ctx,
NULL, &cache->input_info, 1, NULL, gl_target, textype,
CVOpenGLESTextureGetName (texture), texture,
(GDestroyNotify) CFRelease);
gst_buffer_replace_memory (output_buffer, 1,
(GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
(GstGLAllocationParams *) params));
gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
memory = gst_apple_core_video_memory_new_wrapped (gpixbuf, plane, size);
gl_memory = gst_ios_gl_memory_new_wrapped (context, memory,
gl_target, textype, CVOpenGLESTextureGetName (texture), &cache->input_info,
plane, NULL, texture, (GDestroyNotify) CFRelease);
break;
}
default:
@ -234,51 +189,27 @@ _do_get_gl_buffer (GstGLContext * context, ContextThreadData * data)
goto error;
}
gst_object_unref (base_mem_alloc);
data->output_buffer = output_buffer;
data->memory = GST_MEMORY_CAST (gl_memory);
return;
error:
data->output_buffer = NULL;
}
#else /* !HAVE_IOS */
static void
_do_get_gl_buffer (GstGLContext * context, ContextThreadData * data)
{
GstVideoTextureCache *cache = data->cache;
CVPixelBufferRef pixel_buf = cv_pixel_buffer_from_gst_buffer (data->input_buffer);
IOSurfaceRef surface = CVPixelBufferGetIOSurface (pixel_buf);
data->output_buffer = gst_buffer_new ();
gst_buffer_copy_into (data->output_buffer, data->input_buffer, GST_BUFFER_COPY_ALL, 0, -1);
for (int i = 0; i < GST_VIDEO_INFO_N_PLANES (&cache->input_info); i++) {
GstIOSurfaceMemory *mem;
GstVideoGLTextureType tex_type =
gst_gl_texture_type_from_format (context,
GST_VIDEO_INFO_FORMAT (&cache->input_info), i);
CFRetain (pixel_buf);
mem = gst_io_surface_memory_wrapped (cache->ctx,
surface, GST_GL_TEXTURE_TARGET_RECTANGLE, tex_type,
&cache->input_info, i, NULL, pixel_buf, (GDestroyNotify) CFRelease);
gst_buffer_replace_memory (data->output_buffer, i, (GstMemory *) mem);
}
data->memory = NULL;
}
#endif
GstBuffer *
gst_video_texture_cache_get_gl_buffer (GstVideoTextureCache * cache,
GstBuffer * cv_buffer)
GstMemory *
gst_video_texture_cache_create_memory (GstVideoTextureCache * cache,
GstAppleCoreVideoPixelBuffer *gpixbuf,
guint plane,
gsize size)
{
ContextThreadData data = {cache, cv_buffer, NULL};
ContextThreadData data = {cache, gpixbuf, plane, size, NULL};
#if HAVE_IOS
gst_gl_context_thread_add (cache->ctx,
(GstGLContextThreadFunc) _do_get_gl_buffer, &data);
(GstGLContextThreadFunc) _do_create_memory, &data);
#endif
gst_buffer_unref (cv_buffer);
return data.output_buffer;
return data.memory;
}

View file

@ -774,7 +774,9 @@ gst_vtdec_session_output_callback (void *decompression_output_ref_con,
GST_WARNING_OBJECT (vtdec, "Output state not configured, release buffer");
frame->flags &= VTDEC_FRAME_FLAG_SKIP;
} else {
buf = gst_core_video_buffer_new (image_buffer, &state->info);
buf =
gst_core_video_buffer_new (image_buffer, &state->info,
vtdec->texture_cache);
gst_video_codec_state_unref (state);
GST_BUFFER_PTS (buf) = pts.value;
GST_BUFFER_DURATION (buf) = duration.value;
@ -818,13 +820,6 @@ gst_vtdec_push_frames_if_needed (GstVtdec * vtdec, gboolean drain,
while ((g_async_queue_length (vtdec->reorder_queue) >=
vtdec->reorder_queue_length) || drain || flush) {
frame = (GstVideoCodecFrame *) g_async_queue_try_pop (vtdec->reorder_queue);
if (frame && frame->output_buffer && vtdec->texture_cache != NULL) {
frame->output_buffer =
gst_video_texture_cache_get_gl_buffer (vtdec->texture_cache,
frame->output_buffer);
if (!frame->output_buffer)
GST_ERROR_OBJECT (vtdec, "couldn't get textures from buffer");
}
/* we need to check this in case reorder_queue_length=0 (jpeg for
* example) or we're draining/flushing

View file

@ -1127,7 +1127,8 @@ gst_vtenc_encode_frame (GstVTEnc * self, GstVideoCodecFrame * frame)
goto cv_error;
}
outbuf = gst_core_video_buffer_new ((CVBufferRef) pbuf, &self->video_info);
outbuf =
gst_core_video_buffer_new ((CVBufferRef) pbuf, &self->video_info, NULL);
if (!gst_video_frame_map (&outframe, &self->video_info, outbuf,
GST_MAP_WRITE)) {
gst_video_frame_unmap (&inframe);
@ -1309,7 +1310,7 @@ gst_vtenc_enqueue_buffer (void *outputCallbackRefCon,
/* We are dealing with block buffers here, so we don't need
* to enable the use of the video meta API on the core media buffer */
frame->output_buffer = gst_core_media_buffer_new (sampleBuffer, FALSE);
frame->output_buffer = gst_core_media_buffer_new (sampleBuffer, FALSE, NULL);
beach:
/* needed anyway so the frame will be released */