applemedia/gl: Update code to use ARC

All code interacting with Objective-C objects now uses Automatic
Reference Counting (ARC) rather than manual memory management or
Garbage Collection. Because ARC prohibits C structs from containing
references to Objective-C objects, all such fields are now typed
'gpointer'. Setting and getting Objective-C fields on such a
struct now uses explicit __bridge_* casts to tell ARC about
object lifetimes.

https://bugzilla.gnome.org/show_bug.cgi?id=777847
Authored by Nick Kallen on 2017-02-03 14:46:39 +01:00, committed by Sebastian Dröge
parent 201e71c3aa
commit 46bbc60d24
25 changed files with 244 additions and 302 deletions
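
The pattern repeated throughout the diff below boils down to three bridge operations. The following sketch is illustrative only (the GstFooPrivate names are hypothetical, not part of the patch): the struct field becomes a gpointer, __bridge_retained transfers ownership from ARC to the struct when the field is set, a plain __bridge cast borrows the object when the field is read, and CFBridgingRelease (or CFRelease) drops that reference when the field is cleared.

#import <Foundation/Foundation.h>
#include <glib.h>

/* Hypothetical private struct, for illustration only */
typedef struct {
  gpointer obj;   /* really an NSObject *; ARC forbids ObjC pointers in C structs */
} GstFooPrivate;

static void
gst_foo_set_obj (GstFooPrivate * priv, NSObject * obj)
{
  /* __bridge_retained: the struct now owns a +1 reference */
  priv->obj = (__bridge_retained gpointer) obj;
}

static void
gst_foo_use_obj (GstFooPrivate * priv)
{
  /* __bridge: borrow the object without touching its retain count */
  NSObject *obj = (__bridge NSObject *) priv->obj;
  g_print ("%s\n", [[obj description] UTF8String]);
}

static void
gst_foo_clear_obj (GstFooPrivate * priv)
{
  /* CFBridgingRelease (or CFRelease) balances the __bridge_retained above */
  CFBridgingRelease (priv->obj);
  priv->obj = NULL;
}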


@ -61,7 +61,7 @@ struct _GstCAOpenGLLayerSink
GstBuffer *next_buffer;
GstBuffer *next_sync;
GstGLCAOpenGLLayer *layer;
gpointer layer;
gboolean keep_aspect_ratio;


@ -79,7 +79,7 @@ gst_ca_opengl_layer_sink_bin_get_property (GObject * object, guint prop_id,
static void
gst_ca_opengl_layer_sink_bin_init (GstCAOpenGLLayerSinkBin * self)
{
GstGLCAOpenGLLayer *sink = g_object_new (GST_TYPE_CA_OPENGL_LAYER_SINK, NULL);
gpointer *sink = g_object_new (GST_TYPE_CA_OPENGL_LAYER_SINK, NULL);
g_signal_connect (sink, "notify::layer", G_CALLBACK (_on_notify_layer), self);
@ -266,6 +266,11 @@ gst_ca_opengl_layer_sink_finalize (GObject * object)
g_mutex_clear (&ca_sink->drawing_lock);
if (ca_sink->layer) {
CFRelease(ca_sink->layer);
ca_sink->layer = NULL;
}
GST_DEBUG ("finalized");
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@ -322,13 +327,16 @@ static void
_create_layer (gpointer data)
{
GstCAOpenGLLayerSink *ca_sink = data;
id layer;
if (!ca_sink->layer) {
ca_sink->layer = [[NSClassFromString(@"GstGLCAOpenGLLayer") alloc]
layer = [[NSClassFromString(@"GstGLCAOpenGLLayer") alloc]
initWithGstGLContext:GST_GL_CONTEXT_COCOA (ca_sink->context)];
[ca_sink->layer setDrawCallback:(GstGLWindowCB)gst_ca_opengl_layer_sink_on_draw
ca_sink->layer = (__bridge_retained gpointer)layer;
[layer setDrawCallback:(GstGLWindowCB)gst_ca_opengl_layer_sink_on_draw
data:ca_sink notify:NULL];
[ca_sink->layer setResizeCallback:(GstGLWindowResizeCB)gst_ca_opengl_layer_sink_on_resize
[layer setResizeCallback:(GstGLWindowResizeCB)gst_ca_opengl_layer_sink_on_resize
data:ca_sink notify:NULL];
g_object_notify (G_OBJECT (ca_sink), "layer");
}
@ -507,6 +515,10 @@ gst_ca_opengl_layer_sink_change_state (GstElement * element, GstStateChange tran
break;
}
case GST_STATE_CHANGE_READY_TO_NULL:
if (ca_sink->layer) {
CFRelease(ca_sink->layer);
ca_sink->layer = NULL;
}
break;
default:
break;
@ -698,7 +710,7 @@ gst_ca_opengl_layer_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
/* The layer will automatically call the draw callback to draw the new
* content */
[CATransaction begin];
[ca_sink->layer setNeedsDisplay];
[(__bridge GstGLCAOpenGLLayer *)(ca_sink->layer) setNeedsDisplay];
[CATransaction commit];
GST_TRACE ("post redisplay");
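
This sink balances its __bridge_retained cast with plain CFRelease() rather than CFBridgingRelease(). Either call works, since CFRelease() also operates on Objective-C objects; CFBridgingRelease() merely hands the +1 back to ARC before it is dropped. A minimal sketch (names hypothetical):

#import <Foundation/Foundation.h>
#include <glib.h>

static void
retained_bridge_roundtrip (void)
{
  NSObject *obj = [[NSObject alloc] init];
  gpointer field = (__bridge_retained gpointer) obj;   /* +1 owned by the C side */

  /* Exactly one of the following balances that +1: */
  CFRelease (field);                 /* plain CoreFoundation release        */
  /* CFBridgingRelease (field); */   /* equivalent: hand the +1 back to ARC */
  field = NULL;
}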


@ -29,6 +29,7 @@ libgstgl_cocoa_la_CFLAGS = \
libgstgl_cocoa_la_OBJCFLAGS = \
-I$(top_srcdir)/gst-libs \
-I$(top_builddir)/gst-libs \
-fobjc-arc \
$(GL_OBJCFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) \


@ -52,20 +52,18 @@ _init_debug (void)
gst_object_unref (self->draw_context);
GST_TRACE ("dealloc GstGLCAOpenGLLayer %p context %p", self, self->gst_gl_context);
[super dealloc];
}
static void
_context_ready (gpointer data)
{
GstGLCAOpenGLLayer *ca_layer = data;
GstGLCAOpenGLLayer *ca_layer = (__bridge GstGLCAOpenGLLayer *) data;
g_atomic_int_set (&ca_layer->can_draw, 1);
}
- (id)initWithGstGLContext:(GstGLContextCocoa *)parent_gl_context {
[super init];
self = [super init];
_init_debug();
@ -75,7 +73,7 @@ _context_ready (gpointer data)
self.needsDisplayOnBoundsChange = YES;
gst_gl_window_send_message_async (GST_GL_CONTEXT (parent_gl_context)->window,
(GstGLWindowCB) _context_ready, self, NULL);
(GstGLWindowCB) _context_ready, (__bridge_retained gpointer)self, (GDestroyNotify)CFRelease);
return self;
}
@ -177,7 +175,7 @@ _context_ready (gpointer data)
- (void)setResizeCallback:(GstGLWindowResizeCB)cb data:(gpointer)data
notify:(GDestroyNotify)notify {
if (self->resize_notify)
self->resize_notify (self->resize_notify);
self->resize_notify (self->resize_data);
self->resize_cb = cb;
self->resize_data = data;
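
The two changes above show the idiom for handing an Objective-C object to C callback machinery that takes a gpointer plus a GDestroyNotify: __bridge_retained keeps the object alive for as long as the C side holds the pointer, and CFRelease doubles as the destroy notify that drops that reference again. A self-contained sketch under the same assumptions (queue_task and MyWorker are made up for illustration, standing in for gst_gl_window_send_message_async() and friends):

#import <Foundation/Foundation.h>
#include <glib.h>

typedef void (*TaskFunc) (gpointer data);

/* Hypothetical stand-in for a C API that stores data plus a destroy notify */
static void
queue_task (TaskFunc func, gpointer data, GDestroyNotify notify)
{
  func (data);
  if (notify)
    notify (data);
}

@interface MyWorker : NSObject
- (void)doWork;
- (void)schedule;
@end

static void
on_task (gpointer data)
{
  /* plain __bridge: borrow the object, ownership stays with the task */
  MyWorker *worker = (__bridge MyWorker *) data;
  [worker doWork];
}

@implementation MyWorker
- (void)doWork
{
  NSLog (@"working");
}

- (void)schedule
{
  /* __bridge_retained keeps self alive while the task holds the pointer;
   * CFRelease as the GDestroyNotify drops that reference afterwards */
  queue_task (on_task, (__bridge_retained gpointer) self,
      (GDestroyNotify) CFRelease);
}
@end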


@ -174,7 +174,6 @@ gst_gl_context_cocoa_create_context (GstGLContext *context, GstGLAPI gl_api,
GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
GstGLAPI context_api = GST_GL_API_NONE;
const GLint swapInterval = 1;
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
CGLPixelFormatObj fmt = NULL;
CGLContextObj glContext;
CGLPixelFormatAttribute attribs[] = {
@ -274,7 +273,6 @@ gst_gl_context_cocoa_create_context (GstGLContext *context, GstGLAPI gl_api,
if (window)
gst_object_unref (window);
[pool release];
return TRUE;
@ -282,7 +280,6 @@ error:
{
if (window)
gst_object_unref (window);
[pool release];
return FALSE;
}
}
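
NSAutoreleasePool cannot be used directly under ARC, which is why the alloc/release pairs simply disappear in these files. Where a local pool is still wanted (for instance around a loop running off the main thread), the ARC-compatible spelling is an @autoreleasepool block; a minimal sketch:

#import <Foundation/Foundation.h>

static void
drain_per_iteration (void)
{
  for (int i = 0; i < 100; i++) {
    @autoreleasepool {
      /* autoreleased temporaries created in this iteration are
       * released at the closing brace */
      NSString *s = [NSString stringWithFormat:@"iteration %d", i];
      NSLog (@"%@", s);
    }
  }
}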


@ -48,7 +48,6 @@ static GCond nsapp_cond;
static gboolean
gst_gl_display_cocoa_nsapp_iteration (gpointer data)
{
NSAutoreleasePool *pool = nil;
NSEvent *event = nil;
if (![NSThread isMainThread]) {
@ -56,7 +55,6 @@ gst_gl_display_cocoa_nsapp_iteration (gpointer data)
return FALSE;
}
pool = [[NSAutoreleasePool alloc] init];
while ((event = ([NSApp nextEventMatchingMask:NSAnyEventMask
untilDate:[NSDate dateWithTimeIntervalSinceNow:0.05]
@ -64,16 +62,12 @@ gst_gl_display_cocoa_nsapp_iteration (gpointer data)
[NSApp sendEvent:event];
}
[pool release];
return TRUE;
}
static void
gst_gl_display_cocoa_open_and_attach_source (gpointer data)
{
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
if ([NSThread isMainThread]) {
/* The sharedApplication class method initializes
* the display environment and connects your program
@ -89,8 +83,6 @@ gst_gl_display_cocoa_open_and_attach_source (gpointer data)
GST_DEBUG ("NSApp iteration loop attached, id %d", nsapp_source_id);
}
[pool release];
}
static gboolean


@ -82,8 +82,8 @@ static void gst_gl_window_cocoa_send_message_async (GstGLWindow * window,
struct _GstGLWindowCocoaPrivate
{
GstGLNSWindow *internal_win_id;
NSView *external_view;
gpointer internal_win_id;
gpointer external_view;
gboolean visible;
gint preferred_width;
gint preferred_height;
@ -93,7 +93,7 @@ struct _GstGLWindowCocoaPrivate
/* atomic set when the internal NSView has been created */
int view_ready;
dispatch_queue_t gl_queue;
gpointer gl_queue;
};
static void
@ -128,15 +128,15 @@ gst_gl_window_cocoa_init (GstGLWindowCocoa * window)
window->priv->preferred_width = 320;
window->priv->preferred_height = 240;
window->priv->gl_queue =
dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL);
window->priv->gl_queue = (__bridge_retained gpointer)
(dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL));
}
static void
gst_gl_window_cocoa_finalize (GObject * object)
{
GstGLWindowCocoa *window = GST_GL_WINDOW_COCOA (object);
dispatch_release (window->priv->gl_queue);
window->priv->gl_queue = NULL;
G_OBJECT_CLASS (parent_class)->finalize (object);
}
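
When building with ARC on current SDKs, dispatch_queue_t is itself an Objective-C object type (OS_OBJECT_USE_OBJC), so explicit dispatch_retain()/dispatch_release() calls are rejected and the queue gets the same gpointer-plus-bridge treatment as the Cocoa objects. A sketch of holding a queue in a C struct under that assumption (WindowPriv is hypothetical):

#import <Foundation/Foundation.h>
#include <glib.h>

typedef struct {
  gpointer gl_queue;   /* really a dispatch_queue_t */
} WindowPriv;

static void
window_priv_init (WindowPriv * priv)
{
  priv->gl_queue = (__bridge_retained gpointer)
      dispatch_queue_create ("org.example.glwindow", NULL);
}

static void
window_priv_dispatch (WindowPriv * priv, dispatch_block_t block)
{
  dispatch_async ((__bridge dispatch_queue_t) priv->gl_queue, block);
}

static void
window_priv_clear (WindowPriv * priv)
{
  /* return the +1 taken at creation; CFRelease() would work equally well */
  CFBridgingRelease (priv->gl_queue);
  priv->gl_queue = NULL;
}
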
@ -156,6 +156,7 @@ gst_gl_window_cocoa_create_window (GstGLWindowCocoa *window_cocoa)
{
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
GstGLNSWindow *internal_win_id;
NSRect mainRect = [[NSScreen mainScreen] visibleFrame];
gint h = priv->preferred_height;
gint y = mainRect.size.height > h ? (mainRect.size.height - h) * 0.5 : 0;
@ -168,14 +169,16 @@ gst_gl_window_cocoa_create_window (GstGLWindowCocoa *window_cocoa)
gst_object_unref (context);
priv->internal_win_id = [[GstGLNSWindow alloc] initWithContentRect:rect styleMask:
internal_win_id = [[GstGLNSWindow alloc] initWithContentRect:rect styleMask:
(NSTitledWindowMask | NSClosableWindowMask |
NSResizableWindowMask | NSMiniaturizableWindowMask)
backing: NSBackingStoreBuffered defer: NO screen: nil gstWin: window_cocoa];
GST_DEBUG ("NSWindow id: %"G_GUINTPTR_FORMAT, (guintptr) priv->internal_win_id);
priv->internal_win_id = (__bridge_retained gpointer)internal_win_id;
[priv->internal_win_id setContentView:glView];
GST_DEBUG ("NSWindow id: %"G_GUINTPTR_FORMAT, (guintptr) priv->internal_win_id);
[internal_win_id setContentView:glView];
g_atomic_int_set (&window_cocoa->priv->view_ready, 1);
@ -196,10 +199,11 @@ static void
gst_gl_window_cocoa_close (GstGLWindow *window)
{
GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)window_cocoa->priv->internal_win_id;
[[window_cocoa->priv->internal_win_id contentView] removeFromSuperview];
[window_cocoa->priv->internal_win_id release];
window_cocoa->priv->internal_win_id = nil;
[[internal_win_id contentView] removeFromSuperview];
CFBridgingRelease(window_cocoa->priv->internal_win_id);
window_cocoa->priv->internal_win_id = NULL;
}
static guintptr
@ -219,7 +223,7 @@ gst_gl_window_cocoa_set_window_handle (GstGLWindow * window, guintptr handle)
if (priv->internal_win_id) {
if (handle) {
priv->external_view = (NSView *) handle;
priv->external_view = (gpointer)handle;
priv->visible = TRUE;
} else {
/* bring back our internal window */
@ -229,17 +233,22 @@ gst_gl_window_cocoa_set_window_handle (GstGLWindow * window, guintptr handle)
dispatch_async (dispatch_get_main_queue (), ^{
NSView *view = [window_cocoa->priv->internal_win_id contentView];
[window_cocoa->priv->internal_win_id orderOut:window_cocoa->priv->internal_win_id];
GstGLNSWindow *internal_win_id =
(__bridge GstGLNSWindow *)window_cocoa->priv->internal_win_id;
NSView *external_view =
(__bridge NSView *)window_cocoa->priv->external_view;
[window_cocoa->priv->external_view addSubview: view];
NSView *view = [internal_win_id contentView];
[internal_win_id orderOut:internal_win_id];
[view setFrame: [window_cocoa->priv->external_view bounds]];
[external_view addSubview: view];
[view setFrame: [external_view bounds]];
[view setAutoresizingMask: NSViewWidthSizable|NSViewHeightSizable];
});
} else {
/* no internal window yet so delay it to the next drawing */
priv->external_view = (NSView*) handle;
priv->external_view = (gpointer)handle;
priv->visible = FALSE;
}
}
@ -249,11 +258,12 @@ _show_window (gpointer data)
{
GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (data);
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
GST_DEBUG_OBJECT (window_cocoa, "make the window available\n");
[priv->internal_win_id makeMainWindow];
[priv->internal_win_id orderFrontRegardless];
[priv->internal_win_id setViewsNeedDisplay:YES];
[internal_win_id makeMainWindow];
[internal_win_id orderFrontRegardless];
[internal_win_id setViewsNeedDisplay:YES];
priv->visible = TRUE;
}
@ -283,11 +293,13 @@ gst_gl_window_cocoa_queue_resize (GstGLWindow * window)
{
GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
GstGLNSView *view;
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
if (!g_atomic_int_get (&window_cocoa->priv->view_ready))
return;
view = (GstGLNSView *)[window_cocoa->priv->internal_win_id contentView];
view = (GstGLNSView *)[internal_win_id contentView];
[view->layer queueResize];
}
@ -297,13 +309,15 @@ gst_gl_window_cocoa_draw (GstGLWindow * window)
{
GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
GstGLNSView *view;
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
/* As the view is created asynchronously in the main thread we cannot know
* exactly when it will be ready to draw to */
if (!g_atomic_int_get (&window_cocoa->priv->view_ready))
return;
view = (GstGLNSView *)[window_cocoa->priv->internal_win_id contentView];
view = (GstGLNSView *)[internal_win_id contentView];
/* this redraws the GstGLCAOpenGLLayer which calls
* gst_gl_window_cocoa_draw_thread(). Use an explicit CATransaction since we
@ -329,8 +343,9 @@ gst_gl_cocoa_draw_cb (GstGLWindowCocoa *window_cocoa)
{
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
if (window_cocoa->priv->internal_win_id && ![priv->internal_win_id isClosed]) {
if (internal_win_id && ![internal_win_id isClosed]) {
GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
/* draw opengl scene in the back buffer */
@ -342,12 +357,13 @@ gst_gl_cocoa_draw_cb (GstGLWindowCocoa *window_cocoa)
static void
gst_gl_cocoa_resize_cb (GstGLNSView * view, guint width, guint height)
{
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
GstGLWindowCocoa *window_cocoa = view->window_cocoa;
GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
GstGLContext *context = gst_gl_window_get_context (window);
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
if (window_cocoa->priv->internal_win_id && ![window_cocoa->priv->internal_win_id isClosed]) {
if (internal_win_id && ![internal_win_id isClosed]) {
const GstGLFuncs *gl;
NSRect bounds = [view bounds];
NSRect visibleRect = [view visibleRect];
@ -376,7 +392,6 @@ gst_gl_cocoa_resize_cb (GstGLNSView * view, guint width, guint height)
}
gst_object_unref (context);
[pool release];
}
static void
@ -386,6 +401,8 @@ gst_gl_window_cocoa_send_message_async (GstGLWindow * window,
GstGLWindowCocoa *window_cocoa = (GstGLWindowCocoa *) window;
GstGLContext *context = gst_gl_window_get_context (window);
GThread *thread = gst_gl_context_get_thread (context);
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
dispatch_queue_t gl_queue = (__bridge dispatch_queue_t)priv->gl_queue;
if (thread == g_thread_self()) {
/* this case happens for nested calls happening from inside the GCD queue */
@ -394,7 +411,7 @@ gst_gl_window_cocoa_send_message_async (GstGLWindow * window,
destroy (data);
gst_object_unref (context);
} else {
dispatch_async (window_cocoa->priv->gl_queue, ^{
dispatch_async (gl_queue, ^{
gst_gl_context_activate (context, TRUE);
gst_object_unref (context);
callback (data);
@ -423,6 +440,9 @@ gst_gl_window_cocoa_send_message_async (GstGLWindow * window,
m_isClosed = NO;
window_cocoa = cocoa;
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
NSView *external_view = (__bridge NSView *)priv->external_view;
self = [super initWithContentRect: contentRect
styleMask: styleMask backing: bufferingType
@ -436,7 +456,7 @@ gst_gl_window_cocoa_send_message_async (GstGLWindow * window,
[self setBackgroundColor:[NSColor blackColor]];
[self orderOut:window_cocoa->priv->internal_win_id];
[self orderOut:internal_win_id];
return self;
}
@ -473,8 +493,10 @@ close_window_cb (gpointer data)
/* Called in the main thread which is never the gl thread */
- (BOOL) windowShouldClose:(id)sender {
GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
GST_DEBUG ("user clicked the close button\n");
[window_cocoa->priv->internal_win_id setClosed];
[internal_win_id setClosed];
gst_gl_window_send_message_async (GST_GL_WINDOW (window_cocoa),
(GstGLWindowCB) close_window_cb, gst_object_ref (window_cocoa),
(GDestroyNotify) gst_object_unref);
@ -509,7 +531,7 @@ close_window_cb (gpointer data)
[self->layer setDrawCallback:(GstGLWindowCB)gst_gl_cocoa_draw_cb
data:window notify:NULL];
[self->layer setResizeCallback:(GstGLWindowResizeCB)gst_gl_cocoa_resize_cb
data:self notify:NULL];
data:(__bridge_retained gpointer)self notify:(GDestroyNotify)CFRelease];
[self setLayerContentsRedrawPolicy:NSViewLayerContentsRedrawOnSetNeedsDisplay];
@ -519,9 +541,7 @@ close_window_cb (gpointer data)
}
- (void) dealloc {
[self->layer release];
[super dealloc];
self->layer = nil;
}
- (void)renewGState {


@ -21,6 +21,7 @@ libgstgl_eagl_la_CFLAGS = \
libgstgl_eagl_la_OBJCFLAGS = \
-I$(top_srcdir)/gst-libs \
-I$(top_builddir)/gst-libs \
-fobjc-arc \
$(GL_CFLAGS) \
$(GL_OBJCFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) \


@ -33,6 +33,11 @@ G_BEGIN_DECLS
#define GST_IS_GL_CONTEXT_EAGL_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE((k), GST_TYPE_GL_CONTEXT_EAGL))
#define GST_GL_CONTEXT_EAGL_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS((o), GST_TYPE_GL_CONTEXT_EAGL, GstGLContextEaglClass))
#define GS_GL_CONTEXT_EAGL_CONTEXT(obj) \
((__bridge EAGLContext *)(obj->priv->eagl_context))
#define GS_GL_CONTEXT_EAGL_LAYER(obj) \
((__bridge CAEAGLLayer *)(obj->priv->eagl_layer))
typedef struct _GstGLContextEagl GstGLContextEagl;
typedef struct _GstGLContextEaglPrivate GstGLContextEaglPrivate;
typedef struct _GstGLContextEaglClass GstGLContextEaglClass;
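
Several headers in this patch wrap the __bridge cast in a small accessor macro like the two above, so the .m files can keep their message sends close to the pre-ARC form. The same trick works for any bridged field; a hypothetical example (GstFoo is not part of the patch):

#import <UIKit/UIKit.h>
#include <glib.h>

typedef struct {
  gpointer view;   /* really a UIView * */
} GstFooPrivate;

typedef struct {
  GstFooPrivate *priv;
} GstFoo;

#define GST_FOO_VIEW(obj) ((__bridge UIView *)((obj)->priv->view))

static void
gst_foo_redraw (GstFoo * foo)
{
  /* the call site reads almost like the pre-ARC code */
  [GST_FOO_VIEW (foo) setNeedsDisplay];
}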


@ -46,10 +46,10 @@ static GstGLPlatform gst_gl_context_eagl_get_gl_platform (GstGLContext *
struct _GstGLContextEaglPrivate
{
EAGLContext *eagl_context;
gpointer eagl_context;
/* Used if we render to a window */
CAEAGLLayer *eagl_layer;
gpointer eagl_layer;
GLuint framebuffer;
GLuint color_renderbuffer;
GLuint depth_renderbuffer;
@ -106,7 +106,7 @@ gst_gl_context_eagl_resize (GstGLContextEagl * eagl_context)
int width, height;
glBindRenderbuffer (GL_RENDERBUFFER, eagl_context->priv->color_renderbuffer);
[eagl_context->priv->eagl_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:eagl_context->priv->eagl_layer];
[GS_GL_CONTEXT_EAGL_CONTEXT(eagl_context) renderbufferStorage:GL_RENDERBUFFER fromDrawable:GS_GL_CONTEXT_EAGL_LAYER(eagl_context)];
glGetRenderbufferParameteriv (GL_RENDERBUFFER,
GL_RENDERBUFFER_WIDTH, &width);
glGetRenderbufferParameteriv (GL_RENDERBUFFER,
@ -126,7 +126,7 @@ gst_gl_context_eagl_release_layer (GstGLContext * context)
if (context_eagl->priv->eagl_layer) {
gst_gl_context_eagl_activate (context, TRUE);
[context_eagl->priv->eagl_context renderbufferStorage: GL_RENDERBUFFER fromDrawable:nil];
[GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl) renderbufferStorage: GL_RENDERBUFFER fromDrawable:nil];
glDeleteFramebuffers (1, &context_eagl->priv->framebuffer);
context_eagl->priv->framebuffer = 0;
@ -156,7 +156,7 @@ gst_gl_context_eagl_update_layer (GstGLContext * context)
UIView *window_handle = nil;
GstGLWindow *window = gst_gl_context_get_window (context);
if (window)
window_handle = (UIView *) gst_gl_window_get_window_handle (window);
window_handle = (__bridge UIView *)((void *)gst_gl_window_get_window_handle (window));
if (!window_handle) {
GST_INFO_OBJECT (context, "window handle not set yet, not updating layer");
@ -170,7 +170,7 @@ gst_gl_context_eagl_update_layer (GstGLContext * context)
gst_gl_context_eagl_release_layer (context);
eagl_layer = (CAEAGLLayer *)[window_handle layer];
[EAGLContext setCurrentContext:priv->eagl_context];
[EAGLContext setCurrentContext:GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl)];
/* Allocate framebuffer */
glGenFramebuffers (1, &framebuffer);
@ -178,7 +178,7 @@ gst_gl_context_eagl_update_layer (GstGLContext * context)
/* Allocate color render buffer */
glGenRenderbuffers (1, &color_renderbuffer);
glBindRenderbuffer (GL_RENDERBUFFER, color_renderbuffer);
[priv->eagl_context renderbufferStorage: GL_RENDERBUFFER fromDrawable:eagl_layer];
[GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl) renderbufferStorage: GL_RENDERBUFFER fromDrawable:eagl_layer];
glFramebufferRenderbuffer (GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
GL_RENDERBUFFER, color_renderbuffer);
/* Get renderbuffer width/height */
@ -203,7 +203,7 @@ gst_gl_context_eagl_update_layer (GstGLContext * context)
glBindRenderbuffer (GL_RENDERBUFFER, 0);
glBindFramebuffer (GL_FRAMEBUFFER, 0);
priv->eagl_layer = eagl_layer;
priv->eagl_layer = (__bridge_retained gpointer)eagl_layer;
priv->framebuffer = framebuffer;
priv->color_renderbuffer = color_renderbuffer;
priv->depth_renderbuffer = depth_renderbuffer;
@ -222,16 +222,16 @@ gst_gl_context_eagl_create_context (GstGLContext * context, GstGLAPI gl_api,
EAGLSharegroup *share_group;
if (other_context) {
EAGLContext *external_gl_context = (EAGLContext *)
EAGLContext *external_gl_context = (__bridge EAGLContext *)(void *)
gst_gl_context_get_gl_context (other_context);
share_group = [external_gl_context sharegroup];
} else {
share_group = nil;
}
priv->eagl_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3 sharegroup:share_group];
priv->eagl_context = (__bridge_retained gpointer)[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3 sharegroup:share_group];
if (!priv->eagl_context) {
priv->eagl_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2 sharegroup:share_group];
priv->eagl_context = (__bridge_retained gpointer)[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2 sharegroup:share_group];
}
if (!priv->eagl_context) {
g_set_error_literal (error, GST_GL_CONTEXT_ERROR,
@ -240,9 +240,6 @@ gst_gl_context_eagl_create_context (GstGLContext * context, GstGLAPI gl_api,
return FALSE;
}
if (share_group)
[share_group release];
priv->eagl_layer = NULL;
priv->framebuffer = 0;
priv->color_renderbuffer = 0;
@ -266,8 +263,8 @@ gst_gl_context_eagl_destroy_context (GstGLContext * context)
gst_gl_context_eagl_release_layer (context);
[context_eagl->priv->eagl_context release];
context_eagl->priv->eagl_context = nil;
CFRelease(context_eagl->priv->eagl_context);
context_eagl->priv->eagl_context = NULL;
}
static gboolean
@ -284,7 +281,7 @@ gst_gl_context_eagl_choose_format (GstGLContext * context, GError ** error)
return TRUE;
if (window)
window_handle = (UIView *) gst_gl_window_get_window_handle (window);
window_handle = (__bridge UIView *)(void *)gst_gl_window_get_window_handle (window);
if (!window_handle) {
gst_object_unref (window);
@ -341,7 +338,7 @@ gst_gl_context_eagl_swap_buffers (GstGLContext * context)
if (!context_eagl->priv->eagl_layer)
return;
[context_eagl->priv->eagl_context presentRenderbuffer:GL_RENDERBUFFER];
[GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl) presentRenderbuffer:GL_RENDERBUFFER];
}
static gboolean
@ -360,7 +357,7 @@ gst_gl_context_eagl_activate (GstGLContext * context, gboolean activate)
}
GST_DEBUG ("Attaching context to thread %p", g_thread_self ());
if ([EAGLContext setCurrentContext:context_eagl->priv->eagl_context] == NO) {
if ([EAGLContext setCurrentContext:GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl)] == NO) {
GST_ERROR ("Couldn't make context current");
return FALSE;
}


@ -33,6 +33,11 @@ G_BEGIN_DECLS
#define GST_IS_GL_WINDOW_EAGL_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE((k), GST_TYPE_GL_WINDOW_EAGL))
#define GST_GL_WINDOW_EAGL_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS((o), GST_TYPE_GL_WINDOW_EAGL, GstGLWindowEaglClass))
#define GS_GL_WINDOW_EAGL_VIEW(obj) \
((__bridge UIView *)(obj->priv->view))
#define GS_GL_WINDOW_EAGL_QUEUE(obj) \
((__bridge dispatch_queue_t)(obj->priv->gl_queue))
typedef struct _GstGLWindowEagl GstGLWindowEagl;
typedef struct _GstGLWindowEaglPrivate GstGLWindowEaglPrivate;
typedef struct _GstGLWindowEaglClass GstGLWindowEaglClass;


@ -54,10 +54,10 @@ static void gst_gl_window_eagl_send_message_async (GstGLWindow * window,
struct _GstGLWindowEaglPrivate
{
UIView *view;
gpointer view;
gint window_width, window_height;
gint preferred_width, preferred_height;
dispatch_queue_t gl_queue;
gpointer gl_queue;
};
static void
@ -88,14 +88,14 @@ gst_gl_window_eagl_init (GstGLWindowEagl * window)
{
window->priv = GST_GL_WINDOW_EAGL_GET_PRIVATE (window);
window->priv->gl_queue =
dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL);
(__bridge_retained gpointer)dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL);
}
static void
gst_gl_window_eagl_finalize (GObject * object)
{
GstGLWindowEagl *window = GST_GL_WINDOW_EAGL (object);
dispatch_release (window->priv->gl_queue);
CFRelease(window->priv->gl_queue);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@ -128,7 +128,7 @@ gst_gl_window_eagl_set_window_handle (GstGLWindow * window, guintptr handle)
window_eagl = GST_GL_WINDOW_EAGL (window);
context = gst_gl_window_get_context (window);
window_eagl->priv->view = (UIView *) handle;
window_eagl->priv->view = (gpointer)handle;
GST_INFO_OBJECT (context, "handle set, updating layer");
gst_gl_context_eagl_update_layer (context);
@ -159,10 +159,10 @@ gst_gl_window_eagl_send_message_async (GstGLWindow * window,
destroy (data);
gst_object_unref (context);
} else {
dispatch_async (window_eagl->priv->gl_queue, ^{
dispatch_async ((__bridge dispatch_queue_t)(window_eagl->priv->gl_queue), ^{
gst_gl_context_activate (context, TRUE);
gst_object_unref (context);
callback (data);
gst_object_unref (context);
if (destroy)
destroy (data);
});
@ -184,7 +184,7 @@ draw_cb (gpointer data)
CGSize size;
CAEAGLLayer *eagl_layer;
eagl_layer = (CAEAGLLayer *)[window_eagl->priv->view layer];
eagl_layer = (CAEAGLLayer *)[GS_GL_WINDOW_EAGL_VIEW(window_eagl) layer];
size = eagl_layer.frame.size;
if (window->queue_resize || window_eagl->priv->window_width != size.width ||


@ -35,6 +35,7 @@ endif
libgstapplemedia_la_OBJCFLAGS = \
-I$(top_srcdir)/gst-libs \
-I$(top_builddir)/gst-libs \
-fobjc-arc \
$(GST_OBJCFLAGS_WITH_VERSION) \
$(GST_BASE_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS)


@ -37,6 +37,8 @@ G_BEGIN_DECLS
#define GST_TYPE_AVF_ASSET_SRC \
(gst_avf_asset_src_get_type())
#define GST_AVF_ASSET_SRC_READER(obj) \
((__bridge GstAVFAssetReader *)(obj->reader))
#define GST_AVF_ASSET_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AVF_ASSET_SRC,GstAVFAssetSrc))
#define GST_AVF_ASSET_SRC_CLASS(klass) \
@ -95,9 +97,9 @@ typedef enum
- (void) start : (GError **) error;
- (void) stop;
- (void) seekTo: (guint64) start : (guint64) stop : (GError **) error;
- (bool) hasMediaType: (GstAVFAssetReaderMediaType) type;
- (BOOL) hasMediaType: (GstAVFAssetReaderMediaType) type;
- (GstCaps *) getCaps: (GstAVFAssetReaderMediaType) type;
- (bool) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index;
- (BOOL) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index;
- (GstBuffer *) nextBuffer: (GstAVFAssetReaderMediaType) type : (GError **) error;
@end
@ -110,7 +112,10 @@ struct _GstAVFAssetSrc
gint selected_video_track;
gint selected_audio_track;
GstAVFAssetReader *reader;
/* NOTE: ARC no longer allows Objective-C pointers in structs. */
/* Instead, use gpointer with explicit __bridge_* calls */
gpointer reader;
GstAVFAssetSrcState state;
GMutex lock;
GstEvent *seek_event;


@ -51,15 +51,9 @@ GST_DEBUG_CATEGORY_STATIC (gst_avf_asset_src_debug);
#define MEDIA_TYPE_TO_STR(x) \
(x == GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO ? "audio" : "video")
#define AVF_ASSET_READER_HAS_AUDIO(x) \
([self->reader hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO])
([GST_AVF_ASSET_SRC_READER(self) hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO])
#define AVF_ASSET_READER_HAS_VIDEO(x) \
([self->reader hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO])
#define OBJC_CALLOUT_BEGIN() \
NSAutoreleasePool *pool; \
\
pool = [[NSAutoreleasePool alloc] init]
#define OBJC_CALLOUT_END() \
[pool release]
([GST_AVF_ASSET_SRC_READER(self) hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO])
enum
{
@ -242,7 +236,6 @@ gst_avf_asset_src_change_state (GstElement * element, GstStateChange transition)
gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
OBJC_CALLOUT_BEGIN ();
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY: {
self->state = GST_AVF_ASSET_SRC_STATE_STOPPED;
@ -252,7 +245,7 @@ gst_avf_asset_src_change_state (GstElement * element, GstStateChange transition)
gst_avf_asset_src_stop_all (self);
return GST_STATE_CHANGE_FAILURE;
}
self->reader = [[GstAVFAssetReader alloc] initWithURI:self->uri:&error];
self->reader = (__bridge_retained gpointer)([[GstAVFAssetReader alloc] initWithURI:self->uri:&error]);
if (error) {
GST_ELEMENT_ERROR (element, RESOURCE, FAILED, ("AVFAssetReader error"),
("%s", error->message));
@ -282,12 +275,11 @@ gst_avf_asset_src_change_state (GstElement * element, GstStateChange transition)
gst_avf_asset_src_stop (self);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
[self->reader release];
CFBridgingRelease(self->reader);
break;
default:
break;
}
OBJC_CALLOUT_END ();
return ret;
}
@ -323,18 +315,18 @@ gst_avf_asset_src_query (GstPad *pad, GstObject * parent, GstQuery *query)
ret = TRUE;
break;
case GST_QUERY_DURATION:
gst_query_set_duration (query, GST_FORMAT_TIME, self->reader.duration);
gst_query_set_duration (query, GST_FORMAT_TIME, GST_AVF_ASSET_SRC_READER(self).duration);
ret = TRUE;
break;
case GST_QUERY_POSITION:
gst_query_set_position (query, GST_FORMAT_TIME, self->reader.position);
gst_query_set_position (query, GST_FORMAT_TIME, GST_AVF_ASSET_SRC_READER(self).position);
ret = TRUE;
break;
case GST_QUERY_SEEKING: {
GstFormat fmt;
gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
if (fmt == GST_FORMAT_TIME) {
gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, self->reader.duration);
gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, GST_AVF_ASSET_SRC_READER(self).duration);
ret = TRUE;
}
break;
@ -362,7 +354,6 @@ gst_avf_asset_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
gboolean res = TRUE;
GError *error = NULL;
OBJC_CALLOUT_BEGIN ();
self = GST_AVF_ASSET_SRC (gst_pad_get_parent_element (pad));
switch (GST_EVENT_TYPE (event)) {
@ -409,7 +400,7 @@ gst_avf_asset_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
stop = GST_CLOCK_TIME_NONE;
}
gst_avf_asset_src_send_event (self, gst_event_new_flush_start ());
[self->reader seekTo: start: stop: &error];
[GST_AVF_ASSET_SRC_READER(self) seekTo: start: stop: &error];
gst_segment_init (&segment, GST_FORMAT_TIME);
segment.rate = rate;
@ -439,7 +430,6 @@ gst_avf_asset_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
}
gst_object_unref (self);
OBJC_CALLOUT_END ();
return res;
}
@ -501,15 +491,14 @@ gst_avf_asset_src_read_data (GstAVFAssetSrc *self, GstPad *pad,
GstFlowReturn ret, combined_ret;
GError *error;
OBJC_CALLOUT_BEGIN ();
GST_AVF_ASSET_SRC_LOCK (self);
if (self->state != GST_AVF_ASSET_SRC_STATE_READING) {
GST_AVF_ASSET_SRC_UNLOCK (self);
goto exit;
return;
}
buf = [self->reader nextBuffer:type:&error];
buf = [GST_AVF_ASSET_SRC_READER(self) nextBuffer:type:&error];
GST_AVF_ASSET_SRC_UNLOCK (self);
if (buf == NULL) {
@ -520,13 +509,13 @@ gst_avf_asset_src_read_data (GstAVFAssetSrc *self, GstPad *pad,
gst_avf_asset_src_combine_flows (self, type, GST_FLOW_ERROR);
gst_pad_pause_task (pad);
goto exit;
return;
}
gst_pad_push_event (pad, gst_event_new_eos ());
gst_avf_asset_src_combine_flows (self, type, GST_FLOW_EOS);
gst_pad_pause_task (pad);
goto exit;
return;
}
ret = gst_pad_push (pad, buf);
@ -547,8 +536,6 @@ gst_avf_asset_src_read_data (GstAVFAssetSrc *self, GstPad *pad,
gst_pad_pause_task (pad);
}
exit:
OBJC_CALLOUT_END ();
}
static void
@ -571,9 +558,8 @@ gst_avf_asset_src_start_reader (GstAVFAssetSrc * self)
GError *error = NULL;
gboolean ret = TRUE;
OBJC_CALLOUT_BEGIN ();
[self->reader start: &error];
[GST_AVF_ASSET_SRC_READER(self) start: &error];
if (error != NULL) {
GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
("AVFAssetReader could not start reading"), ("%s", error->message));
@ -583,7 +569,6 @@ gst_avf_asset_src_start_reader (GstAVFAssetSrc * self)
}
exit:
OBJC_CALLOUT_END ();
return ret;
}
@ -592,7 +577,6 @@ gst_avf_asset_src_send_event (GstAVFAssetSrc *self, GstEvent *event)
{
gboolean ret = TRUE;
OBJC_CALLOUT_BEGIN ();
if (AVF_ASSET_READER_HAS_VIDEO (self)) {
ret |= gst_pad_push_event (self->videopad, gst_event_ref (event));
@ -602,7 +586,6 @@ gst_avf_asset_src_send_event (GstAVFAssetSrc *self, GstEvent *event)
}
gst_event_unref (event);
OBJC_CALLOUT_END ();
return ret;
}
@ -611,25 +594,24 @@ gst_avf_asset_src_start (GstAVFAssetSrc *self)
{
GstSegment segment;
OBJC_CALLOUT_BEGIN ();
if (self->state == GST_AVF_ASSET_SRC_STATE_STARTED) {
goto exit;
return;
}
GST_DEBUG_OBJECT (self, "Creating pads and starting reader");
gst_segment_init (&segment, GST_FORMAT_TIME);
segment.duration = self->reader.duration;
segment.duration = GST_AVF_ASSET_SRC_READER(self).duration;
/* We call AVFAssetReader's startReading when the pads are linked
* and no outputs can be added afterwards, so the tracks must be
* selected before adding any of the new pads */
if (AVF_ASSET_READER_HAS_AUDIO (self)) {
[self->reader selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO:
[GST_AVF_ASSET_SRC_READER(self) selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO:
self->selected_audio_track];
}
if (AVF_ASSET_READER_HAS_VIDEO (self)) {
[self->reader selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO:
[GST_AVF_ASSET_SRC_READER(self) selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO:
self->selected_video_track];
}
@ -643,9 +625,9 @@ gst_avf_asset_src_start (GstAVFAssetSrc *self)
gst_pad_set_active (self->audiopad, TRUE);
gst_avf_asset_src_send_start_stream (self, self->audiopad);
gst_pad_set_caps (self->audiopad,
[self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]);
[GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]);
gst_pad_push_event (self->audiopad, gst_event_new_caps (
[self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]));
[GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]));
gst_pad_push_event (self->audiopad, gst_event_new_segment (&segment));
gst_element_add_pad (GST_ELEMENT (self), self->audiopad);
}
@ -659,35 +641,31 @@ gst_avf_asset_src_start (GstAVFAssetSrc *self)
gst_pad_set_active (self->videopad, TRUE);
gst_avf_asset_src_send_start_stream (self, self->videopad);
gst_pad_set_caps (self->videopad,
[self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]);
[GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]);
gst_pad_push_event (self->videopad, gst_event_new_caps (
[self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]));
[GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]));
gst_pad_push_event (self->videopad, gst_event_new_segment (&segment));
gst_element_add_pad (GST_ELEMENT (self), self->videopad);
}
gst_element_no_more_pads (GST_ELEMENT (self));
self->state = GST_AVF_ASSET_SRC_STATE_STARTED;
exit:
OBJC_CALLOUT_END ();
}
static void
gst_avf_asset_src_stop (GstAVFAssetSrc *self)
{
gboolean has_audio, has_video;
OBJC_CALLOUT_BEGIN();
if (self->state == GST_AVF_ASSET_SRC_STATE_STOPPED) {
goto exit;
return;
}
GST_DEBUG ("Stopping tasks and removing pads");
has_audio = AVF_ASSET_READER_HAS_AUDIO (self);
has_video = AVF_ASSET_READER_HAS_VIDEO (self);
[self->reader stop];
[GST_AVF_ASSET_SRC_READER(self) stop];
if (has_audio) {
gst_pad_stop_task (self->audiopad);
@ -699,9 +677,6 @@ gst_avf_asset_src_stop (GstAVFAssetSrc *self)
}
self->state = GST_AVF_ASSET_SRC_STATE_STOPPED;
exit:
OBJC_CALLOUT_END ();
}
static gboolean
@ -811,7 +786,6 @@ gst_avf_asset_src_uri_set_uri (GstURIHandler * handler, const gchar * uri, GErro
AVAsset *asset;
gboolean ret = FALSE;
OBJC_CALLOUT_BEGIN ();
str = [NSString stringWithUTF8String: uri];
url = [[NSURL alloc] initWithString: str];
asset = [AVAsset assetWithURL: url];
@ -824,7 +798,6 @@ gst_avf_asset_src_uri_set_uri (GstURIHandler * handler, const gchar * uri, GErro
g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
"Invalid URI '%s' for avfassetsrc", uri);
}
OBJC_CALLOUT_END ();
return ret;
}
@ -872,11 +845,11 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
- (void) releaseReader
{
[video_track release];
[audio_track release];
[video_tracks release];
[audio_tracks release];
[reader release];
video_track = nil;
audio_track = nil;
video_tracks = nil;
audio_tracks = nil;
reader = nil;
}
- (void) initReader: (GError **) error
@ -889,13 +862,12 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
[nserror.description UTF8String]);
*error = g_error_new (GST_AVF_ASSET_SRC_ERROR, GST_AVF_ASSET_ERROR_INIT, "%s",
[nserror.description UTF8String]);
[asset release];
[reader release];
return;
}
audio_tracks = [[asset tracksWithMediaType:AVMediaTypeAudio] retain];
video_tracks = [[asset tracksWithMediaType:AVMediaTypeVideo] retain];
audio_tracks = [asset tracksWithMediaType:AVMediaTypeAudio];
video_tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
reader.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
GST_INFO ("Found %lu video tracks and %lu audio tracks",
(unsigned long)[video_tracks count], (unsigned long)[audio_tracks count]);
@ -911,12 +883,12 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
str = [NSString stringWithUTF8String: uri];
url = [[NSURL alloc] initWithString: str];
asset = [[AVAsset assetWithURL: url] retain];
asset = [AVAsset assetWithURL: url];
if (!asset.playable) {
*error = g_error_new (GST_AVF_ASSET_SRC_ERROR, GST_AVF_ASSET_ERROR_NOT_PLAYABLE,
"Media is not playable");
[asset release];
asset = nil;
return nil;
}
@ -940,11 +912,11 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
return self;
}
- (bool) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index
- (BOOL) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index
{
NSArray *tracks;
AVAssetTrack *track;
AVAssetReaderOutput **output;
AVAssetReaderOutput * __strong *output;
NSDictionary *settings;
NSString *mediaType;
gint *selected_track;
@ -978,7 +950,6 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
*output = [AVAssetReaderTrackOutput
assetReaderTrackOutputWithTrack:track
outputSettings:settings];
[*output retain];
[reader addOutput:*output];
return TRUE;
}
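
The __strong qualifier on the output double pointer is what lets it hold the address of a strong instance variable: under ARC the pointee of a double pointer carries an ownership qualifier, and the unqualified form does not match the address of a __strong ivar. A reduced sketch of the rule (Holder and its ivar are hypothetical):

#import <Foundation/Foundation.h>

@interface Holder : NSObject {
@public
  NSString *name;   /* instance variables are __strong by default under ARC */
}
@end

@implementation Holder
@end

static void
fill_slot (Holder * h)
{
  /* NSString **slot = &h->name;   -- rejected under ARC: the ownership of
   *                                  the pointee does not match the ivar   */
  NSString * __strong *slot = &h->name;   /* ownership now matches */
  *slot = @"assigned through the indirection";
}
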
@ -999,11 +970,11 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
- (void) stop
{
[self->reader cancelReading];
[reader cancelReading];
reading = FALSE;
}
- (bool) hasMediaType: (GstAVFAssetReaderMediaType) type
- (BOOL) hasMediaType: (GstAVFAssetReaderMediaType) type
{
if (type == GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO) {
return [audio_tracks count] != 0;
@ -1122,10 +1093,9 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
return caps;
}
- (oneway void) release
- (void) dealloc
{
[asset release];
asset = nil;
[self releaseReader];
if (audio_caps != NULL) {


@ -33,7 +33,7 @@ G_BEGIN_DECLS
#define GST_AVF_VIDEO_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AVF_VIDEO_SRC, GstAVFVideoSrcClass))
#define GST_AVF_VIDEO_SRC_IMPL(obj) \
((GstAVFVideoSrcImpl *) GST_AVF_VIDEO_SRC_CAST (obj)->impl)
((__bridge GstAVFVideoSrcImpl *) GST_AVF_VIDEO_SRC_CAST (obj)->impl)
#define GST_IS_AVF_VIDEO_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AVF_VIDEO_SRC))
#define GST_IS_AVF_VIDEO_SRC_CLASS(klass) \
@ -46,6 +46,8 @@ struct _GstAVFVideoSrc
{
GstPushSrc push_src;
/* NOTE: ARC no longer allows Objective-C pointers in structs. */
/* Instead, use gpointer with explicit __bridge_* calls */
gpointer impl;
};


@ -201,12 +201,8 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
- (void)finalize
{
dispatch_release (mainQueue);
mainQueue = NULL;
dispatch_release (workerQueue);
workerQueue = NULL;
[super finalize];
}
- (BOOL)openDeviceInput
@ -231,7 +227,6 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
device = [devices objectAtIndex:deviceIndex];
}
g_assert (device != nil);
[device retain];
GST_INFO ("Opening '%s'", [[device localizedName] UTF8String]);
@ -242,11 +237,9 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
("Failed to open device: %s",
[[err localizedDescription] UTF8String]),
(NULL));
[device release];
device = nil;
return NO;
}
[input retain];
return YES;
}
@ -280,7 +273,6 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
}
screenInput.capturesMouseClicks = captureScreenMouseClicks;
input = screenInput;
[input retain];
return YES;
#endif
}
@ -337,17 +329,13 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
[session removeInput:input];
[session removeOutput:output];
[session release];
session = nil;
[input release];
input = nil;
[output release];
output = nil;
if (!captureScreen) {
[device release];
device = nil;
}
@ -457,7 +445,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
CMVideoDimensions dimensions;
/* formatDescription can't be retrieved with valueForKey so use a selector here */
formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
@ -529,7 +517,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
CMFormatDescriptionRef formatDescription;
CMVideoDimensions dimensions;
formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
if (dimensions.width == info->width && dimensions.height == info->height) {
found_format = TRUE;
@ -805,9 +793,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
dispatch_sync (mainQueue, ^{ [session stopRunning]; });
dispatch_sync (workerQueue, ^{});
[bufQueueLock release];
bufQueueLock = nil;
[bufQueue release];
bufQueue = nil;
if (textureCache)
@ -902,7 +888,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
if ([bufQueue count] == BUFFER_QUEUE_SIZE)
[bufQueue removeLastObject];
[bufQueue insertObject:@{@"sbuf": (id)sampleBuffer,
[bufQueue insertObject:@{@"sbuf": (__bridge id)sampleBuffer,
@"timestamp": @(timestamp),
@"duration": @(duration)}
atIndex:0];
@ -925,7 +911,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
}
NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
sbuf = (CMSampleBufferRef) dic[@"sbuf"];
sbuf = (__bridge CMSampleBufferRef) dic[@"sbuf"];
timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
duration = (GstClockTime) [dic[@"duration"] longLongValue];
CFRetain (sbuf);
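
Boxing the CMSampleBufferRef into the dictionary with a plain __bridge cast lets the Cocoa collection retain it; reading it back with __bridge adds no reference, so the explicit CFRetain is what keeps the buffer alive once the queue entry is dropped. The same round trip, shown with a CFString for brevity as a self-contained sketch:

#import <Foundation/Foundation.h>

static CFStringRef
round_trip (void)
{
  CFStringRef value = CFStringCreateWithCString (NULL, "payload",
      kCFStringEncodingUTF8);

  /* the dictionary retains the boxed CF object */
  NSMutableArray *queue = [NSMutableArray array];
  [queue addObject:@{ @"value": (__bridge id) value }];
  CFRelease (value);                       /* the queue now holds the only ref */

  NSDictionary *entry = [queue lastObject];
  CFStringRef out = (__bridge CFStringRef) entry[@"value"];
  CFRetain (out);                          /* keep it past the removal below */
  [queue removeLastObject];

  return out;                              /* caller owns one reference */
}
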
@ -1224,28 +1210,16 @@ gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
0, "iOS AVFoundation video source");
}
#define OBJC_CALLOUT_BEGIN() \
NSAutoreleasePool *pool; \
\
pool = [[NSAutoreleasePool alloc] init]
#define OBJC_CALLOUT_END() \
[pool release]
static void
gst_avf_video_src_init (GstAVFVideoSrc * src)
{
OBJC_CALLOUT_BEGIN ();
src->impl = [[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
OBJC_CALLOUT_END ();
src->impl = (__bridge_retained gpointer)[[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
}
static void
gst_avf_video_src_finalize (GObject * obj)
{
OBJC_CALLOUT_BEGIN ();
[GST_AVF_VIDEO_SRC_IMPL (obj) release];
OBJC_CALLOUT_END ();
CFBridgingRelease(GST_AVF_VIDEO_SRC_CAST(obj)->impl);
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
@ -1320,9 +1294,7 @@ gst_avf_video_src_change_state (GstElement * element, GstStateChange transition)
{
GstStateChangeReturn ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState: transition];
OBJC_CALLOUT_END ();
return ret;
}
@ -1332,9 +1304,7 @@ gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
{
GstCaps *ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
OBJC_CALLOUT_END ();
return ret;
}
@ -1344,9 +1314,7 @@ gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
OBJC_CALLOUT_END ();
return ret;
}
@ -1356,9 +1324,7 @@ gst_avf_video_src_start (GstBaseSrc * basesrc)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
OBJC_CALLOUT_END ();
return ret;
}
@ -1368,9 +1334,7 @@ gst_avf_video_src_stop (GstBaseSrc * basesrc)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
OBJC_CALLOUT_END ();
return ret;
}
@ -1380,9 +1344,7 @@ gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
OBJC_CALLOUT_END ();
return ret;
}
@ -1392,9 +1354,7 @@ gst_avf_video_src_unlock (GstBaseSrc * basesrc)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
OBJC_CALLOUT_END ();
return ret;
}
@ -1404,9 +1364,7 @@ gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
OBJC_CALLOUT_END ();
return ret;
}
@ -1416,9 +1374,7 @@ gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
{
GstFlowReturn ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf];
OBJC_CALLOUT_END ();
return ret;
}
@ -1429,9 +1385,7 @@ gst_avf_video_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
GstCaps *ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps];
OBJC_CALLOUT_END ();
return ret;
}
@ -1442,9 +1396,7 @@ gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
{
gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query];
OBJC_CALLOUT_END ();
return ret;
}
@ -1452,7 +1404,5 @@ gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
static void
gst_avf_video_src_set_context (GstElement * element, GstContext * context)
{
OBJC_CALLOUT_BEGIN ();
[GST_AVF_VIDEO_SRC_IMPL (element) setContext:context];
OBJC_CALLOUT_END ();
}


@ -42,6 +42,8 @@ G_BEGIN_DECLS
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AV_SAMPLE_VIDEO_SINK))
#define GST_IS_AV_SAMPLE_VIDEO_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AV_SAMPLE_VIDEO_SINK))
#define GST_AV_SAMPLE_VIDEO_SINK_LAYER(obj) \
((__bridge AVSampleBufferDisplayLayer *)(obj->layer))
typedef struct _GstAVSampleVideoSink GstAVSampleVideoSink;
typedef struct _GstAVSampleVideoSinkClass GstAVSampleVideoSinkClass;
@ -50,7 +52,9 @@ struct _GstAVSampleVideoSink
{
GstVideoSink video_sink;
AVSampleBufferDisplayLayer *layer;
/* NOTE: ARC no longer allows Objective-C pointers in structs. */
/* Instead, use gpointer with explicit __bridge_* calls */
gpointer layer;
GstVideoInfo info;


@ -156,11 +156,11 @@ static void
gst_av_sample_video_sink_finalize (GObject * object)
{
GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (object);
__block AVSampleBufferDisplayLayer *layer = av_sink->layer;
__block gpointer layer = av_sink->layer;
if (layer) {
dispatch_async (dispatch_get_main_queue (), ^{
[layer release];
CFBridgingRelease(layer);
});
}
@ -198,19 +198,21 @@ gst_av_sample_video_sink_start (GstBaseSink * bsink)
GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);
if ([NSThread isMainThread]) {
av_sink->layer = [[AVSampleBufferDisplayLayer alloc] init];
AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];
av_sink->layer = (__bridge_retained gpointer)layer;
if (av_sink->keep_aspect_ratio)
av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect;
layer.videoGravity = AVLayerVideoGravityResizeAspect;
else
av_sink->layer.videoGravity = AVLayerVideoGravityResize;
layer.videoGravity = AVLayerVideoGravityResize;
g_object_notify (G_OBJECT (av_sink), "layer");
} else {
dispatch_sync (dispatch_get_main_queue (), ^{
av_sink->layer = [[AVSampleBufferDisplayLayer alloc] init];
AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];
av_sink->layer = (__bridge_retained gpointer)layer;
if (av_sink->keep_aspect_ratio)
av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect;
layer.videoGravity = AVLayerVideoGravityResizeAspect;
else
av_sink->layer.videoGravity = AVLayerVideoGravityResize;
layer.videoGravity = AVLayerVideoGravityResize;
g_object_notify (G_OBJECT (av_sink), "layer");
});
}
@ -224,7 +226,7 @@ _stop_requesting_data (GstAVSampleVideoSink * av_sink)
{
if (av_sink->layer) {
if (av_sink->layer_requesting_data)
[av_sink->layer stopRequestingMediaData];
[GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) stopRequestingMediaData];
av_sink->layer_requesting_data = FALSE;
}
}
@ -243,7 +245,7 @@ gst_av_sample_video_sink_stop (GstBaseSink * bsink)
g_mutex_lock (&av_sink->render_lock);
_stop_requesting_data (av_sink);
g_mutex_unlock (&av_sink->render_lock);
[av_sink->layer flushAndRemoveImage];
[GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) flushAndRemoveImage];
}
return TRUE;
@ -661,11 +663,12 @@ _enqueue_sample (GstAVSampleVideoSink * av_sink, GstBuffer *buf)
kCFBooleanTrue);
}
AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
if (av_sink->keep_aspect_ratio)
av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect;
layer.videoGravity = AVLayerVideoGravityResizeAspect;
else
av_sink->layer.videoGravity = AVLayerVideoGravityResize;
[av_sink->layer enqueueSampleBuffer:sample_buf];
layer.videoGravity = AVLayerVideoGravityResize;
[layer enqueueSampleBuffer:sample_buf];
CFRelease (pbuf);
CFRelease (sample_buf);
@ -678,13 +681,14 @@ _request_data (GstAVSampleVideoSink * av_sink)
{
av_sink->layer_requesting_data = TRUE;
[av_sink->layer requestMediaDataWhenReadyOnQueue:
AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
[layer requestMediaDataWhenReadyOnQueue:
dispatch_get_global_queue (DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
usingBlock:^{
while (TRUE) {
/* don't needlessly fill up avsamplebufferdisplaylayer's queue.
* This also allows us to skip displaying late frames */
if (!av_sink->layer.readyForMoreMediaData)
if (!layer.readyForMoreMediaData)
break;
g_mutex_lock (&av_sink->render_lock);
@ -752,9 +756,10 @@ gst_av_sample_video_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
MAC_OS_X_VERSION_MAX_ALLOWED >= 1010 && \
defined(MAC_OS_X_VERSION_MIN_REQUIRED) && \
MAC_OS_X_VERSION_MIN_REQUIRED <= MAC_OS_X_VERSION_10_4
if ([av_sink->layer status] == AVQueuedSampleBufferRenderingStatusFailed) {
AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
if ([layer status] == AVQueuedSampleBufferRenderingStatusFailed) {
GST_ERROR_OBJECT (av_sink, "failed to enqueue buffer on layer, %s",
[[[av_sink->layer error] description] UTF8String]);
[[[layer error] description] UTF8String]);
return GST_FLOW_ERROR;
}
#endif


@ -44,6 +44,12 @@ G_BEGIN_DECLS
#define GST_IS_IOS_ASSET_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_IOS_ASSET_SRC))
#define GST_IOS_ASSET_SRC_CAST(obj) ((GstIOSAssetSrc*) obj)
#define GST_IOS_ASSET_SRC_ASSET(obj) \
(__bridge ALAssetRepresentation *)(obj->asset)
#define GST_IOS_ASSET_SRC_LIBRARY(obj) \
(__bridge GstAssetsLibrary *)(obj->library)
#define GST_IOS_ASSET_SRC_URL(obj) \
(__bridge NSURL *)(obj->url)
typedef struct _GstIOSAssetSrc GstIOSAssetSrc;
typedef struct _GstIOSAssetSrcClass GstIOSAssetSrcClass;
@ -68,9 +74,12 @@ struct _GstIOSAssetSrc {
/*< private >*/
gchar * uri; /* asset uri */
NSURL * url; /* asset url */
ALAssetRepresentation * asset; /* asset representation */
GstAssetsLibrary * library; /* assets library */
/* NOTE: ARC no longer allows Objective-C pointers in structs. */
/* Instead, use gpointer with explicit __bridge_* calls */
gpointer url; /* asset url */
gpointer asset; /* asset representation */
gpointer library; /* assets library */
};
struct _GstIOSAssetSrcClass {


@ -52,12 +52,6 @@ GST_DEBUG_CATEGORY_STATIC (gst_ios_asset_src_debug);
#define DEFAULT_BLOCKSIZE 4*1024
#define OBJC_CALLOUT_BEGIN() \
NSAutoreleasePool *pool; \
\
pool = [[NSAutoreleasePool alloc] init]
#define OBJC_CALLOUT_END() \
[pool release]
enum
{
@ -142,25 +136,22 @@ gst_ios_asset_src_class_init (GstIOSAssetSrcClass * klass)
static void
gst_ios_asset_src_init (GstIOSAssetSrc * src)
{
OBJC_CALLOUT_BEGIN ();
src->uri = NULL;
src->asset = NULL;
src->library = [[[GstAssetsLibrary alloc] init] retain];
src->library = (__bridge_retained gpointer)[[GstAssetsLibrary alloc] init];
gst_base_src_set_blocksize (GST_BASE_SRC (src), DEFAULT_BLOCKSIZE);
OBJC_CALLOUT_END ();
}
static void
gst_ios_asset_src_free_resources (GstIOSAssetSrc *src)
{
OBJC_CALLOUT_BEGIN ();
if (src->asset != NULL) {
[src->asset release];
CFBridgingRelease(src->asset);
src->asset = NULL;
}
if (src->url != NULL) {
[src->url release];
CFBridgingRelease(src->url);
src->url = NULL;
}
@ -168,7 +159,6 @@ gst_ios_asset_src_free_resources (GstIOSAssetSrc *src)
g_free (src->uri);
src->uri = NULL;
}
OBJC_CALLOUT_END ();
}
static void
@ -176,12 +166,10 @@ gst_ios_asset_src_finalize (GObject * object)
{
GstIOSAssetSrc *src;
OBJC_CALLOUT_BEGIN ();
src = GST_IOS_ASSET_SRC (object);
gst_ios_asset_src_free_resources (src);
[src->library release];
CFBridgingRelease(src->library);
OBJC_CALLOUT_END ();
G_OBJECT_CLASS (gst_ios_asset_src_parent_class)->finalize (object);
}
@ -192,7 +180,6 @@ gst_ios_asset_src_set_uri (GstIOSAssetSrc * src, const gchar * uri, GError **err
NSString *nsuristr;
NSURL *url;
OBJC_CALLOUT_BEGIN ();
/* the element must be stopped in order to do this */
GST_OBJECT_LOCK (src);
state = GST_STATE (src);
@ -213,11 +200,10 @@ gst_ios_asset_src_set_uri (GstIOSAssetSrc * src, const gchar * uri, GError **err
}
GST_INFO_OBJECT (src, "URI : %s", src->uri);
src->url = url;
src->url = (__bridge_retained gpointer)url;
src->uri = g_strdup (uri);
g_object_notify (G_OBJECT (src), "uri");
OBJC_CALLOUT_END ();
return TRUE;
/* ERROR */
@ -229,7 +215,6 @@ wrong_state:
"Changing the 'uri' property on iosassetsrc when an asset is "
"open is not supported.");
GST_OBJECT_UNLOCK (src);
OBJC_CALLOUT_END ();
return FALSE;
}
}
@ -285,7 +270,6 @@ gst_ios_asset_src_create (GstBaseSrc * basesrc, guint64 offset, guint length,
GstFlowReturn ret;
GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc);
OBJC_CALLOUT_BEGIN ();
buf = gst_buffer_new_and_alloc (length);
if (G_UNLIKELY (buf == NULL && length > 0)) {
GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", length);
@ -296,10 +280,10 @@ gst_ios_asset_src_create (GstBaseSrc * basesrc, guint64 offset, guint length,
gst_buffer_map (buf, &info, GST_MAP_READWRITE);
/* No need to read anything if length is 0 */
bytes_read = [src->asset getBytes: info.data
fromOffset:offset
length:length
error:&err];
bytes_read = [GST_IOS_ASSET_SRC_ASSET(src) getBytes: info.data
fromOffset:offset
length:length
error:&err];
if (G_UNLIKELY (err != NULL)) {
goto could_not_read;
}
@ -333,7 +317,6 @@ could_not_read:
}
exit:
{
OBJC_CALLOUT_END ();
return ret;
}
@ -374,9 +357,7 @@ gst_ios_asset_src_get_size (GstBaseSrc * basesrc, guint64 * size)
src = GST_IOS_ASSET_SRC (basesrc);
OBJC_CALLOUT_BEGIN ();
*size = (guint64) [src->asset size];
OBJC_CALLOUT_END ();
*size = (guint64) [GST_IOS_ASSET_SRC_ASSET(src) size];
return TRUE;
}
@ -386,8 +367,7 @@ gst_ios_asset_src_start (GstBaseSrc * basesrc)
GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc);
gboolean ret = TRUE;
OBJC_CALLOUT_BEGIN ();
src->asset = [[src->library assetForURLSync: src->url] retain];
src->asset = (__bridge_retained gpointer)[GST_IOS_ASSET_SRC_LIBRARY(src) assetForURLSync: GST_IOS_ASSET_SRC_URL(src)];
if (src->asset == NULL) {
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ,
@ -396,7 +376,6 @@ gst_ios_asset_src_start (GstBaseSrc * basesrc)
ret = FALSE;
};
OBJC_CALLOUT_END ();
return ret;
}
@ -406,9 +385,7 @@ gst_ios_asset_src_stop (GstBaseSrc * basesrc)
{
GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc);
OBJC_CALLOUT_BEGIN ();
[src->asset release];
OBJC_CALLOUT_END ();
CFBridgingRelease(src->asset);
return TRUE;
}
@ -480,24 +457,23 @@ gst_ios_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
dispatch_async(queue, ^{
[self assetForURL:uri resultBlock:
^(ALAsset *myasset)
{
self.asset = myasset;
self.result = [myasset defaultRepresentation];
^(ALAsset *myasset)
{
self.asset = myasset;
self.result = [myasset defaultRepresentation];
dispatch_semaphore_signal(sema);
}
failureBlock:
^(NSError *myerror)
{
self.result = nil;
dispatch_semaphore_signal(sema);
}
dispatch_semaphore_signal(sema);
}
failureBlock:
^(NSError *myerror)
{
self.result = nil;
dispatch_semaphore_signal(sema);
}
];
});
dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER);
dispatch_release(sema);
return self.result;
}


@ -56,7 +56,6 @@ enable_mt_mode (void)
{
NSThread * th = [[NSThread alloc] init];
[th start];
[th release];
g_assert ([NSThread isMultiThreaded]);
}
#endif


@ -45,10 +45,10 @@ typedef struct _ContextThreadData
typedef struct _TextureWrapper
{
#if HAVE_IOS
CVOpenGLESTextureCacheRef *cache;
CVOpenGLESTextureCacheRef cache;
CVOpenGLESTextureRef texture;
#else
CVOpenGLTextureCacheRef *cache;
CVOpenGLTextureCacheRef cache;
CVOpenGLTextureRef texture;
#endif
@ -69,7 +69,7 @@ gst_video_texture_cache_new (GstGLContext * ctx)
CFDictionaryCreateMutable (NULL, 0, &kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
CVOpenGLESTextureCacheCreate (kCFAllocatorDefault, (CFDictionaryRef) cache_attrs,
(CVEAGLContext) gst_gl_context_get_gl_context (ctx), NULL, &cache->cache);
(__bridge CVEAGLContext) (gpointer)gst_gl_context_get_gl_context (ctx), NULL, &cache->cache);
#else
gst_ios_surface_memory_init ();
#if 0
@ -199,7 +199,8 @@ _do_create_memory (GstGLContext * context, ContextThreadData * data)
success: {
TextureWrapper *texture_data = g_new(TextureWrapper, 1);
texture_data->cache = CFRetain(cache->cache);
CFRetain(cache->cache);
texture_data->cache = cache->cache;
texture_data->texture = texture;
gl_target = gst_gl_texture_target_from_gl (CVOpenGLESTextureGetTarget (texture));
memory = gst_apple_core_video_memory_new_wrapped (gpixbuf, plane, size);
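
CoreVideo/CoreFoundation handles such as the texture cache are untouched by ARC: they can stay strongly typed inside C structs and keep their manual CFRetain/CFRelease pairs; only Objective-C object pointers need the gpointer-plus-bridge treatment. A small hypothetical sketch of the two kinds of field side by side:

#import <Foundation/Foundation.h>
#include <glib.h>

typedef struct {
  CFStringRef label;   /* CF handle: fine in a struct, manual CFRetain/CFRelease */
  gpointer delegate;   /* ObjC object: must be bridged in and out */
} Wrapper;             /* hypothetical */

static void
wrapper_init (Wrapper * w, CFStringRef label, NSObject * delegate)
{
  CFRetain (label);
  w->label = label;
  w->delegate = (__bridge_retained gpointer) delegate;
}

static void
wrapper_clear (Wrapper * w)
{
  CFRelease (w->label);
  CFBridgingRelease (w->delegate);
  w->label = NULL;
  w->delegate = NULL;
}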


@ -5,6 +5,7 @@ noinst_PROGRAMS = cocoa-videooverlay
cocoa_videooverlay_SOURCES = cocoa-videooverlay.m
cocoa_videooverlay_OBJCFLAGS=-Wno-error=unused-command-line-argument \
-fobjc-arc \
-I$(top_srcdir)/gst-libs -I$(top_builddir)/gst-libs \
$(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) \
$(GL_CFLAGS) $(GL_OBJCFLAGS)


@ -121,8 +121,6 @@ static GstBusSyncReply create_window (GstBus* bus, GstMessage* message, MainWind
static void end_stream_cb(GstBus* bus, GstMessage* message, MainWindow* window)
{
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
g_print ("end of stream\n");
gst_element_set_state ([window pipeline], GST_STATE_NULL);
@ -130,8 +128,6 @@ static void end_stream_cb(GstBus* bus, GstMessage* message, MainWindow* window)
g_main_loop_quit ([window loop]);
[window performSelectorOnMainThread:@selector(customClose) withObject:nil waitUntilDone:YES];
[pool release];
}
static gpointer thread_func (MainWindow* window)
@ -162,11 +158,9 @@ int main(int argc, char **argv)
gboolean ok=FALSE;
GstBus *bus=NULL;
GThread *loop_thread=NULL;
NSAutoreleasePool *pool=nil;
NSRect rect;
MainWindow *window=nil;
pool = [[NSAutoreleasePool alloc] init];
[NSApplication sharedApplication];
g_print("app created\n");
@ -202,14 +196,15 @@ int main(int argc, char **argv)
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_signal_watch (bus);
g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), window);
g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), window);
g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), window);
gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, window, NULL);
/* NOTE: window is not bridge_retained because its lifetime is just this function */
g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), (__bridge gpointer)window);
g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), (__bridge gpointer)window);
g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), (__bridge gpointer)window);
gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, (__bridge gpointer)window, NULL);
gst_object_unref (bus);
loop_thread = g_thread_new (NULL,
(GThreadFunc) thread_func, window);
(GThreadFunc) thread_func, (__bridge gpointer)window);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
@ -225,9 +220,5 @@ int main(int argc, char **argv)
g_thread_join (loop_thread);
[window release];
[pool release];
return 0;
}
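
As the inline NOTE above points out, the example passes the window with a plain __bridge cast because main() keeps a strong reference for as long as the callbacks can fire; __bridge_retained is only needed when the C side may outlive every Objective-C strong reference. A sketch of the two cases, with stubbed-out hypothetical registration APIs so it stays self-contained:

#import <Foundation/Foundation.h>
#include <glib.h>

static gpointer long_lived_data;
static GDestroyNotify long_lived_notify;

static void
register_short_lived (gpointer user_data)
{
  g_print ("short-lived callback data %p\n", user_data);
}

static void
register_long_lived (gpointer user_data, GDestroyNotify notify)
{
  long_lived_data = user_data;
  long_lived_notify = notify;   /* called later, when the C side is done */
}

static void
register_window (NSObject * window)
{
  /* the caller keeps a strong reference for as long as the callback can
   * fire, so a plain __bridge cast (no extra retain) is enough */
  register_short_lived ((__bridge gpointer) window);

  /* the C side may outlive every Objective-C strong reference, so take a
   * +1 across the bridge and let the destroy notify drop it again */
  register_long_lived ((__bridge_retained gpointer) window,
      (GDestroyNotify) CFRelease);
}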