applemedia/gl: Update code to use ARC

All code interacting with Objective-C objects should now use Automatic
Reference Counting rather than manual memory management or Garbage
Collection. Because ARC prohibits C structs from containing
references to Objective-C objects, all such fields are now typed
'gpointer'. Setting and getting Objective-C fields on such a
struct now uses explicit __bridge_* calls to tell ARC about
object lifetimes.

https://bugzilla.gnome.org/show_bug.cgi?id=777847
Nick Kallen 2017-02-03 14:46:39 +01:00 committed by Sebastian Dröge
parent 201e71c3aa
commit 46bbc60d24
25 changed files with 244 additions and 302 deletions
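
As a rough illustration of the pattern the commit message describes (not part of the patch; 'ExampleSink' and its helpers are placeholder names), an ARC-compiled file stores the Objective-C object in a plain gpointer field and moves ownership explicitly across the C boundary:

    #import <Foundation/Foundation.h>
    #include <glib.h>

    typedef struct {
      gpointer layer;   /* really an Objective-C object, held as gpointer for ARC */
    } ExampleSink;

    /* Store: transfer a +1 reference into the plain C field. */
    static void
    example_sink_set_layer (ExampleSink * sink, NSObject * layer)
    {
      sink->layer = (__bridge_retained gpointer) layer;
    }

    /* Read: borrow the object without touching its retain count. */
    static NSObject *
    example_sink_get_layer (ExampleSink * sink)
    {
      return (__bridge NSObject *) sink->layer;
    }

    /* Drop: CFBridgingRelease (or CFRelease) balances the retain taken above. */
    static void
    example_sink_clear_layer (ExampleSink * sink)
    {
      if (sink->layer) {
        CFBridgingRelease (sink->layer);
        sink->layer = NULL;
      }
    }

The diffs below apply this same store/borrow/drop split to the layer, window, context and reader fields.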


@@ -61,7 +61,7 @@ struct _GstCAOpenGLLayerSink
   GstBuffer *next_buffer;
   GstBuffer *next_sync;
-  GstGLCAOpenGLLayer *layer;
+  gpointer layer;
   gboolean keep_aspect_ratio;


@@ -79,7 +79,7 @@ gst_ca_opengl_layer_sink_bin_get_property (GObject * object, guint prop_id,
 static void
 gst_ca_opengl_layer_sink_bin_init (GstCAOpenGLLayerSinkBin * self)
 {
-  GstGLCAOpenGLLayer *sink = g_object_new (GST_TYPE_CA_OPENGL_LAYER_SINK, NULL);
+  gpointer *sink = g_object_new (GST_TYPE_CA_OPENGL_LAYER_SINK, NULL);
   g_signal_connect (sink, "notify::layer", G_CALLBACK (_on_notify_layer), self);

@@ -266,6 +266,11 @@ gst_ca_opengl_layer_sink_finalize (GObject * object)
   g_mutex_clear (&ca_sink->drawing_lock);
 
+  if (ca_sink->layer) {
+    CFRelease(ca_sink->layer);
+    ca_sink->layer = NULL;
+  }
+
   GST_DEBUG ("finalized");
 
   G_OBJECT_CLASS (parent_class)->finalize (object);
 }

@@ -322,13 +327,16 @@ static void
 _create_layer (gpointer data)
 {
   GstCAOpenGLLayerSink *ca_sink = data;
+  id layer;
 
   if (!ca_sink->layer) {
-    ca_sink->layer = [[NSClassFromString(@"GstGLCAOpenGLLayer") alloc]
+    layer = [[NSClassFromString(@"GstGLCAOpenGLLayer") alloc]
        initWithGstGLContext:GST_GL_CONTEXT_COCOA (ca_sink->context)];
-    [ca_sink->layer setDrawCallback:(GstGLWindowCB)gst_ca_opengl_layer_sink_on_draw
+
+    ca_sink->layer = (__bridge_retained gpointer)layer;
+    [layer setDrawCallback:(GstGLWindowCB)gst_ca_opengl_layer_sink_on_draw
        data:ca_sink notify:NULL];
-    [ca_sink->layer setResizeCallback:(GstGLWindowResizeCB)gst_ca_opengl_layer_sink_on_resize
+    [layer setResizeCallback:(GstGLWindowResizeCB)gst_ca_opengl_layer_sink_on_resize
        data:ca_sink notify:NULL];
     g_object_notify (G_OBJECT (ca_sink), "layer");
   }

@@ -507,6 +515,10 @@ gst_ca_opengl_layer_sink_change_state (GstElement * element, GstStateChange tran
       break;
     }
     case GST_STATE_CHANGE_READY_TO_NULL:
+      if (ca_sink->layer) {
+        CFRelease(ca_sink->layer);
+        ca_sink->layer = NULL;
+      }
       break;
     default:
       break;

@@ -698,7 +710,7 @@ gst_ca_opengl_layer_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
   /* The layer will automatically call the draw callback to draw the new
    * content */
   [CATransaction begin];
-  [ca_sink->layer setNeedsDisplay];
+  [(__bridge GstGLCAOpenGLLayer *)(ca_sink->layer) setNeedsDisplay];
   [CATransaction commit];
 
   GST_TRACE ("post redisplay");


@@ -29,6 +29,7 @@ libgstgl_cocoa_la_CFLAGS = \
 libgstgl_cocoa_la_OBJCFLAGS = \
 	-I$(top_srcdir)/gst-libs \
 	-I$(top_builddir)/gst-libs \
+	-fobjc-arc \
 	$(GL_OBJCFLAGS) \
 	$(GST_PLUGINS_BASE_CFLAGS) \
 	$(GST_BASE_CFLAGS) \


@@ -52,20 +52,18 @@ _init_debug (void)
     gst_object_unref (self->draw_context);
 
   GST_TRACE ("dealloc GstGLCAOpenGLLayer %p context %p", self, self->gst_gl_context);
-
-  [super dealloc];
 }
 
 static void
 _context_ready (gpointer data)
 {
-  GstGLCAOpenGLLayer *ca_layer = data;
+  GstGLCAOpenGLLayer *ca_layer = (__bridge GstGLCAOpenGLLayer *) data;
 
   g_atomic_int_set (&ca_layer->can_draw, 1);
 }
 
 - (id)initWithGstGLContext:(GstGLContextCocoa *)parent_gl_context {
-  [super init];
+  self = [super init];
 
   _init_debug();

@@ -75,7 +73,7 @@ _context_ready (gpointer data)
   self.needsDisplayOnBoundsChange = YES;
 
   gst_gl_window_send_message_async (GST_GL_CONTEXT (parent_gl_context)->window,
-      (GstGLWindowCB) _context_ready, self, NULL);
+      (GstGLWindowCB) _context_ready, (__bridge_retained gpointer)self, (GDestroyNotify)CFRelease);
 
   return self;
 }

@@ -177,7 +175,7 @@ _context_ready (gpointer data)
 - (void)setResizeCallback:(GstGLWindowResizeCB)cb data:(gpointer)data
     notify:(GDestroyNotify)notify {
   if (self->resize_notify)
-    self->resize_notify (self->resize_notify);
+    self->resize_notify (self->resize_data);
 
   self->resize_cb = cb;
   self->resize_data = data;
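
The gst_gl_window_send_message_async change above hands `self` to the C callback machinery as a retained gpointer and uses CFRelease as the destroy notify to drop that reference again. A minimal sketch of the same idea, with hypothetical names not taken from the patch:

    #import <Foundation/Foundation.h>
    #include <glib.h>

    /* Stand-in for the C-side machinery that later invokes the callback
     * and then fires the destroy notify. */
    static void
    example_invoke (void (*cb) (gpointer), gpointer data, GDestroyNotify notify)
    {
      cb (data);          /* e.g. run on another thread or queue */
      if (notify)
        notify (data);    /* drops the reference transferred by the caller */
    }

    static void
    example_callback (gpointer data)
    {
      /* Borrow the object for the duration of the call; no ownership change. */
      NSObject *obj = (__bridge NSObject *) data;
      NSLog (@"got %@", obj);
    }

    static void
    example_usage (NSObject * obj)
    {
      /* __bridge_retained hands a +1 reference to the C side;
       * CFRelease as the destroy notify balances it afterwards. */
      example_invoke (example_callback, (__bridge_retained gpointer) obj,
          (GDestroyNotify) CFRelease);
    }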


@@ -174,7 +174,6 @@ gst_gl_context_cocoa_create_context (GstGLContext *context, GstGLAPI gl_api,
   GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
   GstGLAPI context_api = GST_GL_API_NONE;
   const GLint swapInterval = 1;
-  NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
   CGLPixelFormatObj fmt = NULL;
   CGLContextObj glContext;
   CGLPixelFormatAttribute attribs[] = {

@@ -274,7 +273,6 @@ gst_gl_context_cocoa_create_context (GstGLContext *context, GstGLAPI gl_api,
   if (window)
     gst_object_unref (window);
-  [pool release];
 
   return TRUE;

@@ -282,7 +280,6 @@ error:
   {
     if (window)
       gst_object_unref (window);
-    [pool release];
 
     return FALSE;
   }
 }


@@ -48,7 +48,6 @@ static GCond nsapp_cond;
 static gboolean
 gst_gl_display_cocoa_nsapp_iteration (gpointer data)
 {
-  NSAutoreleasePool *pool = nil;
   NSEvent *event = nil;
 
   if (![NSThread isMainThread]) {

@@ -56,7 +55,6 @@ gst_gl_display_cocoa_nsapp_iteration (gpointer data)
     return FALSE;
   }
 
-  pool = [[NSAutoreleasePool alloc] init];
 
   while ((event = ([NSApp nextEventMatchingMask:NSAnyEventMask
       untilDate:[NSDate dateWithTimeIntervalSinceNow:0.05]

@@ -64,16 +62,12 @@ gst_gl_display_cocoa_nsapp_iteration (gpointer data)
     [NSApp sendEvent:event];
   }
 
-  [pool release];
 
   return TRUE;
 }
 
 static void
 gst_gl_display_cocoa_open_and_attach_source (gpointer data)
 {
-  NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
 
   if ([NSThread isMainThread]) {
     /* The sharedApplication class method initializes
      * the display environment and connects your program

@@ -89,8 +83,6 @@ gst_gl_display_cocoa_open_and_attach_source (gpointer data)
     GST_DEBUG ("NSApp iteration loop attached, id %d", nsapp_source_id);
   }
 
-  [pool release];
 }
 
 static gboolean


@@ -82,8 +82,8 @@ static void gst_gl_window_cocoa_send_message_async (GstGLWindow * window,
 struct _GstGLWindowCocoaPrivate
 {
-  GstGLNSWindow *internal_win_id;
-  NSView *external_view;
+  gpointer internal_win_id;
+  gpointer external_view;
   gboolean visible;
   gint preferred_width;
   gint preferred_height;

@@ -93,7 +93,7 @@ struct _GstGLWindowCocoaPrivate
   /* atomic set when the internal NSView has been created */
   int view_ready;
 
-  dispatch_queue_t gl_queue;
+  gpointer gl_queue;
 };
 
 static void

@@ -128,15 +128,15 @@ gst_gl_window_cocoa_init (GstGLWindowCocoa * window)
   window->priv->preferred_width = 320;
   window->priv->preferred_height = 240;
 
-  window->priv->gl_queue =
-      dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL);
+  window->priv->gl_queue = (__bridge_retained gpointer)
+      (dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL));
 }
 
 static void
 gst_gl_window_cocoa_finalize (GObject * object)
 {
   GstGLWindowCocoa *window = GST_GL_WINDOW_COCOA (object);
-  dispatch_release (window->priv->gl_queue);
+  window->priv->gl_queue = NULL;
   G_OBJECT_CLASS (parent_class)->finalize (object);
 }

@@ -156,6 +156,7 @@ gst_gl_window_cocoa_create_window (GstGLWindowCocoa *window_cocoa)
 {
   GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
   GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
+  GstGLNSWindow *internal_win_id;
   NSRect mainRect = [[NSScreen mainScreen] visibleFrame];
   gint h = priv->preferred_height;
   gint y = mainRect.size.height > h ? (mainRect.size.height - h) * 0.5 : 0;

@@ -168,14 +169,16 @@ gst_gl_window_cocoa_create_window (GstGLWindowCocoa *window_cocoa)
   gst_object_unref (context);
 
-  priv->internal_win_id = [[GstGLNSWindow alloc] initWithContentRect:rect styleMask:
+  internal_win_id = [[GstGLNSWindow alloc] initWithContentRect:rect styleMask:
       (NSTitledWindowMask | NSClosableWindowMask |
       NSResizableWindowMask | NSMiniaturizableWindowMask)
       backing: NSBackingStoreBuffered defer: NO screen: nil gstWin: window_cocoa];
 
-  GST_DEBUG ("NSWindow id: %"G_GUINTPTR_FORMAT, (guintptr) priv->internal_win_id);
-  [priv->internal_win_id setContentView:glView];
+  priv->internal_win_id = (__bridge_retained gpointer)internal_win_id;
+
+  GST_DEBUG ("NSWindow id: %"G_GUINTPTR_FORMAT, (guintptr) priv->internal_win_id);
+  [internal_win_id setContentView:glView];
 
   g_atomic_int_set (&window_cocoa->priv->view_ready, 1);

@@ -196,10 +199,11 @@ static void
 gst_gl_window_cocoa_close (GstGLWindow *window)
 {
   GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
+  GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)window_cocoa->priv->internal_win_id;
 
-  [[window_cocoa->priv->internal_win_id contentView] removeFromSuperview];
-  [window_cocoa->priv->internal_win_id release];
-  window_cocoa->priv->internal_win_id = nil;
+  [[internal_win_id contentView] removeFromSuperview];
+  CFBridgingRelease(window_cocoa->priv->internal_win_id);
+  window_cocoa->priv->internal_win_id = NULL;
 }
 
 static guintptr

@@ -219,7 +223,7 @@ gst_gl_window_cocoa_set_window_handle (GstGLWindow * window, guintptr handle)
   if (priv->internal_win_id) {
     if (handle) {
-      priv->external_view = (NSView *) handle;
+      priv->external_view = (gpointer)handle;
       priv->visible = TRUE;
     } else {
       /* bring back our internal window */

@@ -229,17 +233,22 @@ gst_gl_window_cocoa_set_window_handle (GstGLWindow * window, guintptr handle)
       dispatch_async (dispatch_get_main_queue (), ^{
-        NSView *view = [window_cocoa->priv->internal_win_id contentView];
-        [window_cocoa->priv->internal_win_id orderOut:window_cocoa->priv->internal_win_id];
+        GstGLNSWindow *internal_win_id =
+            (__bridge GstGLNSWindow *)window_cocoa->priv->internal_win_id;
+        NSView *external_view =
+            (__bridge NSView *)window_cocoa->priv->external_view;
 
-        [window_cocoa->priv->external_view addSubview: view];
+        NSView *view = [internal_win_id contentView];
+        [internal_win_id orderOut:internal_win_id];
 
-        [view setFrame: [window_cocoa->priv->external_view bounds]];
+        [external_view addSubview: view];
+
+        [view setFrame: [external_view bounds]];
         [view setAutoresizingMask: NSViewWidthSizable|NSViewHeightSizable];
       });
     } else {
       /* no internal window yet so delay it to the next drawing */
-      priv->external_view = (NSView*) handle;
+      priv->external_view = (gpointer)handle;
       priv->visible = FALSE;
     }
   }

@@ -249,11 +258,12 @@ _show_window (gpointer data)
 {
   GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (data);
   GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+  GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
 
   GST_DEBUG_OBJECT (window_cocoa, "make the window available\n");
 
-  [priv->internal_win_id makeMainWindow];
-  [priv->internal_win_id orderFrontRegardless];
-  [priv->internal_win_id setViewsNeedDisplay:YES];
+  [internal_win_id makeMainWindow];
+  [internal_win_id orderFrontRegardless];
+  [internal_win_id setViewsNeedDisplay:YES];
 
   priv->visible = TRUE;
 }

@@ -283,11 +293,13 @@ gst_gl_window_cocoa_queue_resize (GstGLWindow * window)
 {
   GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
   GstGLNSView *view;
+  GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+  GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
 
   if (!g_atomic_int_get (&window_cocoa->priv->view_ready))
     return;
 
-  view = (GstGLNSView *)[window_cocoa->priv->internal_win_id contentView];
+  view = (GstGLNSView *)[internal_win_id contentView];
 
   [view->layer queueResize];
 }

@@ -297,13 +309,15 @@ gst_gl_window_cocoa_draw (GstGLWindow * window)
 {
   GstGLWindowCocoa *window_cocoa = GST_GL_WINDOW_COCOA (window);
   GstGLNSView *view;
+  GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+  GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
 
   /* As the view is created asynchronously in the main thread we cannot know
    * exactly when it will be ready to draw to */
   if (!g_atomic_int_get (&window_cocoa->priv->view_ready))
     return;
 
-  view = (GstGLNSView *)[window_cocoa->priv->internal_win_id contentView];
+  view = (GstGLNSView *)[internal_win_id contentView];
 
   /* this redraws the GstGLCAOpenGLLayer which calls
    * gst_gl_window_cocoa_draw_thread(). Use an explicit CATransaction since we

@@ -329,8 +343,9 @@ gst_gl_cocoa_draw_cb (GstGLWindowCocoa *window_cocoa)
 {
   GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
   GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
+  GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
 
-  if (window_cocoa->priv->internal_win_id && ![priv->internal_win_id isClosed]) {
+  if (internal_win_id && ![internal_win_id isClosed]) {
     GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
 
     /* draw opengl scene in the back buffer */

@@ -342,12 +357,13 @@ gst_gl_cocoa_draw_cb (GstGLWindowCocoa *window_cocoa)
 static void
 gst_gl_cocoa_resize_cb (GstGLNSView * view, guint width, guint height)
 {
-  NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
   GstGLWindowCocoa *window_cocoa = view->window_cocoa;
   GstGLWindow *window = GST_GL_WINDOW (window_cocoa);
   GstGLContext *context = gst_gl_window_get_context (window);
+  GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+  GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
 
-  if (window_cocoa->priv->internal_win_id && ![window_cocoa->priv->internal_win_id isClosed]) {
+  if (internal_win_id && ![internal_win_id isClosed]) {
     const GstGLFuncs *gl;
     NSRect bounds = [view bounds];
     NSRect visibleRect = [view visibleRect];

@@ -376,7 +392,6 @@ gst_gl_cocoa_resize_cb (GstGLNSView * view, guint width, guint height)
   }
 
   gst_object_unref (context);
-  [pool release];
 }
 
 static void

@@ -386,6 +401,8 @@ gst_gl_window_cocoa_send_message_async (GstGLWindow * window,
   GstGLWindowCocoa *window_cocoa = (GstGLWindowCocoa *) window;
   GstGLContext *context = gst_gl_window_get_context (window);
   GThread *thread = gst_gl_context_get_thread (context);
+  GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+  dispatch_queue_t gl_queue = (__bridge dispatch_queue_t)priv->gl_queue;
 
   if (thread == g_thread_self()) {
     /* this case happens for nested calls happening from inside the GCD queue */

@@ -394,7 +411,7 @@ gst_gl_window_cocoa_send_message_async (GstGLWindow * window,
       destroy (data);
     gst_object_unref (context);
   } else {
-    dispatch_async (window_cocoa->priv->gl_queue, ^{
+    dispatch_async (gl_queue, ^{
       gst_gl_context_activate (context, TRUE);
       gst_object_unref (context);
       callback (data);

@@ -423,6 +440,9 @@ gst_gl_window_cocoa_send_message_async (GstGLWindow * window,
   m_isClosed = NO;
   window_cocoa = cocoa;
 
+  GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+  GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
+  NSView *external_view = (__bridge NSView *)priv->external_view;
 
   self = [super initWithContentRect: contentRect
       styleMask: styleMask backing: bufferingType

@@ -436,7 +456,7 @@ gst_gl_window_cocoa_send_message_async (GstGLWindow * window,
   [self setBackgroundColor:[NSColor blackColor]];
 
-  [self orderOut:window_cocoa->priv->internal_win_id];
+  [self orderOut:internal_win_id];
 
   return self;
 }

@@ -473,8 +493,10 @@ close_window_cb (gpointer data)
 /* Called in the main thread which is never the gl thread */
 - (BOOL) windowShouldClose:(id)sender {
+  GstGLWindowCocoaPrivate *priv = window_cocoa->priv;
+  GstGLNSWindow *internal_win_id = (__bridge GstGLNSWindow *)priv->internal_win_id;
 
   GST_DEBUG ("user clicked the close button\n");
-  [window_cocoa->priv->internal_win_id setClosed];
+  [internal_win_id setClosed];
   gst_gl_window_send_message_async (GST_GL_WINDOW (window_cocoa),
       (GstGLWindowCB) close_window_cb, gst_object_ref (window_cocoa),
       (GDestroyNotify) gst_object_unref);

@@ -509,7 +531,7 @@ close_window_cb (gpointer data)
   [self->layer setDrawCallback:(GstGLWindowCB)gst_gl_cocoa_draw_cb
       data:window notify:NULL];
   [self->layer setResizeCallback:(GstGLWindowResizeCB)gst_gl_cocoa_resize_cb
-      data:self notify:NULL];
+      data:(__bridge_retained gpointer)self notify:(GDestroyNotify)CFRelease];
 
   [self setLayerContentsRedrawPolicy:NSViewLayerContentsRedrawOnSetNeedsDisplay];

@@ -519,9 +541,7 @@ close_window_cb (gpointer data)
 }
 
 - (void) dealloc {
-  [self->layer release];
-
-  [super dealloc];
+  self->layer = nil;
 }
 
 - (void)renewGState {


@@ -21,6 +21,7 @@ libgstgl_eagl_la_CFLAGS = \
 libgstgl_eagl_la_OBJCFLAGS = \
 	-I$(top_srcdir)/gst-libs \
 	-I$(top_builddir)/gst-libs \
+	-fobj-arc \
 	$(GL_CFLAGS) \
 	$(GL_OBJCFLAGS) \
 	$(GST_PLUGINS_BASE_CFLAGS) \


@@ -33,6 +33,11 @@ G_BEGIN_DECLS
 #define GST_IS_GL_CONTEXT_EAGL_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE((k), GST_TYPE_GL_CONTEXT_EAGL))
 #define GST_GL_CONTEXT_EAGL_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS((o), GST_TYPE_GL_CONTEXT_EAGL, GstGLContextEaglClass))
 
+#define GS_GL_CONTEXT_EAGL_CONTEXT(obj) \
+  ((__bridge EAGLContext *)(obj->priv->eagl_context))
+#define GS_GL_CONTEXT_EAGL_LAYER(obj) \
+  ((__bridge CAEAGLLayer *)(obj->priv->eagl_layer))
+
 typedef struct _GstGLContextEagl GstGLContextEagl;
 typedef struct _GstGLContextEaglPrivate GstGLContextEaglPrivate;
 typedef struct _GstGLContextEaglClass GstGLContextEaglClass;


@@ -46,10 +46,10 @@ static GstGLPlatform gst_gl_context_eagl_get_gl_platform (GstGLContext *
 struct _GstGLContextEaglPrivate
 {
-  EAGLContext *eagl_context;
+  gpointer eagl_context;
 
   /* Used if we render to a window */
-  CAEAGLLayer *eagl_layer;
+  gpointer eagl_layer;
   GLuint framebuffer;
   GLuint color_renderbuffer;
   GLuint depth_renderbuffer;

@@ -106,7 +106,7 @@ gst_gl_context_eagl_resize (GstGLContextEagl * eagl_context)
   int width, height;
 
   glBindRenderbuffer (GL_RENDERBUFFER, eagl_context->priv->color_renderbuffer);
-  [eagl_context->priv->eagl_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:eagl_context->priv->eagl_layer];
+  [GS_GL_CONTEXT_EAGL_CONTEXT(eagl_context) renderbufferStorage:GL_RENDERBUFFER fromDrawable:GS_GL_CONTEXT_EAGL_LAYER(eagl_context)];
   glGetRenderbufferParameteriv (GL_RENDERBUFFER,
       GL_RENDERBUFFER_WIDTH, &width);
   glGetRenderbufferParameteriv (GL_RENDERBUFFER,

@@ -126,7 +126,7 @@ gst_gl_context_eagl_release_layer (GstGLContext * context)
   if (context_eagl->priv->eagl_layer) {
     gst_gl_context_eagl_activate (context, TRUE);
 
-    [context_eagl->priv->eagl_context renderbufferStorage: GL_RENDERBUFFER fromDrawable:nil];
+    [GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl) renderbufferStorage: GL_RENDERBUFFER fromDrawable:nil];
 
     glDeleteFramebuffers (1, &context_eagl->priv->framebuffer);
     context_eagl->priv->framebuffer = 0;

@@ -156,7 +156,7 @@ gst_gl_context_eagl_update_layer (GstGLContext * context)
   UIView *window_handle = nil;
   GstGLWindow *window = gst_gl_context_get_window (context);
 
   if (window)
-    window_handle = (UIView *) gst_gl_window_get_window_handle (window);
+    window_handle = (__bridge UIView *)((void *)gst_gl_window_get_window_handle (window));
 
   if (!window_handle) {
     GST_INFO_OBJECT (context, "window handle not set yet, not updating layer");

@@ -170,7 +170,7 @@ gst_gl_context_eagl_update_layer (GstGLContext * context)
   gst_gl_context_eagl_release_layer (context);
 
   eagl_layer = (CAEAGLLayer *)[window_handle layer];
-  [EAGLContext setCurrentContext:priv->eagl_context];
+  [EAGLContext setCurrentContext:GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl)];
 
   /* Allocate framebuffer */
   glGenFramebuffers (1, &framebuffer);

@@ -178,7 +178,7 @@ gst_gl_context_eagl_update_layer (GstGLContext * context)
   /* Allocate color render buffer */
   glGenRenderbuffers (1, &color_renderbuffer);
   glBindRenderbuffer (GL_RENDERBUFFER, color_renderbuffer);
-  [priv->eagl_context renderbufferStorage: GL_RENDERBUFFER fromDrawable:eagl_layer];
+  [GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl) renderbufferStorage: GL_RENDERBUFFER fromDrawable:eagl_layer];
   glFramebufferRenderbuffer (GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
       GL_RENDERBUFFER, color_renderbuffer);
   /* Get renderbuffer width/height */

@@ -203,7 +203,7 @@ gst_gl_context_eagl_update_layer (GstGLContext * context)
   glBindRenderbuffer (GL_RENDERBUFFER, 0);
   glBindFramebuffer (GL_FRAMEBUFFER, 0);
 
-  priv->eagl_layer = eagl_layer;
+  priv->eagl_layer = (__bridge_retained gpointer)eagl_layer;
   priv->framebuffer = framebuffer;
   priv->color_renderbuffer = color_renderbuffer;
   priv->depth_renderbuffer = depth_renderbuffer;

@@ -222,16 +222,16 @@ gst_gl_context_eagl_create_context (GstGLContext * context, GstGLAPI gl_api,
   EAGLSharegroup *share_group;
 
   if (other_context) {
-    EAGLContext *external_gl_context = (EAGLContext *)
+    EAGLContext *external_gl_context = (__bridge EAGLContext *)(void *)
         gst_gl_context_get_gl_context (other_context);
     share_group = [external_gl_context sharegroup];
   } else {
     share_group = nil;
   }
 
-  priv->eagl_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3 sharegroup:share_group];
+  priv->eagl_context = (__bridge_retained gpointer)[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3 sharegroup:share_group];
   if (!priv->eagl_context) {
-    priv->eagl_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2 sharegroup:share_group];
+    priv->eagl_context = (__bridge_retained gpointer)[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2 sharegroup:share_group];
   }
   if (!priv->eagl_context) {
     g_set_error_literal (error, GST_GL_CONTEXT_ERROR,

@@ -240,9 +240,6 @@ gst_gl_context_eagl_create_context (GstGLContext * context, GstGLAPI gl_api,
     return FALSE;
   }
 
-  if (share_group)
-    [share_group release];
-
   priv->eagl_layer = NULL;
   priv->framebuffer = 0;
   priv->color_renderbuffer = 0;

@@ -266,8 +263,8 @@ gst_gl_context_eagl_destroy_context (GstGLContext * context)
   gst_gl_context_eagl_release_layer (context);
 
-  [context_eagl->priv->eagl_context release];
-  context_eagl->priv->eagl_context = nil;
+  CFRelease(context_eagl->priv->eagl_context);
+  context_eagl->priv->eagl_context = NULL;
 }
 
 static gboolean

@@ -284,7 +281,7 @@ gst_gl_context_eagl_choose_format (GstGLContext * context, GError ** error)
     return TRUE;
 
   if (window)
-    window_handle = (UIView *) gst_gl_window_get_window_handle (window);
+    window_handle = (__bridge UIView *)(void *)gst_gl_window_get_window_handle (window);
 
   if (!window_handle) {
     gst_object_unref (window);

@@ -341,7 +338,7 @@ gst_gl_context_eagl_swap_buffers (GstGLContext * context)
   if (!context_eagl->priv->eagl_layer)
     return;
 
-  [context_eagl->priv->eagl_context presentRenderbuffer:GL_RENDERBUFFER];
+  [GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl) presentRenderbuffer:GL_RENDERBUFFER];
 }
 
 static gboolean

@@ -360,7 +357,7 @@ gst_gl_context_eagl_activate (GstGLContext * context, gboolean activate)
     }
 
     GST_DEBUG ("Attaching context to thread %p", g_thread_self ());
-    if ([EAGLContext setCurrentContext:context_eagl->priv->eagl_context] == NO) {
+    if ([EAGLContext setCurrentContext:GS_GL_CONTEXT_EAGL_CONTEXT(context_eagl)] == NO) {
       GST_ERROR ("Couldn't make context current");
       return FALSE;
     }


@@ -33,6 +33,11 @@ G_BEGIN_DECLS
 #define GST_IS_GL_WINDOW_EAGL_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE((k), GST_TYPE_GL_WINDOW_EAGL))
 #define GST_GL_WINDOW_EAGL_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS((o), GST_TYPE_GL_WINDOW_EAGL, GstGLWindowEaglClass))
 
+#define GS_GL_WINDOW_EAGL_VIEW(obj) \
+  ((__bridge UIView *)(obj->priv->view))
+#define GS_GL_WINDOW_EAGL_QUEUE(obj) \
+  ((__bridge dispatch_queue_t)(obj->priv->gl_queue))
+
 typedef struct _GstGLWindowEagl GstGLWindowEagl;
 typedef struct _GstGLWindowEaglPrivate GstGLWindowEaglPrivate;
 typedef struct _GstGLWindowEaglClass GstGLWindowEaglClass;


@@ -54,10 +54,10 @@ static void gst_gl_window_eagl_send_message_async (GstGLWindow * window,
 struct _GstGLWindowEaglPrivate
 {
-  UIView *view;
+  gpointer view;
   gint window_width, window_height;
   gint preferred_width, preferred_height;
 
-  dispatch_queue_t gl_queue;
+  gpointer gl_queue;
 };
 
 static void

@@ -88,14 +88,14 @@ gst_gl_window_eagl_init (GstGLWindowEagl * window)
 {
   window->priv = GST_GL_WINDOW_EAGL_GET_PRIVATE (window);
   window->priv->gl_queue =
-      dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL);
+      (__bridge_retained gpointer)dispatch_queue_create ("org.freedesktop.gstreamer.glwindow", NULL);
 }
 
 static void
 gst_gl_window_eagl_finalize (GObject * object)
 {
   GstGLWindowEagl *window = GST_GL_WINDOW_EAGL (object);
-  dispatch_release (window->priv->gl_queue);
+  CFRelease(window->priv->gl_queue);
   G_OBJECT_CLASS (parent_class)->finalize (object);
 }

@@ -128,7 +128,7 @@ gst_gl_window_eagl_set_window_handle (GstGLWindow * window, guintptr handle)
   window_eagl = GST_GL_WINDOW_EAGL (window);
   context = gst_gl_window_get_context (window);
 
-  window_eagl->priv->view = (UIView *) handle;
+  window_eagl->priv->view = (gpointer)handle;
 
   GST_INFO_OBJECT (context, "handle set, updating layer");
   gst_gl_context_eagl_update_layer (context);

@@ -159,10 +159,10 @@ gst_gl_window_eagl_send_message_async (GstGLWindow * window,
       destroy (data);
     gst_object_unref (context);
   } else {
-    dispatch_async (window_eagl->priv->gl_queue, ^{
+    dispatch_async ((__bridge dispatch_queue_t)(window_eagl->priv->gl_queue), ^{
       gst_gl_context_activate (context, TRUE);
-      gst_object_unref (context);
       callback (data);
+      gst_object_unref (context);
       if (destroy)
         destroy (data);
     });

@@ -184,7 +184,7 @@ draw_cb (gpointer data)
   CGSize size;
   CAEAGLLayer *eagl_layer;
 
-  eagl_layer = (CAEAGLLayer *)[window_eagl->priv->view layer];
+  eagl_layer = (CAEAGLLayer *)[GS_GL_WINDOW_EAGL_VIEW(window_eagl) layer];
   size = eagl_layer.frame.size;
 
   if (window->queue_resize || window_eagl->priv->window_width != size.width ||


@@ -35,6 +35,7 @@ endif
 libgstapplemedia_la_OBJCFLAGS = \
 	-I$(top_srcdir)/gst-libs \
 	-I$(top_builddir)/gst-libs \
+	-fobjc-arc \
 	$(GST_OBJCFLAGS_WITH_VERSION) \
 	$(GST_BASE_CFLAGS) \
 	$(GST_PLUGINS_BASE_CFLAGS)


@@ -37,6 +37,8 @@ G_BEGIN_DECLS
 #define GST_TYPE_AVF_ASSET_SRC \
   (gst_avf_asset_src_get_type())
+#define GST_AVF_ASSET_SRC_READER(obj) \
+  ((__bridge GstAVFAssetReader *)(obj->reader))
 #define GST_AVF_ASSET_SRC(obj) \
   (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AVF_ASSET_SRC,GstAVFAssetSrc))
 #define GST_AVF_ASSET_SRC_CLASS(klass) \

@@ -95,9 +97,9 @@ typedef enum
 - (void) start : (GError **) error;
 - (void) stop;
 - (void) seekTo: (guint64) start : (guint64) stop : (GError **) error;
-- (bool) hasMediaType: (GstAVFAssetReaderMediaType) type;
+- (BOOL) hasMediaType: (GstAVFAssetReaderMediaType) type;
 - (GstCaps *) getCaps: (GstAVFAssetReaderMediaType) type;
-- (bool) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index;
+- (BOOL) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index;
 - (GstBuffer *) nextBuffer: (GstAVFAssetReaderMediaType) type : (GError **) error;
 @end

@@ -110,7 +112,10 @@ struct _GstAVFAssetSrc
   gint selected_video_track;
   gint selected_audio_track;
-  GstAVFAssetReader *reader;
+
+  /* NOTE: ARC no longer allows Objective-C pointers in structs. */
+  /* Instead, use gpointer with explicit __bridge_* calls */
+  gpointer reader;
   GstAVFAssetSrcState state;
   GMutex lock;
   GstEvent *seek_event;


@@ -51,15 +51,9 @@ GST_DEBUG_CATEGORY_STATIC (gst_avf_asset_src_debug);
 #define MEDIA_TYPE_TO_STR(x) \
   (x == GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO ? "audio" : "video")
 #define AVF_ASSET_READER_HAS_AUDIO(x) \
-  ([self->reader hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO])
+  ([GST_AVF_ASSET_SRC_READER(self) hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO])
 #define AVF_ASSET_READER_HAS_VIDEO(x) \
-  ([self->reader hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO])
-#define OBJC_CALLOUT_BEGIN() \
-  NSAutoreleasePool *pool; \
-  \
-  pool = [[NSAutoreleasePool alloc] init]
-#define OBJC_CALLOUT_END() \
-  [pool release]
+  ([GST_AVF_ASSET_SRC_READER(self) hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO])
 
 enum
 {

@@ -242,7 +236,6 @@ gst_avf_asset_src_change_state (GstElement * element, GstStateChange transition)
       gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
       gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
 
-  OBJC_CALLOUT_BEGIN ();
   switch (transition) {
     case GST_STATE_CHANGE_NULL_TO_READY: {
       self->state = GST_AVF_ASSET_SRC_STATE_STOPPED;

@@ -252,7 +245,7 @@ gst_avf_asset_src_change_state (GstElement * element, GstStateChange transition)
         gst_avf_asset_src_stop_all (self);
         return GST_STATE_CHANGE_FAILURE;
       }
-      self->reader = [[GstAVFAssetReader alloc] initWithURI:self->uri:&error];
+      self->reader = (__bridge_retained gpointer)([[GstAVFAssetReader alloc] initWithURI:self->uri:&error]);
       if (error) {
        GST_ELEMENT_ERROR (element, RESOURCE, FAILED, ("AVFAssetReader error"),
            ("%s", error->message));

@@ -282,12 +275,11 @@ gst_avf_asset_src_change_state (GstElement * element, GstStateChange transition)
       gst_avf_asset_src_stop (self);
       break;
     case GST_STATE_CHANGE_READY_TO_NULL:
-      [self->reader release];
+      CFBridgingRelease(self->reader);
       break;
     default:
       break;
   }
-  OBJC_CALLOUT_END ();
 
   return ret;
 }

@@ -323,18 +315,18 @@ gst_avf_asset_src_query (GstPad *pad, GstObject * parent, GstQuery *query)
       ret = TRUE;
       break;
     case GST_QUERY_DURATION:
-      gst_query_set_duration (query, GST_FORMAT_TIME, self->reader.duration);
+      gst_query_set_duration (query, GST_FORMAT_TIME, GST_AVF_ASSET_SRC_READER(self).duration);
       ret = TRUE;
       break;
     case GST_QUERY_POSITION:
-      gst_query_set_position (query, GST_FORMAT_TIME, self->reader.position);
+      gst_query_set_position (query, GST_FORMAT_TIME, GST_AVF_ASSET_SRC_READER(self).position);
       ret = TRUE;
       break;
     case GST_QUERY_SEEKING: {
       GstFormat fmt;
       gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
       if (fmt == GST_FORMAT_TIME) {
-        gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, self->reader.duration);
+        gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, GST_AVF_ASSET_SRC_READER(self).duration);
         ret = TRUE;
       }
       break;

@@ -362,7 +354,6 @@ gst_avf_asset_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
   gboolean res = TRUE;
   GError *error = NULL;
 
-  OBJC_CALLOUT_BEGIN ();
   self = GST_AVF_ASSET_SRC (gst_pad_get_parent_element (pad));
 
   switch (GST_EVENT_TYPE (event)) {

@@ -409,7 +400,7 @@ gst_avf_asset_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
         stop = GST_CLOCK_TIME_NONE;
       }
       gst_avf_asset_src_send_event (self, gst_event_new_flush_start ());
-      [self->reader seekTo: start: stop: &error];
+      [GST_AVF_ASSET_SRC_READER(self) seekTo: start: stop: &error];
 
       gst_segment_init (&segment, GST_FORMAT_TIME);
       segment.rate = rate;

@@ -439,7 +430,6 @@ gst_avf_asset_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
   }
 
   gst_object_unref (self);
-  OBJC_CALLOUT_END ();
   return res;
 }

@@ -501,15 +491,14 @@ gst_avf_asset_src_read_data (GstAVFAssetSrc *self, GstPad *pad,
   GstFlowReturn ret, combined_ret;
   GError *error;
 
-  OBJC_CALLOUT_BEGIN ();
   GST_AVF_ASSET_SRC_LOCK (self);
   if (self->state != GST_AVF_ASSET_SRC_STATE_READING) {
     GST_AVF_ASSET_SRC_UNLOCK (self);
-    goto exit;
+    return;
   }
 
-  buf = [self->reader nextBuffer:type:&error];
+  buf = [GST_AVF_ASSET_SRC_READER(self) nextBuffer:type:&error];
   GST_AVF_ASSET_SRC_UNLOCK (self);
 
   if (buf == NULL) {

@@ -520,13 +509,13 @@ gst_avf_asset_src_read_data (GstAVFAssetSrc *self, GstPad *pad,
       gst_avf_asset_src_combine_flows (self, type, GST_FLOW_ERROR);
       gst_pad_pause_task (pad);
-      goto exit;
+      return;
     }
 
     gst_pad_push_event (pad, gst_event_new_eos ());
     gst_avf_asset_src_combine_flows (self, type, GST_FLOW_EOS);
     gst_pad_pause_task (pad);
-    goto exit;
+    return;
   }
 
   ret = gst_pad_push (pad, buf);

@@ -547,8 +536,6 @@ gst_avf_asset_src_read_data (GstAVFAssetSrc *self, GstPad *pad,
     gst_pad_pause_task (pad);
   }
 
-exit:
-  OBJC_CALLOUT_END ();
 }
 
 static void

@@ -571,9 +558,8 @@ gst_avf_asset_src_start_reader (GstAVFAssetSrc * self)
   GError *error = NULL;
   gboolean ret = TRUE;
 
-  OBJC_CALLOUT_BEGIN ();
-  [self->reader start: &error];
+  [GST_AVF_ASSET_SRC_READER(self) start: &error];
   if (error != NULL) {
     GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
         ("AVFAssetReader could not start reading"), ("%s", error->message));

@@ -583,7 +569,6 @@ gst_avf_asset_src_start_reader (GstAVFAssetSrc * self)
   }
 
 exit:
-  OBJC_CALLOUT_END ();
   return ret;
 }

@@ -592,7 +577,6 @@ gst_avf_asset_src_send_event (GstAVFAssetSrc *self, GstEvent *event)
 {
   gboolean ret = TRUE;
 
-  OBJC_CALLOUT_BEGIN ();
 
   if (AVF_ASSET_READER_HAS_VIDEO (self)) {
     ret |= gst_pad_push_event (self->videopad, gst_event_ref (event));

@@ -602,7 +586,6 @@ gst_avf_asset_src_send_event (GstAVFAssetSrc *self, GstEvent *event)
   }
 
   gst_event_unref (event);
-  OBJC_CALLOUT_END ();
 
   return ret;
 }

@@ -611,25 +594,24 @@ gst_avf_asset_src_start (GstAVFAssetSrc *self)
 {
   GstSegment segment;
 
-  OBJC_CALLOUT_BEGIN ();
   if (self->state == GST_AVF_ASSET_SRC_STATE_STARTED) {
-    goto exit;
+    return;
   }
 
   GST_DEBUG_OBJECT (self, "Creating pads and starting reader");
 
   gst_segment_init (&segment, GST_FORMAT_TIME);
-  segment.duration = self->reader.duration;
+  segment.duration = GST_AVF_ASSET_SRC_READER(self).duration;
 
   /* We call AVFAssetReader's startReading when the pads are linked
    * and no outputs can be added afterwards, so the tracks must be
   * selected before adding any of the new pads */
   if (AVF_ASSET_READER_HAS_AUDIO (self)) {
-    [self->reader selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO:
+    [GST_AVF_ASSET_SRC_READER(self) selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO:
         self->selected_audio_track];
   }
   if (AVF_ASSET_READER_HAS_VIDEO (self)) {
-    [self->reader selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO:
+    [GST_AVF_ASSET_SRC_READER(self) selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO:
         self->selected_video_track];
   }

@@ -643,9 +625,9 @@ gst_avf_asset_src_start (GstAVFAssetSrc *self)
     gst_pad_set_active (self->audiopad, TRUE);
     gst_avf_asset_src_send_start_stream (self, self->audiopad);
     gst_pad_set_caps (self->audiopad,
-        [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]);
+        [GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]);
     gst_pad_push_event (self->audiopad, gst_event_new_caps (
-        [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]));
+        [GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]));
     gst_pad_push_event (self->audiopad, gst_event_new_segment (&segment));
     gst_element_add_pad (GST_ELEMENT (self), self->audiopad);
   }

@@ -659,35 +641,31 @@ gst_avf_asset_src_start (GstAVFAssetSrc *self)
     gst_pad_set_active (self->videopad, TRUE);
     gst_avf_asset_src_send_start_stream (self, self->videopad);
     gst_pad_set_caps (self->videopad,
-        [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]);
+        [GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]);
     gst_pad_push_event (self->videopad, gst_event_new_caps (
-        [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]));
+        [GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]));
     gst_pad_push_event (self->videopad, gst_event_new_segment (&segment));
     gst_element_add_pad (GST_ELEMENT (self), self->videopad);
   }
 
   gst_element_no_more_pads (GST_ELEMENT (self));
   self->state = GST_AVF_ASSET_SRC_STATE_STARTED;
-
-exit:
-  OBJC_CALLOUT_END ();
 }
 
 static void
 gst_avf_asset_src_stop (GstAVFAssetSrc *self)
 {
   gboolean has_audio, has_video;
 
-  OBJC_CALLOUT_BEGIN();
   if (self->state == GST_AVF_ASSET_SRC_STATE_STOPPED) {
-    goto exit;
+    return;
   }
 
   GST_DEBUG ("Stopping tasks and removing pads");
 
   has_audio = AVF_ASSET_READER_HAS_AUDIO (self);
   has_video = AVF_ASSET_READER_HAS_VIDEO (self);
-  [self->reader stop];
+  [GST_AVF_ASSET_SRC_READER(self) stop];
 
   if (has_audio) {
     gst_pad_stop_task (self->audiopad);

@@ -699,9 +677,6 @@ gst_avf_asset_src_stop (GstAVFAssetSrc *self)
   }
 
   self->state = GST_AVF_ASSET_SRC_STATE_STOPPED;
-
-exit:
-  OBJC_CALLOUT_END ();
 }
 
 static gboolean

@@ -811,7 +786,6 @@ gst_avf_asset_src_uri_set_uri (GstURIHandler * handler, const gchar * uri, GErro
   AVAsset *asset;
   gboolean ret = FALSE;
 
-  OBJC_CALLOUT_BEGIN ();
   str = [NSString stringWithUTF8String: uri];
   url = [[NSURL alloc] initWithString: str];
   asset = [AVAsset assetWithURL: url];

@@ -824,7 +798,6 @@ gst_avf_asset_src_uri_set_uri (GstURIHandler * handler, const gchar * uri, GErro
     g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
         "Invalid URI '%s' for avfassetsrc", uri);
   }
-  OBJC_CALLOUT_END ();
   return ret;
 }

@@ -872,11 +845,11 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
 - (void) releaseReader
 {
-  [video_track release];
-  [audio_track release];
-  [video_tracks release];
-  [audio_tracks release];
-  [reader release];
+  video_track = nil;
+  audio_track = nil;
+  video_tracks = nil;
+  audio_tracks = nil;
+  reader = nil;
 }
 
 - (void) initReader: (GError **) error

@@ -889,13 +862,12 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
         [nserror.description UTF8String]);
     *error = g_error_new (GST_AVF_ASSET_SRC_ERROR, GST_AVF_ASSET_ERROR_INIT, "%s",
         [nserror.description UTF8String]);
-    [asset release];
-    [reader release];
     return;
   }
 
-  audio_tracks = [[asset tracksWithMediaType:AVMediaTypeAudio] retain];
-  video_tracks = [[asset tracksWithMediaType:AVMediaTypeVideo] retain];
+  audio_tracks = [asset tracksWithMediaType:AVMediaTypeAudio];
+  video_tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
   reader.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
 
   GST_INFO ("Found %lu video tracks and %lu audio tracks",
       (unsigned long)[video_tracks count], (unsigned long)[audio_tracks count]);

@@ -911,12 +883,12 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
   str = [NSString stringWithUTF8String: uri];
   url = [[NSURL alloc] initWithString: str];
-  asset = [[AVAsset assetWithURL: url] retain];
+  asset = [AVAsset assetWithURL: url];
 
   if (!asset.playable) {
     *error = g_error_new (GST_AVF_ASSET_SRC_ERROR, GST_AVF_ASSET_ERROR_NOT_PLAYABLE,
        "Media is not playable");
-    [asset release];
+    asset = nil;
     return nil;
   }

@@ -940,11 +912,11 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
   return self;
 }
 
-- (bool) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index
+- (BOOL) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index
 {
   NSArray *tracks;
   AVAssetTrack *track;
-  AVAssetReaderOutput **output;
+  AVAssetReaderOutput * __strong *output;
   NSDictionary *settings;
   NSString *mediaType;
   gint *selected_track;

@@ -978,7 +950,6 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
   *output = [AVAssetReaderTrackOutput
       assetReaderTrackOutputWithTrack:track
       outputSettings:settings];
-  [*output retain];
   [reader addOutput:*output];
   return TRUE;
 }

@@ -999,11 +970,11 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
 - (void) stop
 {
-  [self->reader cancelReading];
+  [reader cancelReading];
   reading = FALSE;
 }
 
-- (bool) hasMediaType: (GstAVFAssetReaderMediaType) type
+- (BOOL) hasMediaType: (GstAVFAssetReaderMediaType) type
 {
   if (type == GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO) {
     return [audio_tracks count] != 0;

@@ -1122,10 +1093,9 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
   return caps;
 }
 
-- (oneway void) release
+- (void) dealloc
 {
-  [asset release];
+  asset = nil;
   [self releaseReader];
 
   if (audio_caps != NULL) {


@@ -33,7 +33,7 @@ G_BEGIN_DECLS
 #define GST_AVF_VIDEO_SRC_CLASS(klass) \
   (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AVF_VIDEO_SRC, GstAVFVideoSrcClass))
 #define GST_AVF_VIDEO_SRC_IMPL(obj) \
-  ((GstAVFVideoSrcImpl *) GST_AVF_VIDEO_SRC_CAST (obj)->impl)
+  ((__bridge GstAVFVideoSrcImpl *) GST_AVF_VIDEO_SRC_CAST (obj)->impl)
 #define GST_IS_AVF_VIDEO_SRC(obj) \
   (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AVF_VIDEO_SRC))
 #define GST_IS_AVF_VIDEO_SRC_CLASS(klass) \

@@ -46,6 +46,8 @@ struct _GstAVFVideoSrc
 {
   GstPushSrc push_src;
 
+  /* NOTE: ARC no longer allows Objective-C pointers in structs. */
+  /* Instead, use gpointer with explicit __bridge_* calls */
   gpointer impl;
 };


@@ -201,12 +201,8 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
 - (void)finalize
 {
-  dispatch_release (mainQueue);
   mainQueue = NULL;
-  dispatch_release (workerQueue);
   workerQueue = NULL;
-
-  [super finalize];
 }
 
 - (BOOL)openDeviceInput

@@ -231,7 +227,6 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     device = [devices objectAtIndex:deviceIndex];
   }
   g_assert (device != nil);
-  [device retain];
 
   GST_INFO ("Opening '%s'", [[device localizedName] UTF8String]);

@@ -242,11 +237,9 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
         ("Failed to open device: %s",
             [[err localizedDescription] UTF8String]),
         (NULL));
-    [device release];
     device = nil;
     return NO;
   }
-  [input retain];
   return YES;
 }

@@ -280,7 +273,6 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   }
   screenInput.capturesMouseClicks = captureScreenMouseClicks;
   input = screenInput;
-  [input retain];
   return YES;
 #endif
 }

@@ -337,17 +329,13 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   [session removeInput:input];
   [session removeOutput:output];
 
-  [session release];
   session = nil;
 
-  [input release];
   input = nil;
 
-  [output release];
   output = nil;
 
   if (!captureScreen) {
-    [device release];
     device = nil;
   }

@@ -457,7 +445,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     CMVideoDimensions dimensions;
 
     /* formatDescription can't be retrieved with valueForKey so use a selector here */
-    formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
+    formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
     dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
     for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
       int min_fps_n, min_fps_d, max_fps_n, max_fps_d;

@@ -529,7 +517,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     CMFormatDescriptionRef formatDescription;
     CMVideoDimensions dimensions;
 
-    formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
+    formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
     dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
     if (dimensions.width == info->width && dimensions.height == info->height) {
       found_format = TRUE;

@@ -805,9 +793,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   dispatch_sync (mainQueue, ^{ [session stopRunning]; });
   dispatch_sync (workerQueue, ^{});
 
-  [bufQueueLock release];
   bufQueueLock = nil;
-  [bufQueue release];
   bufQueue = nil;
 
   if (textureCache)

@@ -902,7 +888,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   if ([bufQueue count] == BUFFER_QUEUE_SIZE)
     [bufQueue removeLastObject];
 
-  [bufQueue insertObject:@{@"sbuf": (id)sampleBuffer,
+  [bufQueue insertObject:@{@"sbuf": (__bridge id)sampleBuffer,
       @"timestamp": @(timestamp),
       @"duration": @(duration)}
       atIndex:0];

@@ -925,7 +911,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   }
 
   NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
-  sbuf = (CMSampleBufferRef) dic[@"sbuf"];
+  sbuf = (__bridge CMSampleBufferRef) dic[@"sbuf"];
   timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
   duration = (GstClockTime) [dic[@"duration"] longLongValue];
CFRetain (sbuf); CFRetain (sbuf);
@ -1224,28 +1210,16 @@ gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
0, "iOS AVFoundation video source"); 0, "iOS AVFoundation video source");
} }
#define OBJC_CALLOUT_BEGIN() \
NSAutoreleasePool *pool; \
\
pool = [[NSAutoreleasePool alloc] init]
#define OBJC_CALLOUT_END() \
[pool release]
static void static void
gst_avf_video_src_init (GstAVFVideoSrc * src) gst_avf_video_src_init (GstAVFVideoSrc * src)
{ {
OBJC_CALLOUT_BEGIN (); src->impl = (__bridge_retained gpointer)[[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
src->impl = [[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
OBJC_CALLOUT_END ();
} }
static void static void
gst_avf_video_src_finalize (GObject * obj) gst_avf_video_src_finalize (GObject * obj)
{ {
OBJC_CALLOUT_BEGIN (); CFBridgingRelease(GST_AVF_VIDEO_SRC_CAST(obj)->impl);
[GST_AVF_VIDEO_SRC_IMPL (obj) release];
OBJC_CALLOUT_END ();
G_OBJECT_CLASS (parent_class)->finalize (obj); G_OBJECT_CLASS (parent_class)->finalize (obj);
} }
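(The OBJC_CALLOUT_BEGIN/END macros removed above wrapped each call-out in a manually managed NSAutoreleasePool, which ARC forbids; the patch simply drops them. Where a scoped pool is still wanted under ARC, the idiom is an @autoreleasepool block, sketched here against the existing GST_AVF_VIDEO_SRC_IMPL macro; the surrounding function name is hypothetical.)

/* Hypothetical sketch: ARC replacement for an explicit NSAutoreleasePool. */
static GstCaps *
my_get_caps_with_pool (GstBaseSrc * basesrc)
{
  GstCaps *ret;

  @autoreleasepool {
    /* temporary Objective-C objects created in here are drained on block exit */
    ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
  }

  return ret;
}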
@ -1320,9 +1294,7 @@ gst_avf_video_src_change_state (GstElement * element, GstStateChange transition)
{ {
GstStateChangeReturn ret; GstStateChangeReturn ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState: transition]; ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState: transition];
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -1332,9 +1304,7 @@ gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
{ {
GstCaps *ret; GstCaps *ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps]; ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -1344,9 +1314,7 @@ gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
{ {
gboolean ret; gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps]; ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -1356,9 +1324,7 @@ gst_avf_video_src_start (GstBaseSrc * basesrc)
{ {
gboolean ret; gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start]; ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -1368,9 +1334,7 @@ gst_avf_video_src_stop (GstBaseSrc * basesrc)
{ {
gboolean ret; gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop]; ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -1380,9 +1344,7 @@ gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
{ {
gboolean ret; gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query]; ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -1392,9 +1354,7 @@ gst_avf_video_src_unlock (GstBaseSrc * basesrc)
{ {
gboolean ret; gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock]; ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -1404,9 +1364,7 @@ gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
{ {
gboolean ret; gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop]; ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -1416,9 +1374,7 @@ gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
{ {
GstFlowReturn ret; GstFlowReturn ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf]; ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf];
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -1429,9 +1385,7 @@ gst_avf_video_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{ {
GstCaps *ret; GstCaps *ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps]; ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps];
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -1442,9 +1396,7 @@ gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
{ {
gboolean ret; gboolean ret;
OBJC_CALLOUT_BEGIN ();
ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query]; ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query];
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -1452,7 +1404,5 @@ gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
static void static void
gst_avf_video_src_set_context (GstElement * element, GstContext * context) gst_avf_video_src_set_context (GstElement * element, GstContext * context)
{ {
OBJC_CALLOUT_BEGIN ();
[GST_AVF_VIDEO_SRC_IMPL (element) setContext:context]; [GST_AVF_VIDEO_SRC_IMPL (element) setContext:context];
OBJC_CALLOUT_END ();
} }

View file

@ -42,6 +42,8 @@ G_BEGIN_DECLS
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AV_SAMPLE_VIDEO_SINK)) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AV_SAMPLE_VIDEO_SINK))
#define GST_IS_AV_SAMPLE_VIDEO_SINK_CLASS(klass) \ #define GST_IS_AV_SAMPLE_VIDEO_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AV_SAMPLE_VIDEO_SINK)) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AV_SAMPLE_VIDEO_SINK))
#define GST_AV_SAMPLE_VIDEO_SINK_LAYER(obj) \
((__bridge AVSampleBufferDisplayLayer *)(obj->layer))
typedef struct _GstAVSampleVideoSink GstAVSampleVideoSink; typedef struct _GstAVSampleVideoSink GstAVSampleVideoSink;
typedef struct _GstAVSampleVideoSinkClass GstAVSampleVideoSinkClass; typedef struct _GstAVSampleVideoSinkClass GstAVSampleVideoSinkClass;
@ -50,7 +52,9 @@ struct _GstAVSampleVideoSink
{ {
GstVideoSink video_sink; GstVideoSink video_sink;
AVSampleBufferDisplayLayer *layer; /* NOTE: ARC no longer allows Objective-C pointers in structs. */
/* Instead, use gpointer with explicit __bridge_* calls */
gpointer layer;
GstVideoInfo info; GstVideoInfo info;

View file

@ -156,11 +156,11 @@ static void
gst_av_sample_video_sink_finalize (GObject * object) gst_av_sample_video_sink_finalize (GObject * object)
{ {
GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (object); GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (object);
__block AVSampleBufferDisplayLayer *layer = av_sink->layer; __block gpointer layer = av_sink->layer;
if (layer) { if (layer) {
dispatch_async (dispatch_get_main_queue (), ^{ dispatch_async (dispatch_get_main_queue (), ^{
[layer release]; CFBridgingRelease(layer);
}); });
} }
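(A minimal sketch of the deferred-release pattern above, assuming a hypothetical MySink type with a gpointer layer field; only the __block capture and the CFBridgingRelease on the main queue mirror the change.)

/* Hypothetical sketch: handing a bridged layer back to ARC on the main thread. */
static void
my_sink_finalize (GObject * object)
{
  MySink *sink = MY_SINK (object);
  __block gpointer layer = sink->layer;

  if (layer) {
    dispatch_async (dispatch_get_main_queue (), ^{
      /* ownership returns to ARC here, so the layer is destroyed on the main thread */
      CFBridgingRelease (layer);
    });
    sink->layer = NULL;
  }

  G_OBJECT_CLASS (parent_class)->finalize (object);
}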
@ -198,19 +198,21 @@ gst_av_sample_video_sink_start (GstBaseSink * bsink)
GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink); GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);
if ([NSThread isMainThread]) { if ([NSThread isMainThread]) {
av_sink->layer = [[AVSampleBufferDisplayLayer alloc] init]; AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];
av_sink->layer = (__bridge_retained gpointer)layer;
if (av_sink->keep_aspect_ratio) if (av_sink->keep_aspect_ratio)
av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect; layer.videoGravity = AVLayerVideoGravityResizeAspect;
else else
av_sink->layer.videoGravity = AVLayerVideoGravityResize; layer.videoGravity = AVLayerVideoGravityResize;
g_object_notify (G_OBJECT (av_sink), "layer"); g_object_notify (G_OBJECT (av_sink), "layer");
} else { } else {
dispatch_sync (dispatch_get_main_queue (), ^{ dispatch_sync (dispatch_get_main_queue (), ^{
av_sink->layer = [[AVSampleBufferDisplayLayer alloc] init]; AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];
av_sink->layer = (__bridge_retained gpointer)layer;
if (av_sink->keep_aspect_ratio) if (av_sink->keep_aspect_ratio)
av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect; layer.videoGravity = AVLayerVideoGravityResizeAspect;
else else
av_sink->layer.videoGravity = AVLayerVideoGravityResize; layer.videoGravity = AVLayerVideoGravityResize;
g_object_notify (G_OBJECT (av_sink), "layer"); g_object_notify (G_OBJECT (av_sink), "layer");
}); });
} }
@ -224,7 +226,7 @@ _stop_requesting_data (GstAVSampleVideoSink * av_sink)
{ {
if (av_sink->layer) { if (av_sink->layer) {
if (av_sink->layer_requesting_data) if (av_sink->layer_requesting_data)
[av_sink->layer stopRequestingMediaData]; [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) stopRequestingMediaData];
av_sink->layer_requesting_data = FALSE; av_sink->layer_requesting_data = FALSE;
} }
} }
@ -243,7 +245,7 @@ gst_av_sample_video_sink_stop (GstBaseSink * bsink)
g_mutex_lock (&av_sink->render_lock); g_mutex_lock (&av_sink->render_lock);
_stop_requesting_data (av_sink); _stop_requesting_data (av_sink);
g_mutex_unlock (&av_sink->render_lock); g_mutex_unlock (&av_sink->render_lock);
[av_sink->layer flushAndRemoveImage]; [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) flushAndRemoveImage];
} }
return TRUE; return TRUE;
@ -661,11 +663,12 @@ _enqueue_sample (GstAVSampleVideoSink * av_sink, GstBuffer *buf)
kCFBooleanTrue); kCFBooleanTrue);
} }
AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
if (av_sink->keep_aspect_ratio) if (av_sink->keep_aspect_ratio)
av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect; layer.videoGravity = AVLayerVideoGravityResizeAspect;
else else
av_sink->layer.videoGravity = AVLayerVideoGravityResize; layer.videoGravity = AVLayerVideoGravityResize;
[av_sink->layer enqueueSampleBuffer:sample_buf]; [layer enqueueSampleBuffer:sample_buf];
CFRelease (pbuf); CFRelease (pbuf);
CFRelease (sample_buf); CFRelease (sample_buf);
@ -678,13 +681,14 @@ _request_data (GstAVSampleVideoSink * av_sink)
{ {
av_sink->layer_requesting_data = TRUE; av_sink->layer_requesting_data = TRUE;
[av_sink->layer requestMediaDataWhenReadyOnQueue: AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
[layer requestMediaDataWhenReadyOnQueue:
dispatch_get_global_queue (DISPATCH_QUEUE_PRIORITY_DEFAULT, 0) dispatch_get_global_queue (DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
usingBlock:^{ usingBlock:^{
while (TRUE) { while (TRUE) {
/* don't needlessly fill up avsamplebufferdisplaylayer's queue. /* don't needlessly fill up avsamplebufferdisplaylayer's queue.
* This also allows us to skip displaying late frames */ * This also allows us to skip displaying late frames */
if (!av_sink->layer.readyForMoreMediaData) if (!layer.readyForMoreMediaData)
break; break;
g_mutex_lock (&av_sink->render_lock); g_mutex_lock (&av_sink->render_lock);
@ -752,9 +756,10 @@ gst_av_sample_video_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
MAC_OS_X_VERSION_MAX_ALLOWED >= 1010 && \ MAC_OS_X_VERSION_MAX_ALLOWED >= 1010 && \
defined(MAC_OS_X_VERSION_MIN_REQUIRED) && \ defined(MAC_OS_X_VERSION_MIN_REQUIRED) && \
MAC_OS_X_VERSION_MIN_REQUIRED <= MAC_OS_X_VERSION_10_4 MAC_OS_X_VERSION_MIN_REQUIRED <= MAC_OS_X_VERSION_10_4
if ([av_sink->layer status] == AVQueuedSampleBufferRenderingStatusFailed) { AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
if ([layer status] == AVQueuedSampleBufferRenderingStatusFailed) {
GST_ERROR_OBJECT (av_sink, "failed to enqueue buffer on layer, %s", GST_ERROR_OBJECT (av_sink, "failed to enqueue buffer on layer, %s",
[[[av_sink->layer error] description] UTF8String]); [[[layer error] description] UTF8String]);
return GST_FLOW_ERROR; return GST_FLOW_ERROR;
} }
#endif #endif

View file

@ -44,6 +44,12 @@ G_BEGIN_DECLS
#define GST_IS_IOS_ASSET_SRC_CLASS(klass) \ #define GST_IS_IOS_ASSET_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_IOS_ASSET_SRC)) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_IOS_ASSET_SRC))
#define GST_IOS_ASSET_SRC_CAST(obj) ((GstIOSAssetSrc*) obj) #define GST_IOS_ASSET_SRC_CAST(obj) ((GstIOSAssetSrc*) obj)
#define GST_IOS_ASSET_SRC_ASSET(obj) \
(__bridge ALAssetRepresentation *)(obj->asset)
#define GST_IOS_ASSET_SRC_LIBRARY(obj) \
(__bridge GstAssetsLibrary *)(obj->library)
#define GST_IOS_ASSET_SRC_URL(obj) \
(__bridge NSURL *)(obj->url)
typedef struct _GstIOSAssetSrc GstIOSAssetSrc; typedef struct _GstIOSAssetSrc GstIOSAssetSrc;
typedef struct _GstIOSAssetSrcClass GstIOSAssetSrcClass; typedef struct _GstIOSAssetSrcClass GstIOSAssetSrcClass;
@ -68,9 +74,12 @@ struct _GstIOSAssetSrc {
/*< private >*/ /*< private >*/
gchar * uri; /* asset uri */ gchar * uri; /* asset uri */
NSURL * url; /* asset url */
ALAssetRepresentation * asset; /* asset representation */ /* NOTE: ARC no longer allows Objective-C pointers in structs. */
GstAssetsLibrary * library; /* assets library */ /* Instead, use gpointer with explicit __bridge_* calls */
gpointer url; /* asset url */
gpointer asset; /* asset representation */
gpointer library; /* assets library */
}; };
struct _GstIOSAssetSrcClass { struct _GstIOSAssetSrcClass {

View file

@ -52,12 +52,6 @@ GST_DEBUG_CATEGORY_STATIC (gst_ios_asset_src_debug);
#define DEFAULT_BLOCKSIZE 4*1024 #define DEFAULT_BLOCKSIZE 4*1024
#define OBJC_CALLOUT_BEGIN() \
NSAutoreleasePool *pool; \
\
pool = [[NSAutoreleasePool alloc] init]
#define OBJC_CALLOUT_END() \
[pool release]
enum enum
{ {
@ -142,25 +136,22 @@ gst_ios_asset_src_class_init (GstIOSAssetSrcClass * klass)
static void static void
gst_ios_asset_src_init (GstIOSAssetSrc * src) gst_ios_asset_src_init (GstIOSAssetSrc * src)
{ {
OBJC_CALLOUT_BEGIN ();
src->uri = NULL; src->uri = NULL;
src->asset = NULL; src->asset = NULL;
src->library = [[[GstAssetsLibrary alloc] init] retain]; src->library = (__bridge_retained gpointer)[[GstAssetsLibrary alloc] init];
gst_base_src_set_blocksize (GST_BASE_SRC (src), DEFAULT_BLOCKSIZE); gst_base_src_set_blocksize (GST_BASE_SRC (src), DEFAULT_BLOCKSIZE);
OBJC_CALLOUT_END ();
} }
static void static void
gst_ios_asset_src_free_resources (GstIOSAssetSrc *src) gst_ios_asset_src_free_resources (GstIOSAssetSrc *src)
{ {
OBJC_CALLOUT_BEGIN ();
if (src->asset != NULL) { if (src->asset != NULL) {
[src->asset release]; CFBridgingRelease(src->asset);
src->asset = NULL; src->asset = NULL;
} }
if (src->url != NULL) { if (src->url != NULL) {
[src->url release]; CFBridgingRelease(src->url);
src->url = NULL; src->url = NULL;
} }
@ -168,7 +159,6 @@ gst_ios_asset_src_free_resources (GstIOSAssetSrc *src)
g_free (src->uri); g_free (src->uri);
src->uri = NULL; src->uri = NULL;
} }
OBJC_CALLOUT_END ();
} }
static void static void
@ -176,12 +166,10 @@ gst_ios_asset_src_finalize (GObject * object)
{ {
GstIOSAssetSrc *src; GstIOSAssetSrc *src;
OBJC_CALLOUT_BEGIN ();
src = GST_IOS_ASSET_SRC (object); src = GST_IOS_ASSET_SRC (object);
gst_ios_asset_src_free_resources (src); gst_ios_asset_src_free_resources (src);
[src->library release]; CFBridgingRelease(src->library);
OBJC_CALLOUT_END ();
G_OBJECT_CLASS (gst_ios_asset_src_parent_class)->finalize (object); G_OBJECT_CLASS (gst_ios_asset_src_parent_class)->finalize (object);
} }
@ -192,7 +180,6 @@ gst_ios_asset_src_set_uri (GstIOSAssetSrc * src, const gchar * uri, GError **err
NSString *nsuristr; NSString *nsuristr;
NSURL *url; NSURL *url;
OBJC_CALLOUT_BEGIN ();
/* the element must be stopped in order to do this */ /* the element must be stopped in order to do this */
GST_OBJECT_LOCK (src); GST_OBJECT_LOCK (src);
state = GST_STATE (src); state = GST_STATE (src);
@ -213,11 +200,10 @@ gst_ios_asset_src_set_uri (GstIOSAssetSrc * src, const gchar * uri, GError **err
} }
GST_INFO_OBJECT (src, "URI : %s", src->uri); GST_INFO_OBJECT (src, "URI : %s", src->uri);
src->url = url; src->url = (__bridge_retained gpointer)url;
src->uri = g_strdup (uri); src->uri = g_strdup (uri);
g_object_notify (G_OBJECT (src), "uri"); g_object_notify (G_OBJECT (src), "uri");
OBJC_CALLOUT_END ();
return TRUE; return TRUE;
/* ERROR */ /* ERROR */
@ -229,7 +215,6 @@ wrong_state:
"Changing the 'uri' property on iosassetsrc when an asset is " "Changing the 'uri' property on iosassetsrc when an asset is "
"open is not supported."); "open is not supported.");
GST_OBJECT_UNLOCK (src); GST_OBJECT_UNLOCK (src);
OBJC_CALLOUT_END ();
return FALSE; return FALSE;
} }
} }
@ -285,7 +270,6 @@ gst_ios_asset_src_create (GstBaseSrc * basesrc, guint64 offset, guint length,
GstFlowReturn ret; GstFlowReturn ret;
GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc); GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc);
OBJC_CALLOUT_BEGIN ();
buf = gst_buffer_new_and_alloc (length); buf = gst_buffer_new_and_alloc (length);
if (G_UNLIKELY (buf == NULL && length > 0)) { if (G_UNLIKELY (buf == NULL && length > 0)) {
GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", length); GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", length);
@ -296,10 +280,10 @@ gst_ios_asset_src_create (GstBaseSrc * basesrc, guint64 offset, guint length,
gst_buffer_map (buf, &info, GST_MAP_READWRITE); gst_buffer_map (buf, &info, GST_MAP_READWRITE);
/* No need to read anything if length is 0 */ /* No need to read anything if length is 0 */
bytes_read = [src->asset getBytes: info.data bytes_read = [GST_IOS_ASSET_SRC_ASSET(src) getBytes: info.data
fromOffset:offset fromOffset:offset
length:length length:length
error:&err]; error:&err];
if (G_UNLIKELY (err != NULL)) { if (G_UNLIKELY (err != NULL)) {
goto could_not_read; goto could_not_read;
} }
@ -333,7 +317,6 @@ could_not_read:
} }
exit: exit:
{ {
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -374,9 +357,7 @@ gst_ios_asset_src_get_size (GstBaseSrc * basesrc, guint64 * size)
src = GST_IOS_ASSET_SRC (basesrc); src = GST_IOS_ASSET_SRC (basesrc);
OBJC_CALLOUT_BEGIN (); *size = (guint64) [GST_IOS_ASSET_SRC_ASSET(src) size];
*size = (guint64) [src->asset size];
OBJC_CALLOUT_END ();
return TRUE; return TRUE;
} }
@ -386,8 +367,7 @@ gst_ios_asset_src_start (GstBaseSrc * basesrc)
GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc); GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc);
gboolean ret = TRUE; gboolean ret = TRUE;
OBJC_CALLOUT_BEGIN (); src->asset = (__bridge_retained gpointer)[GST_IOS_ASSET_SRC_LIBRARY(src) assetForURLSync: GST_IOS_ASSET_SRC_URL(src)];
src->asset = [[src->library assetForURLSync: src->url] retain];
if (src->asset == NULL) { if (src->asset == NULL) {
GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ,
@ -396,7 +376,6 @@ gst_ios_asset_src_start (GstBaseSrc * basesrc)
ret = FALSE; ret = FALSE;
}; };
OBJC_CALLOUT_END ();
return ret; return ret;
} }
@ -406,9 +385,7 @@ gst_ios_asset_src_stop (GstBaseSrc * basesrc)
{ {
GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc); GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc);
OBJC_CALLOUT_BEGIN (); CFBridgingRelease(src->asset);
[src->asset release];
OBJC_CALLOUT_END ();
return TRUE; return TRUE;
} }
@ -480,24 +457,23 @@ gst_ios_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
dispatch_async(queue, ^{ dispatch_async(queue, ^{
[self assetForURL:uri resultBlock: [self assetForURL:uri resultBlock:
^(ALAsset *myasset) ^(ALAsset *myasset)
{ {
self.asset = myasset; self.asset = myasset;
self.result = [myasset defaultRepresentation]; self.result = [myasset defaultRepresentation];
dispatch_semaphore_signal(sema); dispatch_semaphore_signal(sema);
} }
failureBlock: failureBlock:
^(NSError *myerror) ^(NSError *myerror)
{ {
self.result = nil; self.result = nil;
dispatch_semaphore_signal(sema); dispatch_semaphore_signal(sema);
} }
]; ];
}); });
dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER); dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER);
dispatch_release(sema);
return self.result; return self.result;
} }

View file

@ -56,7 +56,6 @@ enable_mt_mode (void)
{ {
NSThread * th = [[NSThread alloc] init]; NSThread * th = [[NSThread alloc] init];
[th start]; [th start];
[th release];
g_assert ([NSThread isMultiThreaded]); g_assert ([NSThread isMultiThreaded]);
} }
#endif #endif

View file

@ -45,10 +45,10 @@ typedef struct _ContextThreadData
typedef struct _TextureWrapper typedef struct _TextureWrapper
{ {
#if HAVE_IOS #if HAVE_IOS
CVOpenGLESTextureCacheRef *cache; CVOpenGLESTextureCacheRef cache;
CVOpenGLESTextureRef texture; CVOpenGLESTextureRef texture;
#else #else
CVOpenGLTextureCacheRef *cache; CVOpenGLTextureCacheRef cache;
CVOpenGLTextureRef texture; CVOpenGLTextureRef texture;
#endif #endif
@ -69,7 +69,7 @@ gst_video_texture_cache_new (GstGLContext * ctx)
CFDictionaryCreateMutable (NULL, 0, &kCFTypeDictionaryKeyCallBacks, CFDictionaryCreateMutable (NULL, 0, &kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks); &kCFTypeDictionaryValueCallBacks);
CVOpenGLESTextureCacheCreate (kCFAllocatorDefault, (CFDictionaryRef) cache_attrs, CVOpenGLESTextureCacheCreate (kCFAllocatorDefault, (CFDictionaryRef) cache_attrs,
(CVEAGLContext) gst_gl_context_get_gl_context (ctx), NULL, &cache->cache); (__bridge CVEAGLContext) (gpointer)gst_gl_context_get_gl_context (ctx), NULL, &cache->cache);
#else #else
gst_ios_surface_memory_init (); gst_ios_surface_memory_init ();
#if 0 #if 0
@ -199,7 +199,8 @@ _do_create_memory (GstGLContext * context, ContextThreadData * data)
success: { success: {
TextureWrapper *texture_data = g_new(TextureWrapper, 1); TextureWrapper *texture_data = g_new(TextureWrapper, 1);
texture_data->cache = CFRetain(cache->cache); CFRetain(cache->cache);
texture_data->cache = cache->cache;
texture_data->texture = texture; texture_data->texture = texture;
gl_target = gst_gl_texture_target_from_gl (CVOpenGLESTextureGetTarget (texture)); gl_target = gst_gl_texture_target_from_gl (CVOpenGLESTextureGetTarget (texture));
memory = gst_apple_core_video_memory_new_wrapped (gpixbuf, plane, size); memory = gst_apple_core_video_memory_new_wrapped (gpixbuf, plane, size);
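(Unlike the Objective-C fields elsewhere in this patch, cache and texture are CoreVideo/CoreFoundation objects, so the struct can keep their real types and manage them with plain CFRetain/CFRelease rather than __bridge_* casts. A hypothetical sketch, assuming the iOS (CVOpenGLESTexture*) side of the #if above; MyTextureWrapper and its helpers are made-up names.)

/* Hypothetical sketch: CF objects in a C struct need CFRetain/CFRelease, not bridging. */
typedef struct
{
  CVOpenGLESTextureCacheRef cache;
  CVOpenGLESTextureRef texture;
} MyTextureWrapper;

static MyTextureWrapper *
my_texture_wrapper_new (CVOpenGLESTextureCacheRef cache, CVOpenGLESTextureRef texture)
{
  MyTextureWrapper *w = g_new (MyTextureWrapper, 1);

  CFRetain (cache);        /* take a reference for the wrapper */
  w->cache = cache;
  w->texture = texture;    /* assumed already retained by the caller */
  return w;
}

static void
my_texture_wrapper_free (MyTextureWrapper * w)
{
  CFRelease (w->texture);
  CFRelease (w->cache);
  g_free (w);
}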

View file

@ -5,6 +5,7 @@ noinst_PROGRAMS = cocoa-videooverlay
cocoa_videooverlay_SOURCES = cocoa-videooverlay.m cocoa_videooverlay_SOURCES = cocoa-videooverlay.m
cocoa_videooverlay_OBJCFLAGS=-Wno-error=unused-command-line-argument \ cocoa_videooverlay_OBJCFLAGS=-Wno-error=unused-command-line-argument \
-fobjc-arc \
-I$(top_srcdir)/gst-libs -I$(top_builddir)/gst-libs \ -I$(top_srcdir)/gst-libs -I$(top_builddir)/gst-libs \
$(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) \ $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) \
$(GL_CFLAGS) $(GL_OBJCFLAGS) $(GL_CFLAGS) $(GL_OBJCFLAGS)

View file

@ -121,8 +121,6 @@ static GstBusSyncReply create_window (GstBus* bus, GstMessage* message, MainWind
static void end_stream_cb(GstBus* bus, GstMessage* message, MainWindow* window) static void end_stream_cb(GstBus* bus, GstMessage* message, MainWindow* window)
{ {
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
g_print ("end of stream\n"); g_print ("end of stream\n");
gst_element_set_state ([window pipeline], GST_STATE_NULL); gst_element_set_state ([window pipeline], GST_STATE_NULL);
@ -130,8 +128,6 @@ static void end_stream_cb(GstBus* bus, GstMessage* message, MainWindow* window)
g_main_loop_quit ([window loop]); g_main_loop_quit ([window loop]);
[window performSelectorOnMainThread:@selector(customClose) withObject:nil waitUntilDone:YES]; [window performSelectorOnMainThread:@selector(customClose) withObject:nil waitUntilDone:YES];
[pool release];
} }
static gpointer thread_func (MainWindow* window) static gpointer thread_func (MainWindow* window)
@ -162,11 +158,9 @@ int main(int argc, char **argv)
gboolean ok=FALSE; gboolean ok=FALSE;
GstBus *bus=NULL; GstBus *bus=NULL;
GThread *loop_thread=NULL; GThread *loop_thread=NULL;
NSAutoreleasePool *pool=nil;
NSRect rect; NSRect rect;
MainWindow *window=nil; MainWindow *window=nil;
pool = [[NSAutoreleasePool alloc] init];
[NSApplication sharedApplication]; [NSApplication sharedApplication];
g_print("app created\n"); g_print("app created\n");
@ -202,14 +196,15 @@ int main(int argc, char **argv)
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_signal_watch (bus); gst_bus_add_signal_watch (bus);
g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), window); /* NOTE: window is not bridge_retained because its lifetime is just this function */
g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), window); g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), (__bridge gpointer)window);
g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), window); g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), (__bridge gpointer)window);
gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, window, NULL); g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), (__bridge gpointer)window);
gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, (__bridge gpointer)window, NULL);
gst_object_unref (bus); gst_object_unref (bus);
loop_thread = g_thread_new (NULL, loop_thread = g_thread_new (NULL,
(GThreadFunc) thread_func, window); (GThreadFunc) thread_func, (__bridge gpointer)window);
gst_element_set_state (pipeline, GST_STATE_PLAYING); gst_element_set_state (pipeline, GST_STATE_PLAYING);
@ -225,9 +220,5 @@ int main(int argc, char **argv)
g_thread_join (loop_thread); g_thread_join (loop_thread);
[window release];
[pool release];
return 0; return 0;
} }
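(A short sketch of the bridge-cast choice noted in the example above: __bridge passes the window to GLib callbacks without transferring ownership, which is safe only while a strong reference keeps it alive. setup_bus_watch is a hypothetical helper.)

/* Hypothetical sketch: picking the bridge cast for callback user data. */
static void
setup_bus_watch (GstBus * bus, MainWindow * window)
{
  /* __bridge: no ownership transfer; valid because the caller holds a strong
   * reference to 'window' for as long as these callbacks can fire. */
  g_signal_connect (bus, "message::eos",
      G_CALLBACK (end_stream_cb), (__bridge gpointer) window);

  /* __bridge_retained would be needed if the callbacks could outlive every
   * strong reference, and would then have to be balanced with CFBridgingRelease(). */
}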