iosavassetsrc: Port to 1.0

Also enables this element on OSX >= 10.7.

https://bugzilla.gnome.org/show_bug.cgi?id=728249
Matthieu Bouron authored 2014-04-03 14:02:18 +01:00, committed by Edward Hervey
parent 9fec222b5a
commit ecf765635b
5 changed files with 288 additions and 184 deletions
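For context, a minimal GStreamer 1.0 application driving the ported element might look like the sketch below. The element name, its "uri" property and its sometimes "audio"/"video" pads come from the code in this commit; the fakesink and the example URI are purely illustrative assumptions, not part of the change.

/* sketch: play one stream of an iOS asset through iosavassetsrc (1.0 API) */
#include <gst/gst.h>

static void
on_pad_added (GstElement * src, GstPad * pad, gpointer user_data)
{
  GstElement *sink = GST_ELEMENT (user_data);
  GstPad *sinkpad = gst_element_get_static_pad (sink, "sink");

  /* link whichever sometimes pad ("audio" or "video") appears first */
  if (!gst_pad_is_linked (sinkpad))
    gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}

int
main (int argc, char **argv)
{
  GstElement *pipeline, *src, *sink;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new (NULL);
  src = gst_element_factory_make ("iosavassetsrc", NULL);
  sink = gst_element_factory_make ("fakesink", NULL);
  if (!pipeline || !src || !sink)
    return -1;

  /* the element handles file:// and ipod-library:// URIs */
  g_object_set (src, "uri", "file:///path/to/asset.m4v", NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
  g_signal_connect (src, "pad-added", G_CALLBACK (on_pad_added), sink);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
  if (msg)
    gst_message_unref (msg);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (bus);
  gst_object_unref (pipeline);
  return 0;
}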

sys/applemedia/Makefile.am

@@ -79,8 +79,7 @@ noinst_HEADERS = \
 
 if HAVE_IOS
 libgstapplemedia_la_SOURCES += \
-	iosassetsrc.m \
-	iosavassetsrc.m
+	iosassetsrc.m
 
 libgstapplemedia_la_LDFLAGS += \
 	-Wl,-framework -Wl,Foundation \
@@ -102,7 +101,8 @@ endif
 
 if HAVE_AVFOUNDATION
 libgstapplemedia_la_SOURCES += \
-	avfvideosrc.m
+	avfvideosrc.m \
+	iosavassetsrc.m
 
 libgstapplemedia_la_LDFLAGS += \
 	-Wl,-framework -Wl,AVFoundation

sys/applemedia/iosassetsrc.m

@@ -39,6 +39,7 @@
 #endif
 
 #include <gst/gst.h>
+#include <gst/base/base.h>
 #include "iosassetsrc.h"
 
 static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",

sys/applemedia/iosavassetsrc.h

@@ -22,7 +22,14 @@
 #ifndef __GST_AVASSET_SRC_H__
 #define __GST_AVASSET_SRC_H__
 
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
 #include <gst/gst.h>
+#include <gst/base/base.h>
+#include <gst/audio/audio.h>
+#import <AVFoundation/AVFoundation.h>
 #import <AVFoundation/AVAssetReader.h>
 #import <AVFoundation/AVAssetReaderOutput.h>
@@ -100,10 +107,6 @@ struct _GstAVAssetSrc
   GstPad *videopad;
   GstPad *audiopad;
-  GstTask *video_task;
-  GstTask *audio_task;
-  GStaticRecMutex video_lock;
-  GStaticRecMutex audio_lock;
 
   gint selected_video_track;
   gint selected_audio_track;
@@ -112,6 +115,9 @@ struct _GstAVAssetSrc
   GMutex lock;
   GstEvent *seek_event;
 
+  GstFlowReturn last_audio_pad_ret;
+  GstFlowReturn last_video_pad_ret;
+
   /* Properties */
   gchar * uri;
 };

sys/applemedia/iosavassetsrc.m

@@ -2,6 +2,8 @@
 * GStreamer
 * Copyright (C) 2013 Fluendo S.L. <support@fluendo.com>
 *   Authors: Andoni Morales Alastruey <amorales@fluendo.com>
+* Copyright (C) 2014 Collabora Ltd.
+*   Authors: Matthieu Bouron <matthieu.bouron@collabora.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
@@ -36,11 +38,8 @@
 # include <config.h>
 #endif
 
-#include <gst/gst.h>
 #include "iosavassetsrc.h"
 #include "coremediabuffer.h"
-
-#import <AVFoundation/AVFoundation.h>
 
 GST_DEBUG_CATEGORY_STATIC (gst_avasset_src_debug);
 #define GST_CAT_DEFAULT gst_avasset_src_debug
@@ -68,26 +67,27 @@ enum
   PROP_URI
 };
 
-#define COMMON_CAPS "endianness = (int) {" G_STRINGIFY (G_BYTE_ORDER) " }, " \
-    "signed = (boolean) { TRUE }, " \
-    "rate = (int) [1, MAX], " \
-    "channels = (int) [1, 2];"
-
 static GstStaticPadTemplate audio_factory = GST_STATIC_PAD_TEMPLATE ("audio",
     GST_PAD_SRC,
     GST_PAD_SOMETIMES,
-    GST_STATIC_CAPS (
-        "audio/x-raw-float, width = (int) 32, depth = (int) 32, " COMMON_CAPS)
-    );
+    GST_STATIC_CAPS ("audio/x-raw, "
+        "format = (string) F32LE, "
+        "rate = " GST_AUDIO_RATE_RANGE ", "
+        "channels = (int) [1, 2], "
+        "layout = (string) interleaved"
+    )
+    );
 
 static GstStaticPadTemplate video_factory = GST_STATIC_PAD_TEMPLATE ("video",
     GST_PAD_SRC,
     GST_PAD_SOMETIMES,
-    GST_STATIC_CAPS ("video/x-raw-yuv, format=(fourcc)NV12, "
-        "framerate = (fraction) [ 0, MAX ], "
-        "width = (int) [1, MAX], "
-        "height = (int) [1, MAX]")
-    );
+    GST_STATIC_CAPS ("video/x-raw, "
+        "format = (string) NV12, "
+        "framerate = " GST_VIDEO_FPS_RANGE ", "
+        "width = " GST_VIDEO_SIZE_RANGE ", "
+        "height = " GST_VIDEO_SIZE_RANGE
+    )
+    );
 
 static void gst_avasset_src_set_property (GObject * object, guint prop_id,
     const GValue * value, GParamSpec * pspec);
@@ -98,8 +98,8 @@ static void gst_avasset_src_dispose (GObject *object);
 static GstStateChangeReturn gst_avasset_src_change_state (GstElement * element,
     GstStateChange transition);
-static gboolean gst_avasset_src_query (GstPad *pad, GstQuery *query);
-static gboolean gst_avasset_src_event (GstPad *pad, GstEvent *event);
+static gboolean gst_avasset_src_query (GstPad *pad, GstObject * parent, GstQuery *query);
+static gboolean gst_avasset_src_event (GstPad *pad, GstObject * parent, GstEvent *event);
 static gboolean gst_avasset_src_send_event (GstAVAssetSrc *self,
     GstEvent *event);
@@ -107,7 +107,7 @@ static void gst_avasset_src_read_audio (GstAVAssetSrc *self);
 static void gst_avasset_src_read_video (GstAVAssetSrc *self);
 static void gst_avasset_src_start (GstAVAssetSrc *self);
 static void gst_avasset_src_stop (GstAVAssetSrc *self);
-static void gst_avasset_src_start_reading (GstAVAssetSrc *self);
+static gboolean gst_avasset_src_start_reading (GstAVAssetSrc *self);
 static void gst_avasset_src_stop_reading (GstAVAssetSrc *self);
 static void gst_avasset_src_stop_all (GstAVAssetSrc *self);
 static void gst_avasset_src_uri_handler_init (gpointer g_iface,
@@ -128,28 +128,12 @@ _do_init (GType avassetsrc_type)
       0, "iosavassetsrc element");
 }
 
-GST_BOILERPLATE_FULL (GstAVAssetSrc, gst_avasset_src, GstElement,
-    GST_TYPE_ELEMENT, _do_init);
+G_DEFINE_TYPE_WITH_CODE (GstAVAssetSrc, gst_avasset_src, GST_TYPE_ELEMENT,
+    _do_init (g_define_type_id));
 
 /* GObject vmethod implementations */
 
-static void
-gst_avasset_src_base_init (gpointer gclass)
-{
-  GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
-
-  gst_element_class_set_details_simple(element_class,
-    "Source and decoder for iOS assets",
-    "Source/Codec",
-    "Read and decode samples from iOS assets using the AVAssetReader API",
-    "Andoni Morales Alastruey amorales@fluendo.com");
-
-  gst_element_class_add_pad_template (element_class,
-      gst_static_pad_template_get (&audio_factory));
-  gst_element_class_add_pad_template (element_class,
-      gst_static_pad_template_get (&video_factory));
-}
-
 static void
 gst_avasset_src_class_init (GstAVAssetSrcClass * klass)
 {
@@ -159,6 +143,17 @@ gst_avasset_src_class_init (GstAVAssetSrcClass * klass)
   gobject_class = (GObjectClass *) klass;
   gstelement_class = (GstElementClass *) klass;
 
+  gst_element_class_set_static_metadata (gstelement_class,
+    "Source and decoder for iOS assets",
+    "Source/Codec",
+    "Read and decode samples from iOS assets using the AVAssetReader API",
+    "Andoni Morales Alastruey amorales@fluendo.com");
+
+  gst_element_class_add_pad_template (gstelement_class,
+      gst_static_pad_template_get (&audio_factory));
+  gst_element_class_add_pad_template (gstelement_class,
+      gst_static_pad_template_get (&video_factory));
+
   gobject_class->set_property = gst_avasset_src_set_property;
   gobject_class->get_property = gst_avasset_src_get_property;
   gobject_class->dispose = gst_avasset_src_dispose;
@@ -180,17 +175,13 @@ gst_avasset_src_class_init (GstAVAssetSrcClass * klass)
 }
 
 static void
-gst_avasset_src_init (GstAVAssetSrc * self, GstAVAssetSrcClass * gclass)
+gst_avasset_src_init (GstAVAssetSrc * self)
 {
-  self->audio_task = gst_task_create (
-      (GstTaskFunction) gst_avasset_src_read_audio, self);
-  self->video_task = gst_task_create (
-      (GstTaskFunction) gst_avasset_src_read_video, self);
-  gst_task_set_lock (self->video_task, &self->video_lock);
-  gst_task_set_lock (self->audio_task, &self->audio_lock);
-
   self->selected_audio_track = 0;
   self->selected_video_track = 0;
-  g_mutex_init(&self->lock);
+  self->last_audio_pad_ret = GST_FLOW_OK;
+  self->last_video_pad_ret = GST_FLOW_OK;
+  g_mutex_init (&self->lock);
 }
 
 static void
@@ -198,16 +189,6 @@ gst_avasset_src_dispose (GObject *object)
 {
   GstAVAssetSrc *self = GST_AVASSET_SRC (object);
 
-  if (self->video_task != NULL) {
-    gst_object_unref (self->video_task);
-    self->video_task = NULL;
-  }
-
-  if (self->audio_task) {
-    gst_object_unref (self->audio_task);
-    self->audio_task = NULL;
-  }
-
   if (self->uri != NULL) {
     g_free (self->uri);
     self->uri = NULL;
@@ -261,6 +242,10 @@ gst_avasset_src_change_state (GstElement * element, GstStateChange transition)
   GstStateChangeReturn ret;
   GError *error;
 
+  GST_DEBUG ("%s => %s",
+      gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
+      gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
+
   OBJC_CALLOUT_BEGIN ();
   switch (transition) {
     case GST_STATE_CHANGE_NULL_TO_READY: {
@@ -277,9 +262,7 @@ gst_avasset_src_change_state (GstElement * element, GstStateChange transition)
     }
     case GST_STATE_CHANGE_READY_TO_PAUSED:
       gst_avasset_src_start (self);
-      self->state = GST_AVASSET_SRC_STATE_STARTED;
       gst_avasset_src_start_reading (self);
-      self->state = GST_AVASSET_SRC_STATE_READING;
       break;
     case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
       break;
@@ -287,13 +270,14 @@ gst_avasset_src_change_state (GstElement * element, GstStateChange transition)
       break;
   }
 
-  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+  ret = GST_ELEMENT_CLASS (gst_avasset_src_parent_class)->change_state (element, transition);
 
   switch (transition) {
     case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
       break;
     case GST_STATE_CHANGE_PAUSED_TO_READY:
-      gst_avasset_src_stop_all (self);
+      gst_avasset_src_stop_reading (self);
+      gst_avasset_src_stop (self);
       break;
     case GST_STATE_CHANGE_READY_TO_NULL:
       [self->reader release];
@@ -305,11 +289,31 @@ gst_avasset_src_change_state (GstElement * element, GstStateChange transition)
   return ret;
 }
 
+static GstCaps *
+gst_avasset_src_get_caps(GstAVAssetSrc * self, GstPad * pad, GstCaps * filter)
+{
+  GstCaps * caps;
+
+  caps = gst_pad_get_current_caps (pad);
+  if (!caps) {
+    caps = gst_pad_get_pad_template_caps (pad);
+  }
+
+  if (filter) {
+    GstCaps *intersection = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (caps);
+    caps = intersection;
+  }
+
+  return caps;
+}
+
 static gboolean
-gst_avasset_src_query (GstPad *pad, GstQuery *query)
+gst_avasset_src_query (GstPad *pad, GstObject * parent, GstQuery *query)
 {
-  gboolean ret;
-  GstAVAssetSrc *self = GST_AVASSET_SRC (gst_pad_get_parent_element(pad));
+  gboolean ret = FALSE;
+  GstCaps *caps;
+  GstAVAssetSrc *self = GST_AVASSET_SRC (parent);
 
   switch (GST_QUERY_TYPE (query)) {
     case GST_QUERY_URI:
@@ -320,21 +324,37 @@ gst_avasset_src_query (GstPad *pad, GstQuery *query)
       gst_query_set_duration (query, GST_FORMAT_TIME, self->reader.duration);
       ret = TRUE;
       break;
     case GST_QUERY_POSITION:
       gst_query_set_position (query, GST_FORMAT_TIME, self->reader.position);
       ret = TRUE;
       break;
+    case GST_QUERY_SEEKING: {
+      GstFormat fmt;
+
+      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+      if (fmt == GST_FORMAT_TIME) {
+        gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, self->reader.duration);
+        ret = TRUE;
+      }
+      break;
+    }
+    case GST_QUERY_CAPS: {
+      GstCaps *filter = NULL;
+
+      gst_query_parse_caps (query, &filter);
+      caps = gst_avasset_src_get_caps (self, pad, filter);
+      gst_query_set_caps_result (query, caps);
+      ret = TRUE;
+      break;
+    }
     default:
       ret = FALSE;
       break;
   }
-  g_object_unref(self);
 
   return ret;
 }
 
 static gboolean
-gst_avasset_src_event (GstPad * pad, GstEvent * event)
+gst_avasset_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
 {
   GstAVAssetSrc *self;
   gboolean res = TRUE;
@@ -350,16 +370,23 @@ gst_avasset_src_event (GstPad * pad, GstEvent * event)
       gdouble rate;
       GstSeekType start_type, stop_type;
       gint64 start, stop;
-      GstEvent *newsegment;
+      GstSegment segment;
 
-      /* now do the seek */
-      GST_AVASSET_SRC_LOCK (self);
+      GST_DEBUG ("Processing SEEK event");
+
+      GST_AVF_ASSET_SRC_LOCK (self);
       if (self->seek_event && gst_event_get_seqnum (event) ==
           gst_event_get_seqnum (self->seek_event)) {
-        GST_AVASSET_SRC_UNLOCK (self);
+        GST_AVF_ASSET_SRC_UNLOCK (self);
         break;
       }
       self->seek_event = gst_event_ref (event);
+      GST_AVF_ASSET_SRC_UNLOCK (self);
+
+      /* pause tasks before re-acquiring the object's lock */
+      gst_avf_asset_src_stop_reading (self);
+      GST_AVF_ASSET_SRC_LOCK (self);
 
       gst_event_parse_seek (event, &rate, &format, &flags, &start_type,
           &start, &stop_type, &stop);
@@ -370,9 +397,9 @@ gst_avasset_src_event (GstPad * pad, GstEvent * event)
         break;
       }
 
-      gst_avasset_src_stop_reading (self);
       if (format != GST_FORMAT_TIME || start_type == GST_SEEK_TYPE_NONE) {
         GST_AVASSET_SRC_UNLOCK(self);
+        gst_avasset_src_start_reading (self);
         res = FALSE;
         break;
       }
@@ -381,10 +408,15 @@ gst_avasset_src_event (GstPad * pad, GstEvent * event)
       }
 
       gst_avasset_src_send_event (self, gst_event_new_flush_start ());
       [self->reader seekTo: start: stop: &error];
-      newsegment = gst_event_new_new_segment (FALSE, rate, GST_FORMAT_TIME,
-          start, stop, start);
-      gst_avasset_src_send_event (self, gst_event_new_flush_stop ());
-      gst_avasset_src_send_event (self, newsegment);
+
+      gst_segment_init (&segment, GST_FORMAT_TIME);
+      segment.rate = rate;
+      segment.start = start;
+      segment.stop = stop;
+      segment.position = start;
+
+      gst_avasset_src_send_event (self, gst_event_new_flush_stop (TRUE));
+      gst_avasset_src_send_event (self, gst_event_new_segment (&segment));
 
       if (error != NULL) {
         GST_ELEMENT_ERROR (self, RESOURCE, SEEK,
@@ -392,13 +424,15 @@ gst_avasset_src_event (GstPad * pad, GstEvent * event)
         g_error_free(error);
         res = FALSE;
       }
-      gst_avasset_src_start_reading (self);
       GST_AVASSET_SRC_UNLOCK (self);
       gst_event_unref (event);
+
+      /* start tasks after releasing the object's lock */
+      gst_avasset_src_start_reading (self);
       break;
     }
     default:
-      res = gst_pad_event_default (pad, event);
+      res = gst_pad_event_default (pad, parent, event);
       break;
   }
@@ -407,19 +441,68 @@ gst_avasset_src_event (GstPad * pad, GstEvent * event)
   return res;
 }
 
+static GstFlowReturn
+gst_avasset_src_send_start_stream (GstAVAssetSrc * self, GstPad * pad)
+{
+  GstEvent *event;
+  gchar *stream_id;
+  GstFlowReturn ret;
+
+  stream_id = gst_pad_create_stream_id (pad, GST_ELEMENT_CAST (self), NULL);
+
+  GST_DEBUG_OBJECT (self, "Pushing STREAM START");
+  event = gst_event_new_stream_start (stream_id);
+  gst_event_set_group_id (event, gst_util_group_id_next ());
+
+  ret = gst_pad_push_event (pad, event);
+  g_free (stream_id);
+
+  return ret;
+}
+
+static GstFlowReturn
+gst_avasset_src_combine_flows (GstAVAssetSrc * self, GstAVAssetReaderMediaType type,
+    GstFlowReturn ret)
+{
+  gboolean has_other_pad;
+  GstFlowReturn last_other_pad_ret;
+
+  GST_AVASSET_SRC_LOCK (self);
+  if (type == GST_AVASSET_READER_MEDIA_TYPE_AUDIO) {
+    self->last_audio_pad_ret = ret;
+    has_other_pad = AVASSET_READER_HAS_VIDEO (ret);
+    last_other_pad_ret = self->last_video_pad_ret;
+  } else if (type == GST_AVASSET_READER_MEDIA_TYPE_VIDEO) {
+    self->last_video_pad_ret = ret;
+    has_other_pad = AVASSET_READER_HAS_AUDIO (ret);
+    last_other_pad_ret = self->last_audio_pad_ret;
+  } else {
+    GST_ERROR ("Unsupported media type");
+    ret = GST_FLOW_ERROR;
+    goto exit;
+  }
+
+  if (!has_other_pad || ret != GST_FLOW_NOT_LINKED)
+    goto exit;
+
+  ret = last_other_pad_ret;
+
+exit:
+  GST_AVASSET_SRC_UNLOCK (self);
+  return ret;
+}
+
 static void
 gst_avasset_src_read_data (GstAVAssetSrc *self, GstPad *pad,
     GstAVAssetReaderMediaType type)
 {
   GstBuffer *buf;
-  GstFlowReturn ret;
+  GstFlowReturn ret, combined_ret;
   GError *error;
 
   OBJC_CALLOUT_BEGIN ();
 
   GST_AVASSET_SRC_LOCK (self);
-  if (self->state == GST_AVASSET_SRC_STATE_STOPPED) {
+  if (self->state != GST_AVASSET_SRC_STATE_READING) {
     GST_AVASSET_SRC_UNLOCK (self);
     goto exit;
   }
@@ -432,19 +515,35 @@ gst_avasset_src_read_data (GstAVAssetSrc *self, GstPad *pad,
       GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Error reading next buffer"),
           ("%s", error->message));
       g_error_free (error);
-      gst_avasset_src_stop_all (self);
+
+      gst_avasset_src_combine_flows (self, type, GST_FLOW_ERROR);
+      gst_pad_pause_task (pad);
       goto exit;
     }
 
     gst_pad_push_event (pad, gst_event_new_eos ());
-    gst_avasset_src_stop_all (self);
+    gst_avasset_src_combine_flows (self, type, GST_FLOW_EOS);
+    gst_pad_pause_task (pad);
     goto exit;
   }
 
   ret = gst_pad_push (pad, buf);
-  if (GST_FLOW_IS_FATAL (ret)) {
-    GST_ELEMENT_ERROR (self, STREAM, FAILED, NULL,
-        ("Error pushing %s buffer on pad", MEDIA_TYPE_TO_STR (type)));
-    gst_avasset_src_stop_all (self);
+  combined_ret = gst_avasset_src_combine_flows (self, type, ret);
+
+  if (ret != GST_FLOW_OK) {
+    GST_WARNING ("Error pushing %s buffer on pad %" GST_PTR_FORMAT
+        ", reason %s", MEDIA_TYPE_TO_STR (type), pad, gst_flow_get_name (ret));
+    if (ret == GST_FLOW_EOS) {
+      gst_pad_push_event (pad, gst_event_new_eos ());
+    }
+    if (combined_ret != GST_FLOW_OK) {
+      GST_ELEMENT_ERROR (self, STREAM, FAILED, ("Internal data stream error."),
+          ("stream stopped reason %s", gst_flow_get_name (ret)));
+    }
+    gst_pad_pause_task (pad);
   }
 
 exit:
@@ -466,7 +565,7 @@ gst_avasset_src_read_video (GstAVAssetSrc *self)
 }
 
 static gboolean
-gst_avasset_src_start_task (GstAVAssetSrc *self, GstTask *task)
+gst_avasset_src_start_reader (GstAVAssetSrc * self)
 {
   GError *error = NULL;
   gboolean ret = TRUE;
@@ -478,45 +577,15 @@ gst_avasset_src_start_task (GstAVAssetSrc *self, GstTask *task)
     GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
         ("AVAssetReader could not start reading"), ("%s", error->message));
     g_error_free (error);
-    gst_avasset_src_stop_all (self);
     ret = FALSE;
     goto exit;
   }
-  gst_task_start (task);
 
 exit:
   OBJC_CALLOUT_END ();
   return ret;
 }
 
-static GstPadLinkReturn
-gst_avasset_src_audio_pad_link (GstPad *pad, GstPad *peer)
-{
-  GstAVAssetSrc *self = GST_AVASSET_SRC (gst_pad_get_parent_element (pad));
-
-  if (!gst_avasset_src_start_task (self, self->audio_task)) {
-    return GST_PAD_LINK_REFUSED;
-  }
-  GST_DEBUG ("Started audio streaming task");
-
-  g_object_unref (self);
-  return GST_PAD_LINK_OK;
-}
-
-static GstPadLinkReturn
-gst_avasset_src_video_pad_link (GstPad *pad, GstPad *peer)
-{
-  GstAVAssetSrc *self = GST_AVASSET_SRC (gst_pad_get_parent_element (pad));
-
-  if (!gst_avasset_src_start_task (self, self->video_task)) {
-    return GST_PAD_LINK_REFUSED;
-  }
-  GST_DEBUG ("Started video streaming task");
-
-  g_object_unref (self);
-  return GST_PAD_LINK_OK;
-}
-
 static gboolean
 gst_avasset_src_send_event (GstAVAssetSrc *self, GstEvent *event)
 {
@@ -539,6 +608,8 @@ gst_avasset_src_send_event (GstAVAssetSrc *self, GstEvent *event)
 static void
 gst_avasset_src_start (GstAVAssetSrc *self)
 {
+  GstSegment segment;
+
   OBJC_CALLOUT_BEGIN ();
   if (self->state == GST_AVASSET_SRC_STATE_STARTED) {
     goto exit;
@@ -546,6 +617,9 @@ gst_avasset_src_start (GstAVAssetSrc *self)
 
   GST_DEBUG_OBJECT (self, "Creating pads and starting reader");
 
+  gst_segment_init (&segment, GST_FORMAT_TIME);
+  segment.duration = self->reader.duration;
+
   /* We call AVAssetReader's startReading when the pads are linked
    * and no outputs can be added afterwards, so the tracks must be
    * selected before adding any of the new pads */
@@ -561,31 +635,39 @@ gst_avasset_src_start (GstAVAssetSrc *self)
   if (AVASSET_READER_HAS_AUDIO (self)) {
     self->audiopad = gst_pad_new_from_static_template (&audio_factory, "audio");
     gst_pad_set_query_function (self->audiopad,
-        (GstPadQueryFunction) gst_avasset_src_query);
+        gst_avasset_src_query);
     gst_pad_set_event_function(self->audiopad,
-        (GstPadEventFunction) gst_avasset_src_event);
-    gst_pad_set_link_function(self->audiopad,
-        (GstPadLinkFunction) gst_avasset_src_audio_pad_link);
+        gst_avasset_src_event);
+    gst_pad_use_fixed_caps (self->audiopad);
     gst_pad_set_active (self->audiopad, TRUE);
+    gst_avasset_src_send_start_stream (self, self->audiopad);
     gst_pad_set_caps (self->audiopad,
         [self->reader getCaps: GST_AVASSET_READER_MEDIA_TYPE_AUDIO]);
+    gst_pad_push_event (self->audiopad, gst_event_new_caps (
+        [self->reader getCaps: GST_AVASSET_READER_MEDIA_TYPE_AUDIO]));
+    gst_pad_push_event (self->audiopad, gst_event_new_segment (&segment));
     gst_element_add_pad (GST_ELEMENT (self), self->audiopad);
   }
   if (AVASSET_READER_HAS_VIDEO (self)) {
     self->videopad = gst_pad_new_from_static_template (&video_factory, "video");
     gst_pad_set_query_function (self->videopad,
-        (GstPadQueryFunction) gst_avasset_src_query);
+        gst_avasset_src_query);
    gst_pad_set_event_function(self->videopad,
-        (GstPadEventFunction) gst_avasset_src_event);
-    gst_pad_set_link_function(self->videopad,
-        (GstPadLinkFunction) gst_avasset_src_video_pad_link);
+        gst_avasset_src_event);
+    gst_pad_use_fixed_caps (self->videopad);
     gst_pad_set_active (self->videopad, TRUE);
+    gst_avasset_src_send_start_stream (self, self->videopad);
     gst_pad_set_caps (self->videopad,
         [self->reader getCaps: GST_AVASSET_READER_MEDIA_TYPE_VIDEO]);
+    gst_pad_push_event (self->videopad, gst_event_new_caps (
+        [self->reader getCaps: GST_AVASSET_READER_MEDIA_TYPE_VIDEO]));
+    gst_pad_push_event (self->videopad, gst_event_new_segment (&segment));
     gst_element_add_pad (GST_ELEMENT (self), self->videopad);
   }
   gst_element_no_more_pads (GST_ELEMENT (self));
+  self->state = GST_AVASSET_SRC_STATE_STARTED;
 
 exit:
   OBJC_CALLOUT_END ();
 }
@@ -600,61 +682,81 @@ gst_avasset_src_stop (GstAVAssetSrc *self)
     goto exit;
   }
 
-  GST_DEBUG ("Stop reading");
+  GST_DEBUG ("Stopping tasks and removing pads");
 
   has_audio = AVASSET_READER_HAS_AUDIO (self);
   has_video = AVASSET_READER_HAS_VIDEO (self);
   [self->reader stop];
 
   if (has_audio) {
+    gst_pad_stop_task (self->audiopad);
     gst_element_remove_pad (GST_ELEMENT (self), self->audiopad);
-    gst_task_stop (self->audio_task);
   }
 
   if (has_video) {
+    gst_pad_stop_task (self->videopad);
     gst_element_remove_pad (GST_ELEMENT (self), self->videopad);
-    gst_task_stop (self->video_task);
   }
+
+  self->state = GST_AVASSET_SRC_STATE_STOPPED;
 
 exit:
   OBJC_CALLOUT_END ();
 }
 
-static void
+static gboolean
 gst_avasset_src_start_reading (GstAVAssetSrc *self)
 {
-  /* Only start the task here if we were playing before, otherwise wait until
-   * the pad are linked to start the streaming task.
-   */
-  if (self->state < GST_AVASSET_SRC_STATE_STARTED) {
-    return;
+  gboolean ret = TRUE;
+
+  if (self->state != GST_AVASSET_SRC_STATE_STARTED) {
+    goto exit;
   }
 
   GST_DEBUG_OBJECT (self, "Start reading");
 
-  if (GST_TASK_STATE (self->audio_task) == GST_TASK_PAUSED) {
-    gst_task_start (self->audio_task);
+  if ((ret = gst_avasset_src_start_reader (self)) != TRUE) {
+    goto exit;
   }
-  if (GST_TASK_STATE (self->video_task) == GST_TASK_PAUSED) {
-    gst_task_start (self->video_task);
+
+  if (AVASSET_READER_HAS_AUDIO (self)) {
+    ret = gst_pad_start_task (self->audiopad, (GstTaskFunction)gst_avasset_src_read_audio, self, NULL);
+    if (!ret) {
+      GST_ERROR ("Failed to start audio task");
+      goto exit;
+    }
   }
+
+  if (AVASSET_READER_HAS_VIDEO (self)) {
+    ret = gst_pad_start_task (self->videopad, (GstTaskFunction)gst_avasset_src_read_video, self, NULL);
+    if (!ret) {
+      GST_ERROR ("Failed to start video task");
+      goto exit;
+    }
+  }
+
+  self->state = GST_AVASSET_SRC_STATE_READING;
+
+exit:
+  return ret;
 }
 
 static void
-gst_avasset_src_stop_reading (GstAVAssetSrc *self)
+gst_avasset_src_stop_reading (GstAVAssetSrc * self)
 {
   if (self->state != GST_AVASSET_SRC_STATE_READING) {
     return;
   }
 
   GST_DEBUG_OBJECT (self, "Stop reading");
 
-  if (GST_TASK_STATE (self->audio_task) == GST_TASK_STARTED) {
-    gst_task_pause (self->audio_task);
+  if (AVASSET_READER_HAS_AUDIO (self)) {
+    gst_pad_pause_task (self->audiopad);
   }
-  if (GST_TASK_STATE (self->video_task) == GST_TASK_STARTED) {
-    gst_task_pause (self->video_task);
+  if (AVASSET_READER_HAS_VIDEO (self)) {
+    gst_pad_pause_task (self->videopad);
   }
+
+  self->state = GST_AVASSET_SRC_STATE_STARTED;
 }
 
 static void
@@ -663,7 +765,6 @@ gst_avasset_src_stop_all (GstAVAssetSrc *self)
   GST_AVASSET_SRC_LOCK (self);
   gst_avasset_src_stop_reading (self);
   gst_avasset_src_stop (self);
-  self->state = GST_AVASSET_SRC_STATE_STOPPED;
   GST_AVASSET_SRC_UNLOCK (self);
 }
@@ -679,30 +780,29 @@ gst_avasset_src_error_quark (void)
 }
 
 static GstURIType
-gst_avasset_src_uri_get_type (void)
+gst_avasset_src_uri_get_type (GType type)
 {
   return GST_URI_SRC;
 }
 
-static gchar **
-gst_avasset_src_uri_get_protocols (void)
+static const gchar * const *
+gst_avasset_src_uri_get_protocols (GType type)
 {
-  static gchar *protocols[] = { (gchar *) "file",
-      (gchar *) "ipod-library", NULL };
+  static const gchar * const protocols[] = { "file", "ipod-library", NULL };
 
   return protocols;
 }
 
-static const gchar *
+static gchar *
 gst_avasset_src_uri_get_uri (GstURIHandler * handler)
 {
   GstAVAssetSrc *self = GST_AVASSET_SRC (handler);
 
-  return self->uri;
+  return g_strdup (self->uri);
 }
 
 static gboolean
-gst_avasset_src_uri_set_uri (GstURIHandler * handler, const gchar * uri)
+gst_avasset_src_uri_set_uri (GstURIHandler * handler, const gchar * uri, GError **error)
 {
   GstAVAssetSrc *self = GST_AVASSET_SRC (handler);
   NSString *str;
@@ -724,6 +824,9 @@ gst_avasset_src_uri_set_uri (GstURIHandler * handler, const gchar * uri)
       g_free (self->uri);
     }
     self->uri = g_strdup (uri);
+  } else {
+    g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+        "Invalid URI '%s' for ios_assetsrc", self->uri);
   }
   OBJC_CALLOUT_END ();
   return ret;
@@ -747,24 +850,21 @@ gst_avasset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
 
 - (NSDictionary *) capsToAudioSettings
 {
-  gint samplerate, channels, depth, width;
+  gint depth;
   gboolean isFloat;
-  GstStructure *s;
+  GstAudioInfo info;
 
   if (!gst_caps_is_fixed (audio_caps))
     return NULL;
 
-  s = gst_caps_get_structure (audio_caps, 0);
-  isFloat = gst_structure_has_name (s, "audio/x-raw-float");
-  gst_structure_get_int (s, "rate", &samplerate);
-  gst_structure_get_int (s, "depth", &depth);
-  gst_structure_get_int (s, "width", &width);
-  gst_structure_get_int (s, "channels", &channels);
+  gst_audio_info_from_caps (&info, audio_caps);
+  isFloat = GST_AUDIO_INFO_IS_FLOAT(&info);
+  depth = GST_AUDIO_INFO_DEPTH(&info);
 
   return [NSDictionary dictionaryWithObjectsAndKeys:
       [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
-      [NSNumber numberWithFloat: samplerate], AVSampleRateKey,
-      [NSNumber numberWithInt:channels], AVNumberOfChannelsKey,
+      [NSNumber numberWithFloat:info.rate], AVSampleRateKey,
+      [NSNumber numberWithInt:info.channels], AVNumberOfChannelsKey,
       //[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)],
       //AVChannelLayoutKey,
       [NSNumber numberWithInt:depth], AVLinearPCMBitDepthKey,
@@ -801,8 +901,8 @@ gst_avasset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
   audio_tracks = [[asset tracksWithMediaType:AVMediaTypeAudio] retain];
   video_tracks = [[asset tracksWithMediaType:AVMediaTypeVideo] retain];
   reader.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
-  GST_INFO ("Found %d video tracks and %d audio tracks",
-      [video_tracks count], [audio_tracks count]);
+  GST_INFO ("Found %lu video tracks and %lu audio tracks",
+      (unsigned long)[video_tracks count], (unsigned long)[audio_tracks count]);
 }
 
 - (id) initWithURI:(gchar*)uri : (GError **)error;
@@ -835,10 +935,8 @@ gst_avasset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
 
   /* FIXME: use fixed caps here until we found a way to determine
    * the native audio format */
-  audio_caps = gst_caps_from_string ("audio/x-raw-float, "
-      "endianness = (int) 1234, signed = (boolean) TRUE, "
-      "rate = (int) 44100, channels = (int) 2, "
-      "width= (int) 32, depth = (int) 32");
+  audio_caps = gst_caps_from_string ("audio/x-raw, "
+      "format=F32LE, rate=44100, channels=2, layout=interleaved");
 
   [self initReader: error];
   if (*error) {
@@ -985,14 +1083,13 @@ gst_avasset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
     return NULL;
   }
 
-  buf = gst_core_media_buffer_new (cmbuf);
+  buf = gst_core_media_buffer_new (cmbuf, FALSE);
   dur = CMSampleBufferGetDuration (cmbuf);
   ts = CMSampleBufferGetPresentationTimeStamp (cmbuf);
   if (dur.value != 0) {
     GST_BUFFER_DURATION (buf) = CMTIME_TO_GST_TIME (dur);
   }
   GST_BUFFER_TIMESTAMP (buf) = CMTIME_TO_GST_TIME (ts);
-  gst_buffer_set_caps (buf, caps);
 
   GST_LOG ("Copying next %s buffer ts:%" GST_TIME_FORMAT " dur:%"
       GST_TIME_FORMAT, MEDIA_TYPE_TO_STR (type),
      GST_TIME_ARGS(GST_BUFFER_TIMESTAMP (buf)),
@@ -1016,8 +1113,8 @@ gst_avasset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
 
   track = [video_tracks objectAtIndex: selected_video_track];
   gst_util_double_to_fraction(track.nominalFrameRate, &fr_n, &fr_d);
-  caps = gst_caps_new_simple ("video/x-raw-yuv",
-      "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('N', 'V', '1', '2'),
+  caps = gst_caps_new_simple ("video/x-raw",
+      "format", G_TYPE_STRING, "NV12",
       "width", G_TYPE_INT, (int) track.naturalSize.width,
       "height", G_TYPE_INT, (int) track.naturalSize.height,
      "framerate", GST_TYPE_FRACTION, fr_n, fr_d, NULL);

sys/applemedia/plugin.m

@@ -24,12 +24,12 @@
 #include <Foundation/Foundation.h>
 
 #ifdef HAVE_IOS
 #include "iosassetsrc.h"
-#include "iosavassetsrc.h"
 #else
 #include "qtkitvideosrc.h"
 #endif
 #ifdef HAVE_AVFOUNDATION
 #include "avfvideosrc.h"
+#include "iosavassetsrc.h"
 #endif
 #ifndef HAVE_IOS
 #include "vth264decbin.h"
@@ -59,8 +59,6 @@ plugin_init (GstPlugin * plugin)
 #ifdef HAVE_IOS
   res &= gst_element_register (plugin, "iosassetsrc", GST_RANK_SECONDARY,
       GST_TYPE_IOS_ASSET_SRC);
-  res &= gst_element_register (plugin, "iosavassetsrc", GST_RANK_PRIMARY,
-      GST_TYPE_AVASSET_SRC);
 #else
   enable_mt_mode ();
@@ -71,6 +69,8 @@ plugin_init (GstPlugin * plugin)
 #ifdef HAVE_AVFOUNDATION
   res &= gst_element_register (plugin, "avfvideosrc", GST_RANK_PRIMARY,
       GST_TYPE_AVF_VIDEO_SRC);
+  res &= gst_element_register (plugin, "iosavassetsrc", GST_RANK_PRIMARY,
+      GST_TYPE_AVASSET_SRC);
 #endif
 #if 0