Merge remote-tracking branch 'origin/master' into 0.11

Conflicts:
	bindings/python/Makefile.am
	bindings/python/ges.override
	bindings/python/gesmodule.c
	configure.ac
This commit is contained in:
Edward Hervey 2011-12-30 17:24:37 +01:00
commit a32f1bd4dc
4 changed files with 96 additions and 24 deletions

View file

@@ -63,9 +63,9 @@ extra_files =
# CFLAGS and LDFLAGS for compiling scan program. Only needed if your app/lib # CFLAGS and LDFLAGS for compiling scan program. Only needed if your app/lib
# contains GtkObjects/GObjects and you want to document signals and properties. # contains GtkObjects/GObjects and you want to document signals and properties.
GTKDOC_CFLAGS = -I$(top_srcdir) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(GST_PBUTILS_CFLAGS) GTKDOC_CFLAGS = -I$(top_srcdir) $(GST_PBUTILS_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
GTKDOC_LIBS = \ GTKDOC_LIBS = \
$(top_builddir)/ges/libges-@GST_MAJORMINOR@.la $(top_builddir)/ges/libges-@GST_MAJORMINOR@.la \
$(GST_BASE_LIBS) $(GST_LIBS) $(GST_BASE_LIBS) $(GST_LIBS)
GTKDOC_CC=$(LIBTOOL) --tag=CC --mode=compile $(CC) GTKDOC_CC=$(LIBTOOL) --tag=CC --mode=compile $(CC)

View file

@@ -928,6 +928,44 @@ ges_timeline_pipeline_preview_set_video_sink (GESTimelinePipeline * self,
g_object_set (self->priv->playsink, "video-sink", sink, NULL); g_object_set (self->priv->playsink, "video-sink", sink, NULL);
}; };
/**
 * ges_timeline_pipeline_preview_get_audio_sink:
 * @self: a #GESTimelinePipeline
 *
 * Obtains a pointer to playsink's audio sink element that is used for
 * playing back audio when the #GESTimelinePipeline is in %TIMELINE_MODE_PREVIEW
 *
 * The caller is responsible for unreffing the returned element with
 * #gst_object_unref.
 *
 * Returns: (transfer full): a pointer to the playsink audio sink #GstElement
 */
GstElement *
ges_timeline_pipeline_preview_get_audio_sink (GESTimelinePipeline * self)
{
  GstElement *sink = NULL;

  /* g_object_get hands back a new reference (matches the (transfer full)
   * annotation above); sink stays NULL if the property is unset */
  g_object_get (self->priv->playsink, "audio-sink", &sink, NULL);

  return sink;
}
/**
 * ges_timeline_pipeline_preview_set_audio_sink:
 * @self: a #GESTimelinePipeline in %GST_STATE_NULL
 * @sink: (transfer none): an audio sink #GstElement
 *
 * Sets playsink's audio sink element that is used for playing back audio when
 * the #GESTimelinePipeline is in %TIMELINE_MODE_PREVIEW
 */
void
ges_timeline_pipeline_preview_set_audio_sink (GESTimelinePipeline * self,
    GstElement * sink)
{
  /* the property setter refs @sink itself, per the (transfer none) contract */
  g_object_set (self->priv->playsink, "audio-sink", sink, NULL);
}
static gboolean static gboolean
play_sink_multiple_seeks_send_event (GstElement * element, GstEvent * event) play_sink_multiple_seeks_send_event (GstElement * element, GstEvent * event)
{ {

View file

@@ -106,6 +106,13 @@ void
ges_timeline_pipeline_preview_set_video_sink (GESTimelinePipeline * self, ges_timeline_pipeline_preview_set_video_sink (GESTimelinePipeline * self,
GstElement * sink); GstElement * sink);
GstElement *
ges_timeline_pipeline_preview_get_audio_sink (GESTimelinePipeline * self);
void
ges_timeline_pipeline_preview_set_audio_sink (GESTimelinePipeline * self,
GstElement * sink);
G_END_DECLS G_END_DECLS
#endif /* _GES_TIMELINE_PIPELINE */ #endif /* _GES_TIMELINE_PIPELINE */

View file

@@ -129,44 +129,50 @@ make_encoding_profile (gchar * audio, gchar * video, gchar * video_restriction,
NULL); NULL);
gst_caps_unref (caps); gst_caps_unref (caps);
caps = gst_caps_from_string (audio); if (audio) {
stream = (GstEncodingProfile *) caps = gst_caps_from_string (audio);
gst_encoding_audio_profile_new (caps, audio_preset, NULL, 0); stream = (GstEncodingProfile *)
gst_encoding_container_profile_add_profile (profile, stream); gst_encoding_audio_profile_new (caps, audio_preset, NULL, 0);
gst_caps_unref (caps); gst_encoding_container_profile_add_profile (profile, stream);
gst_caps_unref (caps);
}
caps = gst_caps_from_string (video); if (video) {
stream = (GstEncodingProfile *) caps = gst_caps_from_string (video);
gst_encoding_video_profile_new (caps, video_preset, NULL, 0); stream = (GstEncodingProfile *)
if (video_restriction) gst_encoding_video_profile_new (caps, video_preset, NULL, 0);
gst_encoding_profile_set_restriction (stream, if (video_restriction)
gst_caps_from_string (video_restriction)); gst_encoding_profile_set_restriction (stream,
gst_encoding_container_profile_add_profile (profile, stream); gst_caps_from_string (video_restriction));
gst_caps_unref (caps); gst_encoding_container_profile_add_profile (profile, stream);
gst_caps_unref (caps);
}
return (GstEncodingProfile *) profile; return (GstEncodingProfile *) profile;
} }
static GESTimeline * static GESTimeline *
create_timeline (int nbargs, gchar ** argv) create_timeline (int nbargs, gchar ** argv, gchar * audio, gchar * video)
{ {
GESTimelineLayer *layer; GESTimelineLayer *layer;
GESTrack *tracka, *trackv; GESTrack *tracka = NULL, *trackv = NULL;
GESTimeline *timeline; GESTimeline *timeline;
guint i; guint i;
timeline = ges_timeline_new (); timeline = ges_timeline_new ();
tracka = ges_track_audio_raw_new (); if (audio)
trackv = ges_track_video_raw_new (); tracka = ges_track_audio_raw_new ();
if (video)
trackv = ges_track_video_raw_new ();
/* We are only going to be doing one layer of timeline objects */ /* We are only going to be doing one layer of timeline objects */
layer = (GESTimelineLayer *) ges_simple_timeline_layer_new (); layer = (GESTimelineLayer *) ges_simple_timeline_layer_new ();
/* Add the tracks and the layer to the timeline */ /* Add the tracks and the layer to the timeline */
if (!ges_timeline_add_layer (timeline, layer) || if (!ges_timeline_add_layer (timeline, layer) ||
!ges_timeline_add_track (timeline, tracka) || !(!audio || ges_timeline_add_track (timeline, tracka)) ||
!ges_timeline_add_track (timeline, trackv)) !(!video || ges_timeline_add_track (timeline, trackv)))
goto build_failure; goto build_failure;
/* Here we've finished initializing our timeline, we're /* Here we've finished initializing our timeline, we're
@@ -264,7 +270,8 @@ build_failure:
} }
static GESTimelinePipeline * static GESTimelinePipeline *
create_pipeline (gchar * load_path, gchar * save_path, int argc, char **argv) create_pipeline (gchar * load_path, gchar * save_path, int argc, char **argv,
gchar * audio, gchar * video)
{ {
GESTimelinePipeline *pipeline = NULL; GESTimelinePipeline *pipeline = NULL;
GESTimeline *timeline = NULL; GESTimeline *timeline = NULL;
@@ -288,7 +295,7 @@ create_pipeline (gchar * load_path, gchar * save_path, int argc, char **argv)
g_free (uri); g_free (uri);
} else } else
/* Normal timeline creation */ /* Normal timeline creation */
if (!(timeline = create_timeline (argc, argv))) if (!(timeline = create_timeline (argc, argv, audio, video)))
goto failure; goto failure;
/* save project if path is given. we do this now in case GES crashes or /* save project if path is given. we do this now in case GES crashes or
@@ -400,11 +407,13 @@ main (int argc, gchar ** argv)
gchar *video_restriction = (gchar *) "ANY"; gchar *video_restriction = (gchar *) "ANY";
gchar *audio_preset = NULL; gchar *audio_preset = NULL;
gchar *video_preset = NULL; gchar *video_preset = NULL;
gchar *exclude_args = NULL;
static gboolean render = FALSE; static gboolean render = FALSE;
static gboolean smartrender = FALSE; static gboolean smartrender = FALSE;
static gboolean list_transitions = FALSE; static gboolean list_transitions = FALSE;
static gboolean list_patterns = FALSE; static gboolean list_patterns = FALSE;
static gdouble thumbinterval = 0; static gdouble thumbinterval = 0;
static gboolean verbose = FALSE;
gchar *save_path = NULL; gchar *save_path = NULL;
gchar *load_path = NULL; gchar *load_path = NULL;
GOptionEntry options[] = { GOptionEntry options[] = {
@@ -438,6 +447,10 @@ main (int argc, gchar ** argv)
"Save project to file before rendering", "<path>"}, "Save project to file before rendering", "<path>"},
{"load", 'q', 0, G_OPTION_ARG_STRING, &load_path, {"load", 'q', 0, G_OPTION_ARG_STRING, &load_path,
"Load project from file before rendering", "<path>"}, "Load project from file before rendering", "<path>"},
{"verbose", 0, 0, G_OPTION_ARG_NONE, &verbose,
"Output status information and property notifications", NULL},
{"exclude", 'X', 0, G_OPTION_ARG_NONE, &exclude_args,
"Do not output status information of TYPE", "TYPE1,TYPE2,..."},
{NULL} {NULL}
}; };
GOptionContext *ctx; GOptionContext *ctx;
@@ -497,8 +510,15 @@ main (int argc, gchar ** argv)
g_option_context_free (ctx); g_option_context_free (ctx);
/* normalize */
if (strcmp (audio, "none") == 0)
audio = NULL;
if (strcmp (video, "none") == 0)
video = NULL;
/* Create the pipeline */ /* Create the pipeline */
pipeline = create_pipeline (load_path, save_path, argc - 1, argv + 1); pipeline = create_pipeline (load_path, save_path, argc - 1, argv + 1,
audio, video);
if (!pipeline) if (!pipeline)
exit (1); exit (1);
@@ -521,6 +541,13 @@ main (int argc, gchar ** argv)
ges_timeline_pipeline_set_mode (pipeline, TIMELINE_MODE_PREVIEW); ges_timeline_pipeline_set_mode (pipeline, TIMELINE_MODE_PREVIEW);
} }
if (verbose) {
gchar **exclude_list =
exclude_args ? g_strsplit (exclude_args, ",", 0) : NULL;
g_signal_connect (pipeline, "deep-notify",
G_CALLBACK (gst_object_default_deep_notify), exclude_list);
}
/* Play the pipeline */ /* Play the pipeline */
mainloop = g_main_loop_new (NULL, FALSE); mainloop = g_main_loop_new (NULL, FALSE);