ges: Add support for gessrc as subtimeline element

Until now the subtimeline element has always been `gesdemux`. The
behavior when a subtimeline is a source is different, so we need to
support that case as well.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/4882>
Author: Thibault Saunier, 2023-06-17 07:40:42 -04:00 (committed by GStreamer Marge Bot)
Parent: a5d5dd2ab4
Commit: 2b3757402b
4 changed files with 67 additions and 24 deletions
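
As background, a minimal sketch of the scenario this change targets: a clip whose URI points to a nested `.xges` project, which `uridecodebin` resolves through a GES subtimeline element (`gesdemux`, or `gessrc` when the subtimeline acts as the source). This program is not part of the commit; the project path is a placeholder and error handling is omitted.

#include <ges/ges.h>

int
main (int argc, char **argv)
{
  gst_init (&argc, &argv);
  ges_init ();

  GESTimeline *timeline = ges_timeline_new_audio_video ();
  GESLayer *layer = ges_timeline_append_layer (timeline);

  /* Placeholder URI: a serialized GES project used as a nested clip.
   * uridecodebin exposes it through a GES subtimeline element. */
  GESUriClip *clip =
      ges_uri_clip_new ("file:///path/to/nested-project.xges");
  ges_layer_add_clip (layer, GES_CLIP (clip));
  ges_timeline_commit (timeline);

  GESPipeline *pipeline = ges_pipeline_new ();
  ges_pipeline_set_timeline (pipeline, timeline);
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  /* Wait until playback of the nested timeline finishes or fails. */
  GstBus *bus = gst_element_get_bus (GST_ELEMENT (pipeline));
  GstMessage *msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
  gst_clear_message (&msg);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  gst_object_unref (bus);
  gst_object_unref (pipeline);

  return 0;
}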


@@ -188,6 +188,9 @@ ges_timeline_set_smart_rendering (GESTimeline * timeline, gboolean rendering_sma
 G_GNUC_INTERNAL gboolean
 ges_timeline_get_smart_rendering (GESTimeline *timeline);
 
+G_GNUC_INTERNAL GstStreamCollection*
+ges_timeline_get_stream_collection (GESTimeline *timeline);
+
 G_GNUC_INTERNAL void
 ges_auto_transition_set_source (GESAutoTransition * self, GESTrackElement * source, GESEdge edge);
 
@@ -497,6 +500,7 @@ ges_source_get_rendering_smartly (GESSource *source);
 
 G_GNUC_INTERNAL void ges_track_set_smart_rendering (GESTrack* track, gboolean rendering_smartly);
 G_GNUC_INTERNAL GstElement * ges_track_get_composition (GESTrack *track);
+G_GNUC_INTERNAL void ges_track_select_subtimeline_streams (GESTrack *track, GstStreamCollection *collection, GstElement *subtimeline);
 
 /*********************************************


@@ -2159,6 +2159,12 @@ ges_timeline_get_smart_rendering (GESTimeline * timeline)
   return timeline->priv->rendering_smartly;
 }
 
+GstStreamCollection *
+ges_timeline_get_stream_collection (GESTimeline * timeline)
+{
+  return gst_object_ref (timeline->priv->stream_collection);
+}
+
 /**** API *****/
 /**
  * ges_timeline_new:


@@ -454,40 +454,50 @@ ges_track_change_state (GstElement * element, GstStateChange transition)
       transition);
 }
 
+void
+ges_track_select_subtimeline_streams (GESTrack * track,
+    GstStreamCollection * collection, GstElement * subtimeline)
+{
+  GList *selected_streams = NULL;
+
+  for (gint i = 0; i < gst_stream_collection_get_size (collection); i++) {
+    GstStream *stream = gst_stream_collection_get_stream (collection, i);
+    GstStreamType stype = gst_stream_get_stream_type (stream);
+
+    if ((track->type == GES_TRACK_TYPE_VIDEO && stype == GST_STREAM_TYPE_VIDEO)
+        || (track->type == GES_TRACK_TYPE_AUDIO
+            && stype == GST_STREAM_TYPE_AUDIO)
+        || (stype == GST_STREAM_TYPE_UNKNOWN)) {
+      selected_streams =
+          g_list_append (selected_streams,
+          g_strdup (gst_stream_get_stream_id (stream)));
+    }
+  }
+
+  if (selected_streams) {
+    gst_element_send_event (subtimeline,
+        gst_event_new_select_streams (selected_streams));
+    g_list_free_full (selected_streams, g_free);
+  }
+}
+
 static void
 ges_track_handle_message (GstBin * bin, GstMessage * message)
 {
   GESTrack *track = GES_TRACK (bin);
 
   if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_STREAM_COLLECTION) {
-    gint i;
-    GList *selected_streams = NULL;
     GstStreamCollection *collection;
 
     gst_message_parse_stream_collection (message, &collection);
 
-    for (i = 0; i < gst_stream_collection_get_size (collection); i++) {
-      GstStream *stream = gst_stream_collection_get_stream (collection, i);
-      GstStreamType stype = gst_stream_get_stream_type (stream);
-
-      if ((track->type == GES_TRACK_TYPE_VIDEO
-              && stype == GST_STREAM_TYPE_VIDEO)
-          || (track->type == GES_TRACK_TYPE_AUDIO
-              && stype == GST_STREAM_TYPE_AUDIO)
-          || (stype == GST_STREAM_TYPE_UNKNOWN)) {
-        selected_streams =
-            g_list_append (selected_streams,
-            (gchar *) gst_stream_get_stream_id (stream));
-      }
-    }
-
-    if (selected_streams) {
-      gst_element_send_event (GST_ELEMENT (GST_MESSAGE_SRC (message)),
-          gst_event_new_select_streams (selected_streams));
-      g_list_free (selected_streams);
+    if (GES_IS_TIMELINE (GST_MESSAGE_SRC (message))) {
+      ges_track_select_subtimeline_streams (track, collection,
+          GST_ELEMENT (GST_MESSAGE_SRC (message)));
     }
   }
 
   gst_element_post_message (GST_ELEMENT_CAST (bin), message);
 }


@@ -123,6 +123,27 @@ done:
   return res;
 }
 
+static void
+source_setup_cb (GstElement * decodebin, GstElement * source,
+    GESUriSource * self)
+{
+  GstElementFactory *factory = gst_element_get_factory (source);
+
+  if (!factory || g_strcmp0 (GST_OBJECT_NAME (factory), "gessrc")) {
+    return;
+  }
+
+  GESTrack *track = ges_track_element_get_track (self->element);
+  GESTimeline *subtimeline;
+
+  g_object_get (source, "timeline", &subtimeline, NULL);
+  GstStreamCollection *subtimeline_collection =
+      ges_timeline_get_stream_collection (subtimeline);
+
+  ges_track_select_subtimeline_streams (track, subtimeline_collection,
+      GST_ELEMENT (subtimeline));
+}
+
 GstElement *
 ges_uri_source_create_source (GESUriSource * self)
 {
@@ -139,13 +160,15 @@ ges_uri_source_create_source (GESUriSource * self)
   if (track)
     caps = ges_track_get_caps (track);
 
+  g_signal_connect (decodebin, "source-setup",
+      G_CALLBACK (source_setup_cb), self);
   g_object_set (decodebin, "caps", caps,
       "expose-all-streams", FALSE, "uri", self->uri, NULL);
 
   g_signal_connect (decodebin, "autoplug-select",
       G_CALLBACK (autoplug_select_cb), self);
 
   return decodebin;
 }
 
 static void