ges: Avoid adding unnecessary converters for nested timelines

Basically we know that if we are using mixing, the compositor will be
able to do video conversion and scaling for us, so avoid adding those
useless elements.

This optimizes a lot caps negotiation for deeply nested timelines.
This commit is contained in:
Thibault Saunier 2020-02-10 18:05:38 -03:00
parent f30b7f05b5
commit 0be8bc9d98
7 changed files with 72 additions and 34 deletions

View file

@ -125,6 +125,7 @@ ges_audio_source_create_element (GESTrackElement * trksrc)
GstElement *volume, *vbin;
GstElement *topbin;
GstElement *sub_element;
GPtrArray *elements;
GESAudioSourceClass *source_class = GES_AUDIO_SOURCE_GET_CLASS (trksrc);
const gchar *props[] = { "volume", "mute", NULL };
GESAudioSource *self = GES_AUDIO_SOURCE (trksrc);
@ -139,7 +140,10 @@ ges_audio_source_create_element (GESTrackElement * trksrc)
gst_parse_bin_from_description
("audioconvert ! audioresample ! volume name=v ! capsfilter name=audio-track-caps-filter",
TRUE, NULL);
topbin = ges_source_create_topbin ("audiosrcbin", sub_element, vbin, NULL);
elements = g_ptr_array_new ();
g_ptr_array_add (elements, vbin);
topbin = ges_source_create_topbin ("audiosrcbin", sub_element, elements);
g_ptr_array_free (elements, TRUE);
volume = gst_bin_get_by_name (GST_BIN (vbin), "v");
self->priv->capsfilter = gst_bin_get_by_name (GST_BIN (vbin),
"audio-track-caps-filter");

View file

@ -403,9 +403,9 @@ G_GNUC_INTERNAL void ges_track_element_copy_bindings (GESTrackElement *element,
GESTrackElement *new_element,
guint64 position);
G_GNUC_INTERNAL GstElement *ges_source_create_topbin (const gchar * bin_name, GstElement * sub_element, ...);
G_GNUC_INTERNAL void ges_track_set_caps (GESTrack *track,
const GstCaps *caps);
G_GNUC_INTERNAL GstElement* ges_source_create_topbin(const gchar* bin_name, GstElement* sub_element, GPtrArray* elements);
G_GNUC_INTERNAL void ges_track_set_caps(GESTrack* track,
const GstCaps* caps);
G_GNUC_INTERNAL GstElement * ges_track_get_composition (GESTrack *track);

View file

@ -80,21 +80,21 @@ _ghost_pad_added_cb (GstElement * element, GstPad * srcpad, GstElement * bin)
}
GstElement *
ges_source_create_topbin (const gchar * bin_name, GstElement * sub_element, ...)
ges_source_create_topbin (const gchar * bin_name, GstElement * sub_element,
GPtrArray * elements)
{
va_list argp;
GstElement *element;
GstElement *prev = NULL;
GstElement *first = NULL;
GstElement *bin;
GstPad *sub_srcpad;
gint i;
va_start (argp, sub_element);
bin = gst_bin_new (bin_name);
gst_bin_add (GST_BIN (bin), sub_element);
while ((element = va_arg (argp, GstElement *)) != NULL) {
for (i = 0; i < elements->len; i++) {
element = elements->pdata[i];
gst_bin_add (GST_BIN (bin), element);
if (prev) {
if (!gst_element_link_pads_full (prev, "src", element, "sink",
@ -102,15 +102,12 @@ ges_source_create_topbin (const gchar * bin_name, GstElement * sub_element, ...)
g_error ("Could not link %s and %s",
GST_OBJECT_NAME (prev), GST_OBJECT_NAME (element));
}
}
prev = element;
if (first == NULL)
first = element;
}
va_end (argp);
sub_srcpad = gst_element_get_static_pad (sub_element, "src");
if (prev != NULL) {

View file

@ -96,15 +96,15 @@ ges_video_source_create_element (GESTrackElement * trksrc)
{
GstElement *topbin;
GstElement *sub_element;
GstElement *queue = gst_element_factory_make ("queue", NULL);
GESVideoSourceClass *source_class = GES_VIDEO_SOURCE_GET_CLASS (trksrc);
GESVideoSource *self;
GstElement *positioner, *videoflip, *videoscale, *videorate, *capsfilter,
*videoconvert, *deinterlace;
GstElement *positioner, *videoflip, *capsfilter, *deinterlace;
const gchar *positioner_props[] =
{ "alpha", "posx", "posy", "width", "height", NULL };
const gchar *deinterlace_props[] = { "mode", "fields", "tff", NULL };
const gchar *videoflip_props[] = { "video-direction", NULL };
gboolean needs_converters = TRUE;
GPtrArray *elements;
if (!source_class->create_source)
return NULL;
@ -112,26 +112,36 @@ ges_video_source_create_element (GESTrackElement * trksrc)
sub_element = source_class->create_source (trksrc);
self = (GESVideoSource *) trksrc;
if (source_class->ABI.abi.needs_converters)
needs_converters = source_class->ABI.abi.needs_converters (self);
elements = g_ptr_array_new ();
g_ptr_array_add (elements, gst_element_factory_make ("queue", NULL));
/* That positioner will add metadata to buffers according to its
properties, acting like a proxy for our smart-mixer dynamic pads. */
positioner = gst_element_factory_make ("framepositioner", "frame_tagger");
g_object_set (positioner, "zorder",
G_MAXUINT - GES_TIMELINE_ELEMENT_PRIORITY (self), NULL);
g_ptr_array_add (elements, positioner);
/* If there's image-orientation tag, make sure the image is correctly oriented
* before we scale it. */
videoflip = gst_element_factory_make ("videoflip", "track-element-videoflip");
g_object_set (videoflip, "video-direction", GST_VIDEO_ORIENTATION_AUTO, NULL);
g_ptr_array_add (elements, videoflip);
videoscale =
gst_element_factory_make ("videoscale", "track-element-videoscale");
videoconvert =
gst_element_factory_make ("videoconvert", "track-element-videoconvert");
videorate = gst_element_factory_make ("videorate", "track-element-videorate");
deinterlace = gst_element_factory_make ("deinterlace", "deinterlace");
if (needs_converters) {
g_ptr_array_add (elements, gst_element_factory_make ("videoscale",
"track-element-videoscale"));
g_ptr_array_add (elements, gst_element_factory_make ("videoconvert",
"track-element-videoconvert"));
}
g_ptr_array_add (elements, gst_element_factory_make ("videorate",
"track-element-videorate"));
capsfilter =
gst_element_factory_make ("capsfilter", "track-element-capsfilter");
g_ptr_array_add (elements, capsfilter);
ges_frame_positioner_set_source_and_filter (GST_FRAME_POSITIONNER
(positioner), trksrc, capsfilter);
@ -141,24 +151,20 @@ ges_video_source_create_element (GESTrackElement * trksrc)
ges_track_element_add_children_props (trksrc, videoflip, NULL, NULL,
videoflip_props);
deinterlace = gst_element_factory_make ("deinterlace", "deinterlace");
if (deinterlace == NULL) {
post_missing_element_message (sub_element, "deinterlace");
GST_ELEMENT_WARNING (sub_element, CORE, MISSING_PLUGIN,
("Missing element '%s' - check your GStreamer installation.",
"deinterlace"), ("deinterlacing won't work"));
topbin =
ges_source_create_topbin ("videosrcbin", sub_element, queue,
videoconvert, positioner, videoflip, videoscale, videorate, capsfilter,
NULL);
} else {
g_ptr_array_add (elements, deinterlace);
ges_track_element_add_children_props (trksrc, deinterlace, NULL, NULL,
deinterlace_props);
topbin =
ges_source_create_topbin ("videosrcbin", sub_element, queue,
videoconvert, deinterlace, positioner, videoflip, videoscale, videorate,
capsfilter, NULL);
}
topbin = ges_source_create_topbin ("videosrcbin", sub_element, elements);
g_ptr_array_free (elements, TRUE);
self->priv->positioner = GST_FRAME_POSITIONNER (positioner);
self->priv->positioner->scale_in_compositor =

View file

@ -84,6 +84,7 @@ struct _GESVideoSourceClass {
gpointer _ges_reserved[GES_PADDING];
struct {
gboolean disable_scale_in_compositor;
gboolean (*needs_converters)(GESVideoSource *self);
} abi;
} ABI;
};

View file

@ -64,8 +64,9 @@ ges_video_test_source_create_source (GESTrackElement * self)
{
GstCaps *caps;
gint pattern;
GstElement *testsrc, *capsfilter;
GstElement *testsrc, *capsfilter, *res;
const gchar *props[] = { "pattern", NULL };
GPtrArray *elements;
testsrc = gst_element_factory_make ("videotestsrc", NULL);
capsfilter = gst_element_factory_make ("capsfilter", NULL);
@ -73,13 +74,18 @@ ges_video_test_source_create_source (GESTrackElement * self)
g_object_set (testsrc, "pattern", pattern, NULL);
elements = g_ptr_array_new ();
g_ptr_array_add (elements, capsfilter);
caps = gst_caps_new_empty_simple ("video/x-raw");
g_object_set (capsfilter, "caps", caps, NULL);
gst_caps_unref (caps);
ges_track_element_add_children_props (self, testsrc, NULL, NULL, props);
return ges_source_create_topbin ("videotestsrc", testsrc, capsfilter, NULL);
res = ges_source_create_topbin ("videotestsrc", testsrc, elements);
g_ptr_array_free (elements, TRUE);
return res;
}
/**

View file

@ -91,6 +91,28 @@ ges_video_uri_source_create_source (GESTrackElement * trksrc)
return decodebin;
}
/* Decide whether converter/scaler elements (videoscale/videoconvert) must be
 * inserted in front of this URI video source.
 *
 * Per the commit rationale: when the track mixes, the compositor can do
 * conversion and scaling itself, and a nested timeline's own pipeline already
 * produces suitable output, so the extra converters are unnecessary.
 *
 * Returns: TRUE if converters are needed, FALSE otherwise. */
static gboolean
ges_video_uri_source_needs_converters (GESVideoSource * source)
{
GESTrack *track = ges_track_element_get_track (GES_TRACK_ELEMENT (source));
/* Not yet in a track, or the track performs mixing: converters are
 * needed only when the asset is NOT a nested timeline. */
if (!track || ges_track_get_mixing (track)) {
GESAsset *asset = ges_asset_request (GES_TYPE_URI_CLIP,
GES_VIDEO_URI_SOURCE (source)->uri, NULL);
gboolean is_nested = FALSE;
/* NOTE(review): the assert presumes the asset request cannot fail here —
 * presumably because the asset was already extracted to create this
 * source and is served from cache; confirm against ges_asset_request. */
g_assert (asset);
g_object_get (asset, "is-nested-timeline", &is_nested, NULL);
gst_object_unref (asset);
return !is_nested;
}
/* Track exists and does not mix: no converters are added. */
return FALSE;
}
/* Extractable interface implementation */
static gchar *
@ -100,14 +122,14 @@ ges_extractable_check_id (GType type, const gchar * id, GError ** error)
}
static void
extractable_set_asset (GESExtractable * self, GESAsset * asset)
extractable_set_asset (GESExtractable * extractable, GESAsset * asset)
{
/* FIXME That should go into #GESTrackElement, but
* some work is needed to make sure it works properly */
if (ges_track_element_get_track_type (GES_TRACK_ELEMENT (self)) ==
if (ges_track_element_get_track_type (GES_TRACK_ELEMENT (extractable)) ==
GES_TRACK_TYPE_UNKNOWN) {
ges_track_element_set_track_type (GES_TRACK_ELEMENT (self),
ges_track_element_set_track_type (GES_TRACK_ELEMENT (extractable),
ges_track_element_asset_get_track_type (GES_TRACK_ELEMENT_ASSET
(asset)));
}
@ -194,6 +216,8 @@ ges_video_uri_source_class_init (GESVideoUriSourceClass * klass)
NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY));
source_class->create_source = ges_video_uri_source_create_source;
source_class->ABI.abi.needs_converters =
ges_video_uri_source_needs_converters;
}
static void