ges: Deprecate GESImageSource and GESMultiFileSource

Refactoring GESVideoSource so that #GESUriVideoSource can handle
still images in a simple way

MultiFileSource has been replaced with the new `imagesequencesrc`
element; it was totally broken anyway, as `multifilesrc` cannot seek
properly.
This commit is contained in:
Thibault Saunier 2020-03-24 22:35:35 -03:00
parent f99034a522
commit ddaf67fed3
9 changed files with 157 additions and 69 deletions

View file

@ -24,9 +24,13 @@
* @short_description: outputs the video stream from a media file as a still * @short_description: outputs the video stream from a media file as a still
* image. * image.
* *
* Outputs the video stream from a given file as a still frame. The frame * Outputs the video stream from a given file as a still frame. The frame chosen
* chosen will be determined by the in-point property on the track element. For * will be determined by the in-point property on the track element. For image
* image files, do not set the in-point property. * files, do not set the in-point property.
*
* Deprecated: 1.18: This won't be used anymore and has been replaced by
* #GESUriSource instead which now plugs an `imagefreeze` element when
* #ges_uri_source_asset_is_image returns %TRUE.
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"
@ -182,9 +186,7 @@ ges_image_source_init (GESImageSource * self)
self->priv = ges_image_source_get_instance_private (self); self->priv = ges_image_source_get_instance_private (self);
} }
/** /* @uri: the URI the source should control
* ges_image_source_new:
* @uri: the URI the source should control
* *
* Creates a new #GESImageSource for the provided @uri. * Creates a new #GESImageSource for the provided @uri.
* *
@ -193,6 +195,12 @@ ges_image_source_init (GESImageSource * self)
GESImageSource * GESImageSource *
ges_image_source_new (gchar * uri) ges_image_source_new (gchar * uri)
{ {
return g_object_new (GES_TYPE_IMAGE_SOURCE, "uri", uri, "track-type", GESImageSource *res;
GES_TRACK_TYPE_VIDEO, NULL); GESAsset *asset = ges_asset_request (GES_TYPE_IMAGE_SOURCE, uri, NULL);
res = GES_IMAGE_SOURCE (ges_asset_extract (asset, NULL));
res->uri = g_strdup (uri);
gst_object_unref (asset);
return res;
} }

View file

@ -22,8 +22,14 @@
* @title: GESMultiFileSource * @title: GESMultiFileSource
* @short_description: outputs the video stream from a sequence of images. * @short_description: outputs the video stream from a sequence of images.
* *
* Outputs the video stream from a given image sequence. The start frame * Outputs the video stream from a given image sequence. The start frame chosen
* chosen will be determined by the in-point property on the track element. * will be determined by the in-point property on the track element.
*
* This should not be used anymore, the `imagesequence://` protocol should be
* used instead. Check the #imagesequencesrc GStreamer element for more
* information.
*
* Deprecated: 1.18: Use #GESUriSource instead
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"
@ -257,9 +263,7 @@ ges_multi_file_source_init (GESMultiFileSource * self)
self->priv = ges_multi_file_source_get_instance_private (self); self->priv = ges_multi_file_source_get_instance_private (self);
} }
/** /* @uri: the URI the source should control
* ges_multi_file_source_new:
* @uri: the URI the source should control
* *
* Creates a new #GESMultiFileSource for the provided @uri. * Creates a new #GESMultiFileSource for the provided @uri.
* *
@ -268,6 +272,12 @@ ges_multi_file_source_init (GESMultiFileSource * self)
GESMultiFileSource * GESMultiFileSource *
ges_multi_file_source_new (gchar * uri) ges_multi_file_source_new (gchar * uri)
{ {
return g_object_new (GES_TYPE_MULTI_FILE_SOURCE, "uri", uri, GESMultiFileSource *res;
"track-type", GES_TRACK_TYPE_VIDEO, NULL); GESAsset *asset = ges_asset_request (GES_TYPE_MULTI_FILE_SOURCE, uri, NULL);
res = GES_MULTI_FILE_SOURCE (ges_asset_extract (asset, NULL));
res->uri = g_strdup (uri);
gst_object_unref (asset);
return res;
} }

View file

@ -358,8 +358,8 @@ static void
_create_uri_source_asset (GESUriClipAsset * asset, _create_uri_source_asset (GESUriClipAsset * asset,
GstDiscovererStreamInfo * sinfo, GESTrackType type) GstDiscovererStreamInfo * sinfo, GESTrackType type)
{ {
GESAsset *tck_filesource_asset; GESAsset *src_asset;
GESUriSourceAssetPrivate *priv_tckasset; GESUriSourceAssetPrivate *src_priv;
GESUriClipAssetPrivate *priv = asset->priv; GESUriClipAssetPrivate *priv = asset->priv;
gchar *stream_id = gchar *stream_id =
g_strdup (gst_discoverer_stream_info_get_stream_id (sinfo)); g_strdup (gst_discoverer_stream_info_get_stream_id (sinfo));
@ -371,22 +371,22 @@ _create_uri_source_asset (GESUriClipAsset * asset,
} }
if (type == GES_TRACK_TYPE_VIDEO) if (type == GES_TRACK_TYPE_VIDEO)
tck_filesource_asset = ges_asset_request (GES_TYPE_VIDEO_URI_SOURCE, src_asset = ges_asset_request (GES_TYPE_VIDEO_URI_SOURCE, stream_id, NULL);
stream_id, NULL);
else else
tck_filesource_asset = ges_asset_request (GES_TYPE_AUDIO_URI_SOURCE, src_asset = ges_asset_request (GES_TYPE_AUDIO_URI_SOURCE, stream_id, NULL);
stream_id, NULL);
g_free (stream_id); g_free (stream_id);
priv_tckasset = GES_URI_SOURCE_ASSET (tck_filesource_asset)->priv; src_priv = GES_URI_SOURCE_ASSET (src_asset)->priv;
priv_tckasset->uri = ges_asset_get_id (GES_ASSET (asset)); src_priv->uri = ges_asset_get_id (GES_ASSET (asset));
priv_tckasset->sinfo = gst_object_ref (sinfo); src_priv->sinfo = gst_object_ref (sinfo);
priv_tckasset->parent_asset = asset; src_priv->parent_asset = asset;
ges_track_element_asset_set_track_type (GES_TRACK_ELEMENT_ASSET ges_track_element_asset_set_track_type (GES_TRACK_ELEMENT_ASSET
(tck_filesource_asset), type); (src_asset), type);
priv->asset_trackfilesources = g_list_append (priv->asset_trackfilesources, priv->is_image |=
tck_filesource_asset); ges_uri_source_asset_is_image (GES_URI_SOURCE_ASSET (src_asset));
priv->asset_trackfilesources =
g_list_append (priv->asset_trackfilesources, src_asset);
} }
static void static void
@ -416,9 +416,6 @@ ges_uri_clip_asset_set_info (GESUriClipAsset * self, GstDiscovererInfo * info)
supportedformats = GES_TRACK_TYPE_VIDEO; supportedformats = GES_TRACK_TYPE_VIDEO;
else else
supportedformats |= GES_TRACK_TYPE_VIDEO; supportedformats |= GES_TRACK_TYPE_VIDEO;
if (gst_discoverer_video_info_is_image ((GstDiscovererVideoInfo *)
sinf))
priv->is_image = TRUE;
type = GES_TRACK_TYPE_VIDEO; type = GES_TRACK_TYPE_VIDEO;
} }
@ -812,12 +809,8 @@ _extract (GESAsset * asset, GError ** error)
uri = g_strdup (priv->uri); uri = g_strdup (priv->uri);
if (g_str_has_prefix (priv->uri, GES_MULTI_FILE_URI_PREFIX)) { if (g_str_has_prefix (priv->uri, GES_MULTI_FILE_URI_PREFIX))
trackelement = GES_TRACK_ELEMENT (ges_multi_file_source_new (uri)); trackelement = GES_TRACK_ELEMENT (ges_multi_file_source_new (uri));
} else if (GST_IS_DISCOVERER_VIDEO_INFO (priv->sinfo)
&& gst_discoverer_video_info_is_image ((GstDiscovererVideoInfo *)
priv->sinfo))
trackelement = GES_TRACK_ELEMENT (ges_image_source_new (uri));
else if (GST_IS_DISCOVERER_VIDEO_INFO (priv->sinfo)) else if (GST_IS_DISCOVERER_VIDEO_INFO (priv->sinfo))
trackelement = GES_TRACK_ELEMENT (ges_video_uri_source_new (uri)); trackelement = GES_TRACK_ELEMENT (ges_video_uri_source_new (uri));
else else
@ -906,6 +899,27 @@ ges_uri_source_asset_get_filesource_asset (GESUriSourceAsset * asset)
return asset->priv->parent_asset; return asset->priv->parent_asset;
} }
/**
* ges_uri_source_asset_is_image:
* @asset: A #GESUriClipAsset
*
* Check if @asset contains a single image
*
* Returns: %TRUE if the video stream corresponds to an image (i.e. only
* contains one frame)
*/
gboolean
ges_uri_source_asset_is_image (GESUriSourceAsset * asset)
{
g_return_val_if_fail (GES_IS_URI_SOURCE_ASSET (asset), FALSE);
if (!GST_IS_DISCOVERER_VIDEO_INFO (asset->priv->sinfo))
return FALSE;
return gst_discoverer_video_info_is_image ((GstDiscovererVideoInfo *)
asset->priv->sinfo);
}
void void
_ges_uri_asset_cleanup (void) _ges_uri_asset_cleanup (void)
{ {

View file

@ -108,5 +108,7 @@ GES_API
const gchar * ges_uri_source_asset_get_stream_uri (GESUriSourceAsset *asset); const gchar * ges_uri_source_asset_get_stream_uri (GESUriSourceAsset *asset);
GES_API GES_API
const GESUriClipAsset *ges_uri_source_asset_get_filesource_asset (GESUriSourceAsset *asset); const GESUriClipAsset *ges_uri_source_asset_get_filesource_asset (GESUriSourceAsset *asset);
GES_API
gboolean ges_uri_source_asset_is_image (GESUriSourceAsset *asset);
G_END_DECLS G_END_DECLS

View file

@ -113,31 +113,17 @@ post_missing_element_message (GstElement * element, const gchar * name)
gst_element_post_message (element, msg); gst_element_post_message (element, msg);
} }
static GstElement * static gboolean
ges_video_source_create_element (GESTrackElement * trksrc) ges_video_source_create_filters (GESVideoSource * self, GPtrArray * elements,
gboolean needs_converters)
{ {
GstElement *topbin; GESTrackElement *trksrc = GES_TRACK_ELEMENT (self);
GstElement *sub_element;
GESVideoSourceClass *source_class = GES_VIDEO_SOURCE_GET_CLASS (trksrc);
GESVideoSource *self;
GstElement *positioner, *videoflip, *capsfilter, *deinterlace; GstElement *positioner, *videoflip, *capsfilter, *deinterlace;
const gchar *positioner_props[] = const gchar *positioner_props[] =
{ "alpha", "posx", "posy", "width", "height", NULL }; { "alpha", "posx", "posy", "width", "height", NULL };
const gchar *deinterlace_props[] = { "mode", "fields", "tff", NULL }; const gchar *deinterlace_props[] = { "mode", "fields", "tff", NULL };
const gchar *videoflip_props[] = { "video-direction", NULL }; const gchar *videoflip_props[] = { "video-direction", NULL };
gboolean needs_converters = TRUE;
GPtrArray *elements;
if (!source_class->create_source)
return NULL;
sub_element = source_class->create_source (trksrc);
self = (GESVideoSource *) trksrc;
if (source_class->ABI.abi.needs_converters)
needs_converters = source_class->ABI.abi.needs_converters (self);
elements = g_ptr_array_new ();
g_ptr_array_add (elements, gst_element_factory_make ("queue", NULL)); g_ptr_array_add (elements, gst_element_factory_make ("queue", NULL));
/* That positioner will add metadata to buffers according to its /* That positioner will add metadata to buffers according to its
@ -175,9 +161,11 @@ ges_video_source_create_element (GESTrackElement * trksrc)
deinterlace = gst_element_factory_make ("deinterlace", "deinterlace"); deinterlace = gst_element_factory_make ("deinterlace", "deinterlace");
if (deinterlace == NULL) { if (deinterlace == NULL) {
post_missing_element_message (sub_element, "deinterlace"); post_missing_element_message (ges_track_element_get_nleobject (trksrc),
"deinterlace");
GST_ELEMENT_WARNING (sub_element, CORE, MISSING_PLUGIN, GST_ELEMENT_WARNING (ges_track_element_get_nleobject (trksrc), CORE,
MISSING_PLUGIN,
("Missing element '%s' - check your GStreamer installation.", ("Missing element '%s' - check your GStreamer installation.",
"deinterlace"), ("deinterlacing won't work")); "deinterlace"), ("deinterlacing won't work"));
} else { } else {
@ -185,8 +173,6 @@ ges_video_source_create_element (GESTrackElement * trksrc)
ges_track_element_add_children_props (trksrc, deinterlace, NULL, NULL, ges_track_element_add_children_props (trksrc, deinterlace, NULL, NULL,
deinterlace_props); deinterlace_props);
} }
topbin = ges_source_create_topbin ("videosrcbin", sub_element, elements);
g_ptr_array_free (elements, TRUE);
self->priv->positioner = GST_FRAME_POSITIONNER (positioner); self->priv->positioner = GST_FRAME_POSITIONNER (positioner);
self->priv->positioner->scale_in_compositor = self->priv->positioner->scale_in_compositor =
@ -197,6 +183,39 @@ ges_video_source_create_element (GESTrackElement * trksrc)
self->priv->capsfilter = capsfilter; self->priv->capsfilter = capsfilter;
return TRUE;
}
static GstElement *
ges_video_source_create_element (GESTrackElement * trksrc)
{
GstElement *topbin;
GstElement *sub_element;
GESVideoSourceClass *source_class = GES_VIDEO_SOURCE_GET_CLASS (trksrc);
GESVideoSource *self;
gboolean needs_converters = TRUE;
GPtrArray *elements;
if (!source_class->create_source)
return NULL;
sub_element = source_class->create_source (trksrc);
self = (GESVideoSource *) trksrc;
if (source_class->ABI.abi.needs_converters)
needs_converters = source_class->ABI.abi.needs_converters (self);
elements = g_ptr_array_new ();
g_assert (source_class->ABI.abi.create_filters);
if (!source_class->ABI.abi.create_filters (self, elements, needs_converters)) {
g_ptr_array_free (elements, TRUE);
return NULL;
}
topbin = ges_source_create_topbin ("videosrcbin", sub_element, elements);
g_ptr_array_free (elements, TRUE);
return topbin; return topbin;
} }
@ -246,6 +265,7 @@ ges_video_source_class_init (GESVideoSourceClass * klass)
track_element_class->ABI.abi.default_track_type = GES_TRACK_TYPE_VIDEO; track_element_class->ABI.abi.default_track_type = GES_TRACK_TYPE_VIDEO;
video_source_class->create_source = NULL; video_source_class->create_source = NULL;
video_source_class->ABI.abi.create_filters = ges_video_source_create_filters;
} }
static void static void

View file

@ -69,6 +69,7 @@ struct _GESVideoSourceClass {
gboolean disable_scale_in_compositor; gboolean disable_scale_in_compositor;
gboolean (*needs_converters)(GESVideoSource *self); gboolean (*needs_converters)(GESVideoSource *self);
gboolean (*get_natural_size)(GESVideoSource* self, gint* width, gint* height); gboolean (*get_natural_size)(GESVideoSource* self, gint* width, gint* height);
gboolean (*create_filters)(GESVideoSource *self, GPtrArray *filters, gboolean needs_converters);
} abi; } abi;
} ABI; } ABI;
}; };

View file

@ -218,16 +218,32 @@ G_DEFINE_TYPE_WITH_CODE (GESVideoUriSource, ges_video_uri_source,
/* GObject VMethods */ /* GObject VMethods */
static gboolean
ges_video_uri_source_create_filters (GESVideoSource * source,
GPtrArray * elements, gboolean needs_converters)
{
GESAsset *asset = ges_extractable_get_asset (GES_EXTRACTABLE (source));
g_assert (GES_IS_URI_SOURCE_ASSET (asset));
if (!GES_VIDEO_SOURCE_CLASS (ges_video_uri_source_parent_class)
->ABI.abi.create_filters (source, elements, needs_converters))
return FALSE;
if (ges_uri_source_asset_is_image (GES_URI_SOURCE_ASSET (asset)))
g_ptr_array_add (elements, gst_element_factory_make ("imagefreeze", NULL));
return TRUE;
}
static void static void
ges_video_uri_source_get_property (GObject * object, guint property_id, ges_video_uri_source_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec) GValue * value, GParamSpec * pspec)
{ {
GESVideoUriSource *uriclip = GES_VIDEO_URI_SOURCE (object); GESVideoUriSource *urisource = GES_VIDEO_URI_SOURCE (object);
switch (property_id) { switch (property_id) {
case PROP_URI: case PROP_URI:
g_value_set_string (value, uriclip->uri); g_value_set_string (value, urisource->uri);
break; break;
default: default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
@ -238,15 +254,15 @@ static void
ges_video_uri_source_set_property (GObject * object, guint property_id, ges_video_uri_source_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec) const GValue * value, GParamSpec * pspec)
{ {
GESVideoUriSource *uriclip = GES_VIDEO_URI_SOURCE (object); GESVideoUriSource *urisource = GES_VIDEO_URI_SOURCE (object);
switch (property_id) { switch (property_id) {
case PROP_URI: case PROP_URI:
if (uriclip->uri) { if (urisource->uri) {
GST_WARNING_OBJECT (object, "Uri already set to %s", uriclip->uri); GST_WARNING_OBJECT (object, "Uri already set to %s", urisource->uri);
return; return;
} }
uriclip->uri = g_value_dup_string (value); urisource->uri = g_value_dup_string (value);
break; break;
default: default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
@ -256,10 +272,9 @@ ges_video_uri_source_set_property (GObject * object, guint property_id,
static void static void
ges_video_uri_source_dispose (GObject * object) ges_video_uri_source_dispose (GObject * object)
{ {
GESVideoUriSource *uriclip = GES_VIDEO_URI_SOURCE (object); GESVideoUriSource *urisource = GES_VIDEO_URI_SOURCE (object);
if (uriclip->uri) g_free (urisource->uri);
g_free (uriclip->uri);
G_OBJECT_CLASS (ges_video_uri_source_parent_class)->dispose (object); G_OBJECT_CLASS (ges_video_uri_source_parent_class)->dispose (object);
} }
@ -288,6 +303,7 @@ ges_video_uri_source_class_init (GESVideoUriSourceClass * klass)
ges_video_uri_source_needs_converters; ges_video_uri_source_needs_converters;
source_class->ABI.abi.get_natural_size = source_class->ABI.abi.get_natural_size =
ges_video_uri_source_get_natural_size; ges_video_uri_source_get_natural_size;
source_class->ABI.abi.create_filters = ges_video_uri_source_create_filters;
} }
static void static void

View file

@ -260,7 +260,7 @@ GST_START_TEST (test_filesource_images)
fail_unless (GES_TIMELINE_ELEMENT_PARENT (track_element) == fail_unless (GES_TIMELINE_ELEMENT_PARENT (track_element) ==
GES_TIMELINE_ELEMENT (clip)); GES_TIMELINE_ELEMENT (clip));
fail_unless (ges_track_element_get_track (track_element) == v); fail_unless (ges_track_element_get_track (track_element) == v);
fail_unless (GES_IS_IMAGE_SOURCE (track_element)); fail_unless (GES_IS_VIDEO_URI_SOURCE (track_element));
ASSERT_OBJECT_REFCOUNT (track_element, "1 in track, 1 in clip 2 in timeline", ASSERT_OBJECT_REFCOUNT (track_element, "1 in track, 1 in clip 2 in timeline",
3); 3);

View file

@ -277,3 +277,20 @@ class TestTrackElements(common.GESSimpleTimelineTest):
self.assertTrue(self.layer.remove_clip(clip2)) self.assertTrue(self.layer.remove_clip(clip2))
self.assertTrue(clip1.add(clip2_child)) self.assertTrue(clip1.add(clip2_child))
def test_image_source_asset(self):
asset = GES.UriClipAsset.request_sync(common.get_asset_uri("png.png"))
clip = self.layer.add_asset(asset, 0, 0, Gst.SECOND, GES.TrackType.UNKNOWN)
image_src, = clip.get_children(True)
self.assertTrue(image_src.get_asset().is_image())
self.assertTrue(isinstance(image_src, GES.VideoUriSource))
imagefreeze, = [e for e in image_src.get_nleobject().iterate_recurse()
if e.get_factory().get_name() == "imagefreeze"]
asset = GES.UriClipAsset.request_sync(common.get_asset_uri("audio_video.ogg"))
clip = self.layer.add_asset(asset, Gst.SECOND, 0, Gst.SECOND, GES.TrackType.VIDEO)
video_src, = clip.get_children(True)
self.assertEqual([e for e in video_src.get_nleobject().iterate_recurse()
if e.get_factory().get_name() == "imagefreeze"], [])