overlaycomposition: New element that allows applications to draw GstVideoOverlayComposition on a stream

https://bugzilla.gnome.org/show_bug.cgi?id=797234
This commit is contained in:
Sebastian Dröge 2018-10-01 18:10:34 +03:00
parent 596a4ee12f
commit 088b4c0c52
20 changed files with 1676 additions and 4 deletions

View file

@ -501,6 +501,7 @@ AG_GST_CHECK_PLUGIN(audiotestsrc)
AG_GST_CHECK_PLUGIN(encoding)
AG_GST_CHECK_PLUGIN(videoconvert)
AG_GST_CHECK_PLUGIN(gio)
AG_GST_CHECK_PLUGIN(overlaycomposition)
AG_GST_CHECK_PLUGIN(playback)
AG_GST_CHECK_PLUGIN(audioresample)
AG_GST_CHECK_PLUGIN(rawparse)
@ -924,6 +925,7 @@ gst/audiotestsrc/Makefile
gst/encoding/Makefile
gst/videoconvert/Makefile
gst/gio/Makefile
gst/overlaycomposition/Makefile
gst/pbtypes/Makefile
gst/playback/Makefile
gst/rawparse/Makefile
@ -1027,6 +1029,7 @@ tests/examples/gl/gtk/filtervideooverlay/Makefile
tests/examples/gl/cocoa/Makefile
tests/examples/gl/sdl/Makefile
tests/examples/overlay/Makefile
tests/examples/overlaycomposition/Makefile
tests/examples/seek/Makefile
tests/examples/snapshot/Makefile
tests/examples/playback/Makefile

View file

@ -84,6 +84,7 @@ EXTRA_HFILES = \
$(top_srcdir)/gst/playback/gstsubtitleoverlay.h \
$(top_srcdir)/gst/audiorate/gstaudiorate.h \
$(top_srcdir)/gst/audioresample/gstaudioresample.h \
$(top_srcdir)/gst/overlaycomposition/gstoverlaycomposition.h \
$(top_srcdir)/gst/rawparse/gstrawaudioparse.h \
$(top_srcdir)/gst/rawparse/gstrawvideoparse.h \
$(top_srcdir)/gst/rawparse/gstunalignedaudioparse.h \

View file

@ -90,6 +90,7 @@
<xi:include href="xml/element-ogmvideoparse.xml" />
<xi:include href="xml/element-opusdec.xml" />
<xi:include href="xml/element-opusenc.xml" />
<xi:include href="xml/element-overlaycomposition.xml" />
<xi:include href="xml/element-parsebin.xml" />
<xi:include href="xml/element-playbin.xml" />
<xi:include href="xml/element-playbin3.xml" />
@ -146,6 +147,7 @@
<xi:include href="xml/plugin-ogg.xml" />
<xi:include href="xml/plugin-opus.xml" />
<xi:include href="xml/plugin-opengl.xml" />
<xi:include href="xml/plugin-overlaycomposition.xml" />
<xi:include href="xml/plugin-pango.xml" />
<xi:include href="xml/plugin-playback.xml" />
<xi:include href="xml/plugin-rawparse.xml" />

View file

@ -1174,6 +1174,22 @@ GST_TYPE_OPUS_ENC
gst_opus_enc_get_type
</SECTION>
<SECTION>
<FILE>element-overlaycomposition</FILE>
<TITLE>overlaycomposition</TITLE>
GstOverlayComposition
<SUBSECTION Standard>
GstOverlayCompositionClass
GST_OVERLAY_COMPOSITION
GST_OVERLAY_COMPOSITION_CAST
GST_IS_OVERLAY_COMPOSITION
GST_OVERLAY_COMPOSITION_CLASS
GST_IS_OVERLAY_COMPOSITION_CLASS
GST_TYPE_OVERLAY_COMPOSITION
<SUBSECTION Private>
gst_overlay_composition_get_type
</SECTION>
<SECTION>
<FILE>element-parsebin</FILE>
<TITLE>parsebin</TITLE>

View file

@ -1,6 +1,6 @@
foreach plugin : ['adder', 'app', 'audioconvert', 'audiomixer', 'audiorate', 'audioresample',
'audiotestsrc', 'encoding', 'gio', 'pbtypes', 'playback', 'rawparse',
'subparse', 'tcp', 'typefind', 'videoconvert', 'videorate', 'videoscale',
'audiotestsrc', 'encoding', 'gio', 'overlaycomposition', 'pbtypes', 'playback',
'rawparse', 'subparse', 'tcp', 'typefind', 'videoconvert', 'videorate', 'videoscale',
'videotestsrc', 'volume']
if not get_option(plugin).disabled()
subdir(plugin)

View file

@ -0,0 +1,11 @@
noinst_HEADERS = gstoverlaycomposition.h
plugin_LTLIBRARIES = libgstoverlaycomposition.la
libgstoverlaycomposition_la_SOURCES = gstoverlaycomposition.c
libgstoverlaycomposition_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstoverlaycomposition_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstoverlaycomposition_la_LIBADD = \
$(top_builddir)/gst-libs/gst/video/libgstvideo-$(GST_API_VERSION).la \
$(GST_BASE_LIBS) \
$(GST_LIBS)

View file

@ -0,0 +1,879 @@
/* GStreamer
* Copyright (C) 2018 Sebastian Dröge <sebastian@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-overlaycomposition
*
 * overlaycomposition renders an overlay using an application-provided draw function.
*
* A more interesting example can be found at
* https://cgit.freedesktop.org/gstreamer/gst-plugins-base/tree/tests/examples/overlaycomposition/overlaycomposition.c
*
* <refsect2>
* <title>Example code</title>
* |[
*
* #include &lt;gst/gst.h&gt;
* #include &lt;gst/video/video.h&gt;
*
* ...
*
* typedef struct {
* gboolean valid;
* GstVideoInfo info;
* } OverlayState;
*
* ...
*
* static void
* prepare_overlay (GstElement * overlay, GstCaps * caps, gint window_width,
* gint window_height, gpointer user_data)
* {
* OverlayState *s = (OverlayState *)user_data;
*
* if (gst_video_info_from_caps (&amp;s-&gt;info, caps))
* s-&gt;valid = TRUE;
* }
*
* static GstVideoOverlayComposition *
* draw_overlay (GstElement * overlay, GstSample * sample, gpointer user_data)
* {
* OverlayState *s = (OverlayState *)user_data;
* GstBuffer *buffer;
* GstVideoOverlayRectangle *rect;
* GstVideoOverlayComposition *comp;
* GstVideoInfo info;
* GstVideoFrame frame;
* gint x, y;
* guint8 *data;
*
* if (!s-&gt;valid)
* return NULL;
*
* gst_video_info_set_format (&amp;info, GST_VIDEO_FORMAT_BGRA, 16, 16);
* buffer = gst_buffer_new_and_alloc (info.size);
* gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
* GST_VIDEO_INFO_FORMAT(&amp;info),
* GST_VIDEO_INFO_WIDTH(&amp;info),
* GST_VIDEO_INFO_HEIGHT(&amp;info));
*
* gst_video_frame_map (&amp;frame, &amp;info, buffer, GST_MAP_WRITE);
*
* // Overlay a half-transparent blue 16x16 rectangle in the middle
* // of the frame
* data = GST_VIDEO_FRAME_PLANE_DATA(&amp;frame, 0);
* for (y = 0; y < 16; y++) {
* guint8 *line = &amp;data[y * GST_VIDEO_FRAME_PLANE_STRIDE (&amp;frame, 0)];
* for (x = 0; x < 16; x++) {
* guint8 *pixel = &amp;line[x * 4];
*
* pixel[0] = 255;
* pixel[1] = 0;
* pixel[2] = 0;
* pixel[3] = 127;
* }
* }
*
* gst_video_frame_unmap (&amp;frame);
* rect = gst_video_overlay_rectangle_new_raw (buffer,
* s->info.width / 2 - 8,
* s->info.height / 2 - 8,
* 16, 16,
* GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
* comp = gst_video_overlay_composition_new (rect);
* gst_video_overlay_rectangle_unref (rect);
* gst_buffer_unref (buffer);
*
* return comp;
* }
*
* ...
*
* overlay = gst_element_factory_make (&quot;overlaycomposition&quot;, &quot;overlay&quot;);
*
* g_signal_connect (overlay, &quot;draw&quot;, G_CALLBACK (draw_overlay),
* overlay_state);
* g_signal_connect (overlay, &quot;caps-changed&quot;,
* G_CALLBACK (prepare_overlay), overlay_state);
* ...
*
* ]|
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

/* for memset() in gst_overlay_composition_change_state() */
#include <string.h>

#include "gstoverlaycomposition.h"

GST_DEBUG_CATEGORY_STATIC (gst_overlay_composition_debug);
#define GST_CAT_DEFAULT gst_overlay_composition_debug

/* Raw video formats we can blend the composition into ourselves */
#define OVERLAY_COMPOSITION_CAPS GST_VIDEO_CAPS_MAKE (GST_VIDEO_OVERLAY_COMPOSITION_BLEND_FORMATS)

/* Blendable formats, plus anything at all (ANY caps features) for the case
 * where the composition is attached as a meta instead of blended */
#define ALL_CAPS OVERLAY_COMPOSITION_CAPS ";" \
    GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)

/* Signal IDs */
enum
{
  SIGNAL_CAPS_CHANGED,
  SIGNAL_DRAW,
  LAST_SIGNAL
};

static guint overlay_composition_signals[LAST_SIGNAL];

static GstStaticCaps overlay_composition_caps =
GST_STATIC_CAPS (OVERLAY_COMPOSITION_CAPS);
/* Returns TRUE if @incaps lies entirely within the set of raw video formats
 * that gst_video_overlay_composition_blend() supports. */
static gboolean
can_blend_caps (GstCaps * incaps)
{
  GstCaps *blend_caps = gst_static_caps_get (&overlay_composition_caps);
  gboolean result = gst_caps_is_subset (incaps, blend_caps);

  gst_caps_unref (blend_caps);

  return result;
}
/* Both pads accept either blendable raw video or anything at all (in the
 * latter case the composition can only be attached as a meta, not blended) */
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (ALL_CAPS)
    );

static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (ALL_CAPS)
    );
#define parent_class gst_overlay_composition_parent_class
/* Plain GstElement subclass; pads are driven manually (no GstBaseTransform) */
G_DEFINE_TYPE (GstOverlayComposition, gst_overlay_composition,
    GST_TYPE_ELEMENT);

/* Forward declarations for the pad functions and vfuncs set up below */
static GstFlowReturn gst_overlay_composition_sink_chain (GstPad * pad,
    GstObject * parent, GstBuffer * buffer);
static gboolean gst_overlay_composition_sink_event (GstPad * pad,
    GstObject * parent, GstEvent * event);
static gboolean gst_overlay_composition_sink_query (GstPad * pad,
    GstObject * parent, GstQuery * query);
static gboolean gst_overlay_composition_src_query (GstPad * pad,
    GstObject * parent, GstQuery * query);
static GstStateChangeReturn gst_overlay_composition_change_state (GstElement *
    element, GstStateChange transition);
/* Class initialisation: registers the debug category, element metadata,
 * pad templates, the state-change vfunc and the two action signals
 * ("draw" and "caps-changed") the application connects to. */
static void
gst_overlay_composition_class_init (GstOverlayCompositionClass * klass)
{
  GstElementClass *gstelement_class = (GstElementClass *) klass;

  GST_DEBUG_CATEGORY_INIT (gst_overlay_composition_debug, "overlaycomposition",
      0, "Overlay Composition");

  gst_element_class_set_static_metadata (gstelement_class,
      "Overlay Composition", "Filter/Editor/Video",
      "Overlay Composition", "Sebastian Dröge <sebastian@centricular.com>");

  gst_element_class_add_pad_template (gstelement_class,
      gst_static_pad_template_get (&src_template));
  gst_element_class_add_pad_template (gstelement_class,
      gst_static_pad_template_get (&sink_template));

  gstelement_class->change_state = gst_overlay_composition_change_state;

  /**
   * GstOverlayComposition::draw:
   * @overlay: Overlay element emitting the signal.
   * @sample: #GstSample containing the current buffer, caps and segment.
   *
   * This signal is emitted when the overlay should be drawn.
   *
   * Returns: #GstVideoOverlayComposition or %NULL
   */
  overlay_composition_signals[SIGNAL_DRAW] =
      g_signal_new ("draw",
      G_TYPE_FROM_CLASS (klass),
      0,
      0,
      NULL,
      NULL,
      g_cclosure_marshal_generic,
      GST_TYPE_VIDEO_OVERLAY_COMPOSITION, 1, GST_TYPE_SAMPLE);

  /**
   * GstOverlayComposition::caps-changed:
   * @overlay: Overlay element emitting the signal.
   * @caps: The #GstCaps of the element.
   * @window_width: The window render width of downstream, or 0.
   * @window_height: The window render height of downstream, or 0.
   *
   * This signal is emitted when the caps of the element has changed.
   *
   * The window width and height define the resolution at which the frame is
   * going to be rendered in the end by e.g. a video sink (i.e. the window
   * size).
   */
  overlay_composition_signals[SIGNAL_CAPS_CHANGED] =
      g_signal_new ("caps-changed",
      G_TYPE_FROM_CLASS (klass),
      0,
      0, NULL, NULL, g_cclosure_marshal_generic, G_TYPE_NONE, 3, GST_TYPE_CAPS,
      G_TYPE_UINT, G_TYPE_UINT);
}
/* Instance initialisation: creates the sink and src pads from the static
 * templates, installs the chain/event/query functions and adds the pads
 * to the element. */
static void
gst_overlay_composition_init (GstOverlayComposition * self)
{
  self->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
  gst_pad_set_chain_function (self->sinkpad,
      GST_DEBUG_FUNCPTR (gst_overlay_composition_sink_chain));
  gst_pad_set_event_function (self->sinkpad,
      GST_DEBUG_FUNCPTR (gst_overlay_composition_sink_event));
  gst_pad_set_query_function (self->sinkpad,
      GST_DEBUG_FUNCPTR (gst_overlay_composition_sink_query));
  gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);

  self->srcpad = gst_pad_new_from_static_template (&src_template, "src");
  gst_pad_set_query_function (self->srcpad,
      GST_DEBUG_FUNCPTR (gst_overlay_composition_src_query));
  gst_element_add_pad (GST_ELEMENT (self), self->srcpad);
}
/* State-change vfunc: chains up to the parent class first, then initialises
 * the segment on READY->PAUSED and drops all negotiated/stream state on
 * PAUSED->READY so a restart renegotiates cleanly. */
static GstStateChangeReturn
gst_overlay_composition_change_state (GstElement * element,
    GstStateChange transition)
{
  GstOverlayComposition *self = GST_OVERLAY_COMPOSITION (element);
  GstStateChangeReturn ret;

  ret =
      GST_ELEMENT_CLASS (gst_overlay_composition_parent_class)->change_state
      (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
    gst_segment_init (&self->segment, GST_FORMAT_UNDEFINED);
  } else if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
    /* Reset everything that was derived from the stream */
    memset (&self->info, 0, sizeof (self->info));
    self->window_width = self->window_height = 0;
    self->attach_compo_to_buffer = FALSE;
    if (self->sample) {
      gst_sample_unref (self->sample);
      self->sample = NULL;
    }
    gst_caps_replace (&self->caps, NULL);
    gst_segment_init (&self->segment, GST_FORMAT_UNDEFINED);
  }

  return ret;
}
/* Based on gstbasetextoverlay.c */
/* (Re)negotiates the source pad and decides whether the drawn composition
 * will be attached to buffers as a GstVideoOverlayCompositionMeta or blended
 * into the frames directly. @caps may be NULL, in which case the sinkpad's
 * current caps are used. Emits "caps-changed" with the negotiated window
 * size before returning. Returns FALSE (and marks the srcpad for
 * reconfiguration) on failure. */
static gboolean
gst_overlay_composition_negotiate (GstOverlayComposition * self, GstCaps * caps)
{
  gboolean upstream_has_meta = FALSE;
  gboolean caps_has_meta = FALSE;
  gboolean alloc_has_meta = FALSE;
  gboolean attach = FALSE;
  gboolean ret = TRUE;
  guint width, height;
  GstCapsFeatures *f;
  GstCaps *overlay_caps;
  GstQuery *query;
  guint alloc_index;

  GST_DEBUG_OBJECT (self, "performing negotiation");

  /* Clear any pending reconfigure to avoid negotiating twice */
  gst_pad_check_reconfigure (self->srcpad);

  self->window_width = self->window_height = 0;

  /* Either path below leaves us holding a reference on @caps */
  if (!caps)
    caps = gst_pad_get_current_caps (self->sinkpad);
  else
    gst_caps_ref (caps);

  if (!caps || gst_caps_is_empty (caps))
    goto no_format;

  /* Check if upstream caps have meta */
  if ((f = gst_caps_get_features (caps, 0))) {
    upstream_has_meta = gst_caps_features_contains (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
  }

  /* Initialize dimensions from the sink caps; may be overridden by the
   * window size reported in the allocation query below */
  width = self->info.width;
  height = self->info.height;

  if (upstream_has_meta) {
    overlay_caps = gst_caps_ref (caps);
  } else {
    GstCaps *peercaps;

    /* BaseTransform requires caps for the allocation query to work */
    overlay_caps = gst_caps_copy (caps);
    f = gst_caps_get_features (overlay_caps, 0);
    gst_caps_features_add (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);

    /* Then check if downstream accept overlay composition in caps */
    /* FIXME: We should probably check if downstream *prefers* the
     * overlay meta, and only enforce usage of it if we can't handle
     * the format ourselves and thus would have to drop the overlays.
     * Otherwise we should prefer what downstream wants here.
     */
    peercaps = gst_pad_peer_query_caps (self->srcpad, overlay_caps);
    caps_has_meta = !gst_caps_is_empty (peercaps);
    gst_caps_unref (peercaps);

    GST_DEBUG_OBJECT (self, "caps have overlay meta %d", caps_has_meta);
  }

  if (upstream_has_meta || caps_has_meta) {
    /* Send caps immediately, it's needed by GstBaseTransform to get a reply
     * from the allocation query */
    ret = gst_pad_set_caps (self->srcpad, overlay_caps);

    /* First check if the allocation meta has composition */
    query = gst_query_new_allocation (overlay_caps, FALSE);

    if (!gst_pad_peer_query (self->srcpad, query)) {
      /* no problem, we use the query defaults */
      GST_DEBUG_OBJECT (self, "ALLOCATION query failed");

      /* In case we were flushing, mark reconfigure and fail this method,
       * will make it retry */
      if (GST_PAD_IS_FLUSHING (self->srcpad))
        ret = FALSE;
    }

    alloc_has_meta = gst_query_find_allocation_meta (query,
        GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, &alloc_index);

    GST_DEBUG_OBJECT (self, "sink alloc has overlay meta %d", alloc_has_meta);

    if (alloc_has_meta) {
      const GstStructure *params;

      gst_query_parse_nth_allocation_meta (query, alloc_index, &params);
      if (params) {
        if (gst_structure_get (params, "width", G_TYPE_UINT, &width,
                "height", G_TYPE_UINT, &height, NULL)) {
          GST_DEBUG_OBJECT (self, "received window size: %dx%d", width, height);
          g_assert (width != 0 && height != 0);
        }
      }
    }

    gst_query_unref (query);
  }

  /* Update render size if needed */
  self->window_width = width;
  self->window_height = height;

  /* For backward compatibility, we will prefer blitting if downstream
   * allocation does not support the meta. In other case we will prefer
   * attaching, and will fail the negotiation in the unlikely case we are
   * forced to blit, but format isn't supported. */
  if (upstream_has_meta) {
    attach = TRUE;
  } else if (caps_has_meta) {
    if (alloc_has_meta) {
      attach = TRUE;
    } else {
      /* Don't attach unless we cannot handle the format */
      attach = !can_blend_caps (caps);
    }
  } else {
    ret = can_blend_caps (caps);
  }

  /* If we attach, then pick the overlay caps */
  if (attach) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, overlay_caps);
    /* Caps were already sent */
  } else if (ret) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, caps);
    ret = gst_pad_set_caps (self->srcpad, caps);
  }

  self->attach_compo_to_buffer = attach;

  if (!ret) {
    GST_DEBUG_OBJECT (self, "negotiation failed, schedule reconfigure");
    gst_pad_mark_reconfigure (self->srcpad);
  }

  /* Notify the application even on failure, so it knows the window size */
  g_signal_emit (self, overlay_composition_signals[SIGNAL_CAPS_CHANGED], 0,
      caps, self->window_width, self->window_height, NULL);

  gst_caps_unref (overlay_caps);
  gst_caps_unref (caps);

  return ret;

no_format:
  {
    if (caps)
      gst_caps_unref (caps);
    gst_pad_mark_reconfigure (self->srcpad);
    return FALSE;
  }
}
/* Sink pad event handler: tracks the segment, renegotiates on CAPS events
 * and resets the segment on FLUSH_STOP; everything else is forwarded with
 * the default handler. Takes ownership of @event on all paths. */
static gboolean
gst_overlay_composition_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstOverlayComposition *self = GST_OVERLAY_COMPOSITION (parent);
  gboolean ret = FALSE;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
      /* Remember the segment so it can be put into the sample passed to
       * the "draw" signal */
      gst_event_copy_segment (event, &self->segment);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    case GST_EVENT_CAPS:{
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);

      if (!gst_video_info_from_caps (&self->info, caps)) {
        gst_event_unref (event);
        ret = FALSE;
        break;
      }

      if (!gst_overlay_composition_negotiate (self, caps)) {
        gst_event_unref (event);
        ret = FALSE;
        break;
      }

      gst_caps_replace (&self->caps, caps);
      /* The caps event is NOT forwarded here: negotiate() already set the
       * (possibly different, meta-annotated) caps on the srcpad */
      ret = TRUE;
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      gst_segment_init (&self->segment, GST_FORMAT_UNDEFINED);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    default:
      ret = gst_pad_event_default (pad, parent, event);
      break;
  }

  return ret;
}
/* Based on gstbasetextoverlay.c */
/**
 * add_feature_and_intersect:
 *
 * Creates a new #GstCaps containing the (given caps +
 * given caps feature) + (given caps intersected by the
 * given filter).
 *
 * Returns: the new #GstCaps
 */
static GstCaps *
add_feature_and_intersect (GstCaps * caps,
    const gchar * feature, GstCaps * filter)
{
  GstCaps *result = gst_caps_copy (caps);
  guint n = gst_caps_get_size (result);
  guint idx;

  /* Annotate every structure (except ANY ones) with the feature */
  for (idx = 0; idx < n; idx++) {
    GstCapsFeatures *feats = gst_caps_get_features (result, idx);

    if (!gst_caps_features_is_any (feats))
      gst_caps_features_add (feats, feature);
  }

  /* And also keep a feature-less variant, limited by @filter */
  gst_caps_append (result, gst_caps_intersect_full (caps,
          filter, GST_CAPS_INTERSECT_FIRST));

  return result;
}
/* Based on gstbasetextoverlay.c */
/**
 * intersect_by_feature:
 *
 * Creates a new #GstCaps based on the following filtering rule.
 *
 * For each individual caps contained in given caps, if the
 * caps uses the given caps feature, keep a version of the caps
 * with the feature and an another one without. Otherwise, intersect
 * the caps with the given filter.
 *
 * Returns: the new #GstCaps
 */
static GstCaps *
intersect_by_feature (GstCaps * caps, const gchar * feature, GstCaps * filter)
{
  GstCaps *result = gst_caps_new_empty ();
  guint n = gst_caps_get_size (caps);
  guint idx;

  for (idx = 0; idx < n; idx++) {
    GstCapsFeatures *feats =
        gst_caps_features_copy (gst_caps_get_features (caps, idx));
    GstCaps *single =
        gst_caps_new_full (gst_structure_copy (gst_caps_get_structure (caps,
                idx)), NULL);
    GstCaps *filtered;

    /* @single now owns @feats */
    gst_caps_set_features (single, 0, feats);

    if (gst_caps_features_contains (feats, feature)) {
      /* keep one copy with the feature ... */
      gst_caps_append (result, gst_caps_copy (single));
      /* ... and one without */
      gst_caps_features_remove (feats, feature);
      filtered = gst_caps_ref (single);
    } else {
      /* no feature: just limit by the software caps */
      filtered = gst_caps_intersect_full (single, filter,
          GST_CAPS_INTERSECT_FIRST);
    }

    gst_caps_unref (single);
    gst_caps_append (result, filtered);
  }

  return result;
}
/* Based on gstbasetextoverlay.c */
/* Computes the caps we can accept on the sink pad: what the downstream peer
 * accepts, expanded with the overlay-composition meta feature and restricted
 * by the formats we can blend ourselves. Returns a new caps reference. */
static GstCaps *
gst_overlay_composition_sink_query_caps (GstOverlayComposition * self,
    GstCaps * filter)
{
  GstCaps *peer_caps = NULL, *caps = NULL, *overlay_filter = NULL;

  if (filter) {
    /* filter caps + composition feature + filter caps
     * filtered by the software caps. */
    GstCaps *sw_caps = gst_static_caps_get (&overlay_composition_caps);
    overlay_filter = add_feature_and_intersect (filter,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
    gst_caps_unref (sw_caps);

    GST_DEBUG_OBJECT (self->sinkpad, "overlay filter %" GST_PTR_FORMAT,
        overlay_filter);
  }

  peer_caps = gst_pad_peer_query_caps (self->srcpad, overlay_filter);

  if (overlay_filter)
    gst_caps_unref (overlay_filter);

  if (peer_caps) {

    GST_DEBUG_OBJECT (self->sinkpad, "peer caps %" GST_PTR_FORMAT, peer_caps);

    if (gst_caps_is_any (peer_caps)) {
      /* if peer returns ANY caps, return filtered src pad template caps */
      caps = gst_caps_copy (gst_pad_get_pad_template_caps (self->srcpad));
    } else {
      /* duplicate caps which contains the composition into one version with
       * the meta and one without. Filter the other caps by the software caps */
      GstCaps *sw_caps = gst_static_caps_get (&overlay_composition_caps);
      caps = intersect_by_feature (peer_caps,
          GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
      gst_caps_unref (sw_caps);
    }

    gst_caps_unref (peer_caps);

  } else {
    /* no peer, our padtemplate is enough then */
    caps = gst_pad_get_pad_template_caps (self->sinkpad);
  }

  if (filter) {
    GstCaps *intersection = gst_caps_intersect_full (filter, caps,
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = intersection;
  }

  GST_DEBUG_OBJECT (self->sinkpad, "returning %" GST_PTR_FORMAT, caps);

  return caps;
}
/* Based on gstbasetextoverlay.c */
/* Computes the caps we can produce on the source pad: what the upstream peer
 * produces, plus variants with the overlay-composition meta feature added,
 * restricted by the formats we can blend. Returns a new caps reference. */
static GstCaps *
gst_overlay_composition_src_query_caps (GstOverlayComposition * self,
    GstCaps * filter)
{
  GstCaps *peer_caps = NULL, *caps = NULL, *overlay_filter = NULL;

  if (filter) {
    /* duplicate filter caps which contains the composition into one version
     * with the meta and one without. Filter the other caps by the software
     * caps */
    GstCaps *sw_caps = gst_static_caps_get (&overlay_composition_caps);
    overlay_filter =
        intersect_by_feature (filter,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
    gst_caps_unref (sw_caps);
  }

  peer_caps = gst_pad_peer_query_caps (self->sinkpad, overlay_filter);

  if (overlay_filter)
    gst_caps_unref (overlay_filter);

  if (peer_caps) {

    GST_DEBUG_OBJECT (self->srcpad, "peer caps %" GST_PTR_FORMAT, peer_caps);

    if (gst_caps_is_any (peer_caps)) {
      /* if peer returns ANY caps, return filtered sink pad template caps */
      caps = gst_caps_copy (gst_pad_get_pad_template_caps (self->sinkpad));

    } else {
      /* return upstream caps + composition feature + upstream caps
       * filtered by the software caps. */
      GstCaps *sw_caps = gst_static_caps_get (&overlay_composition_caps);
      caps = add_feature_and_intersect (peer_caps,
          GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
      gst_caps_unref (sw_caps);
    }

    gst_caps_unref (peer_caps);

  } else {
    /* no peer, our padtemplate is enough then */
    caps = gst_pad_get_pad_template_caps (self->srcpad);
  }

  if (filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = intersection;
  }

  GST_DEBUG_OBJECT (self->srcpad, "returning %" GST_PTR_FORMAT, caps);

  return caps;
}
/* Sink pad query handler: answers CAPS queries with our custom caps
 * computation, delegates everything else to the default handler. */
static gboolean
gst_overlay_composition_sink_query (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  GstOverlayComposition *self = GST_OVERLAY_COMPOSITION (parent);

  if (GST_QUERY_TYPE (query) == GST_QUERY_CAPS) {
    GstCaps *filter = NULL;
    GstCaps *result;

    gst_query_parse_caps (query, &filter);
    result = gst_overlay_composition_sink_query_caps (self, filter);
    gst_query_set_caps_result (query, result);
    gst_caps_unref (result);
    return TRUE;
  }

  return gst_pad_query_default (pad, parent, query);
}
/* Source pad query handler: answers CAPS queries with our custom caps
 * computation, delegates everything else to the default handler. */
static gboolean
gst_overlay_composition_src_query (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  GstOverlayComposition *self = GST_OVERLAY_COMPOSITION (parent);

  if (GST_QUERY_TYPE (query) == GST_QUERY_CAPS) {
    GstCaps *filter = NULL;
    GstCaps *result;

    gst_query_parse_caps (query, &filter);
    result = gst_overlay_composition_src_query_caps (self, filter);
    gst_query_set_caps_result (query, result);
    gst_caps_unref (result);
    return TRUE;
  }

  return gst_pad_query_default (pad, parent, query);
}
/* Chain function: renegotiates if needed, asks the application for an
 * overlay composition via the "draw" signal, then either appends it to an
 * upstream composition meta, attaches it as a new meta, or blends it into
 * the frame, depending on what was negotiated. Takes ownership of @buffer. */
static GstFlowReturn
gst_overlay_composition_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstOverlayComposition *self = GST_OVERLAY_COMPOSITION (parent);
  GstVideoOverlayComposition *compo = NULL;
  GstVideoOverlayCompositionMeta *upstream_compo_meta;

  if (gst_pad_check_reconfigure (self->srcpad)) {
    if (!gst_overlay_composition_negotiate (self, NULL)) {
      gst_pad_mark_reconfigure (self->srcpad);
      gst_buffer_unref (buffer);
      /* Distinguish "flushing" from a genuine negotiation failure */
      GST_OBJECT_LOCK (self->srcpad);
      if (GST_PAD_IS_FLUSHING (self->srcpad)) {
        GST_OBJECT_UNLOCK (self->srcpad);
        return GST_FLOW_FLUSHING;
      }
      GST_OBJECT_UNLOCK (self->srcpad);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  /* Reuse the cached sample when possible instead of allocating a new
   * one for every buffer */
  if (!self->sample) {
    self->sample = gst_sample_new (buffer, self->caps, &self->segment, NULL);
  } else {
    self->sample = gst_sample_make_writable (self->sample);
    gst_sample_set_buffer (self->sample, buffer);
    gst_sample_set_caps (self->sample, self->caps);
    gst_sample_set_segment (self->sample, &self->segment);
  }

  g_signal_emit (self, overlay_composition_signals[SIGNAL_DRAW], 0,
      self->sample, &compo);

  /* Don't store the buffer in the sample any longer, otherwise it will not
   * be writable below as we have one reference in the sample and one in
   * this function.
   *
   * If the sample is not writable itself then the application kept a
   * reference itself.
   */
  if (gst_sample_is_writable (self->sample)) {
    gst_sample_set_buffer (self->sample, NULL);
  }

  if (!compo) {
    GST_DEBUG_OBJECT (self->sinkpad,
        "Application did not provide an overlay composition");
    return gst_pad_push (self->srcpad, buffer);
  }

  /* If upstream attached a meta, we can safely add our own things
   * in it. Upstream must've checked that downstream supports it */
  /* NOTE(review): this path modifies the meta without making @buffer
   * writable first — confirm upstream guarantees writability here */
  upstream_compo_meta = gst_buffer_get_video_overlay_composition_meta (buffer);
  if (upstream_compo_meta) {
    GstVideoOverlayComposition *merged_compo =
        gst_video_overlay_composition_copy (upstream_compo_meta->overlay);
    guint i, n;

    GST_DEBUG_OBJECT (self->sinkpad,
        "Appending to upstream overlay composition");

    n = gst_video_overlay_composition_n_rectangles (compo);
    for (i = 0; i < n; i++) {
      GstVideoOverlayRectangle *rect =
          gst_video_overlay_composition_get_rectangle (compo, i);
      gst_video_overlay_composition_add_rectangle (merged_compo, rect);
    }

    gst_video_overlay_composition_unref (compo);
    gst_video_overlay_composition_unref (upstream_compo_meta->overlay);
    upstream_compo_meta->overlay = merged_compo;
  } else if (self->attach_compo_to_buffer) {
    GST_DEBUG_OBJECT (self->sinkpad, "Attaching as meta");

    buffer = gst_buffer_make_writable (buffer);
    gst_buffer_add_video_overlay_composition_meta (buffer, compo);
    gst_video_overlay_composition_unref (compo);
  } else {
    /* Blend directly into the frame pixels */
    GstVideoFrame frame;

    buffer = gst_buffer_make_writable (buffer);
    if (!gst_video_frame_map (&frame, &self->info, buffer, GST_MAP_READWRITE)) {
      gst_video_overlay_composition_unref (compo);
      goto map_failed;
    }

    gst_video_overlay_composition_blend (compo, &frame);
    gst_video_frame_unmap (&frame);
    gst_video_overlay_composition_unref (compo);
  }

  return gst_pad_push (self->srcpad, buffer);

map_failed:
  {
    GST_ERROR_OBJECT (self->sinkpad, "Failed to map buffer");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
/* Plugin entry point: registers the single "overlaycomposition" element */
static gboolean
plugin_init (GstPlugin * plugin)
{
  return gst_element_register (plugin, "overlaycomposition", GST_RANK_NONE,
      GST_TYPE_OVERLAY_COMPOSITION);
}

GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    overlaycomposition,
    "Renders overlays on top of video frames",
    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

View file

@ -0,0 +1,64 @@
/* GStreamer
* Copyright (C) 2018 Sebastian Dröge <sebastian@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include <gst/gst.h>
#include <gst/video/video.h>

/* NOTE(review): the includes above sit outside the include guard —
 * harmless, but conventionally they would go inside it */
#ifndef __GST_OVERLAY_COMPOSITION_H__
#define __GST_OVERLAY_COMPOSITION_H__

G_BEGIN_DECLS

/* Standard GObject boilerplate macros for the GstOverlayComposition type */
#define GST_TYPE_OVERLAY_COMPOSITION \
  (gst_overlay_composition_get_type())
#define GST_OVERLAY_COMPOSITION(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_OVERLAY_COMPOSITION,GstOverlayComposition))
#define GST_OVERLAY_COMPOSITION_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_OVERLAY_COMPOSITION,GstOverlayCompositionClass))
#define GST_IS_OVERLAY_COMPOSITION(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_OVERLAY_COMPOSITION))
#define GST_IS_OVERLAY_COMPOSITION_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_OVERLAY_COMPOSITION))

typedef struct _GstOverlayComposition GstOverlayComposition;
typedef struct _GstOverlayCompositionClass GstOverlayCompositionClass;

struct _GstOverlayComposition {
  GstElement parent;

  GstPad *sinkpad, *srcpad;

  /* state */
  /* cached sample handed to the "draw" signal (buffer/caps/segment) */
  GstSample *sample;
  /* segment from the most recent SEGMENT event */
  GstSegment segment;
  /* current sink caps */
  GstCaps *caps;
  /* video info parsed from the sink caps */
  GstVideoInfo info;
  /* downstream render window size from the allocation query, or 0 */
  guint window_width, window_height;
  /* TRUE: attach composition as a meta; FALSE: blend into the frame */
  gboolean attach_compo_to_buffer;
};

struct _GstOverlayCompositionClass {
  GstElementClass parent_class;
};

GType gst_overlay_composition_get_type (void);

G_END_DECLS

#endif /* __GST_OVERLAY_COMPOSITION_H__ */

View file

@ -0,0 +1,9 @@
gstoverlaycomposition = library('gstoverlaycomposition',
'gstoverlaycomposition.c',
c_args : gst_plugins_base_args,
include_directories: [configinc, libsinc],
dependencies : [video_dep],
install : true,
install_dir : plugins_install_dir,
)
pkgconfig.generate(gstoverlaycomposition, install_dir : plugins_pkgconfig_install_dir)

View file

@ -37,6 +37,7 @@ option('audioresample', type : 'feature', value : 'auto')
option('audiotestsrc', type : 'feature', value : 'auto')
option('encoding', type : 'feature', value : 'auto')
option('gio', type : 'feature', value : 'auto')
option('overlaycomposition', type : 'feature', value : 'auto')
option('pbtypes', type : 'feature', value : 'auto')
option('playback', type : 'feature', value : 'auto')
option('rawparse', type : 'feature', value : 'auto')

View file

@ -220,6 +220,12 @@ else
check_audioresample =
endif
if USE_PLUGIN_OVERLAYCOMPOSITION
check_overlaycomposition = elements/overlaycomposition
else
check_overlaycomposition =
endif
if HAVE_CXX
cxx_checks = libs/gstlibscpp
else
@ -271,6 +277,7 @@ check_PROGRAMS = \
$(check_gl) \
$(check_ogg) \
$(check_opus) \
$(check_overlaycomposition) \
$(check_pango) \
$(check_playback) \
$(check_rawparse) \
@ -748,6 +755,16 @@ elements_volume_CFLAGS = \
$(GST_BASE_CFLAGS) \
$(AM_CFLAGS)
elements_overlaycomposition_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) \
$(AM_CFLAGS)
elements_overlaycomposition_LDADD = \
$(top_builddir)/gst-libs/gst/video/libgstvideo-@GST_API_VERSION@.la \
$(GST_BASE_LIBS) \
$(LDADD)
elements_vorbisdec_LDADD = \
$(LDADD) \
$(VORBIS_LIBS) \

View file

@ -27,6 +27,7 @@ videoconvert
videoscale
videoscale-[1-6]
vorbistag
overlaycomposition
playbin
playbin-compressed
playbin-complex

View file

@ -0,0 +1,339 @@
/* GStreamer
*
* Copyright (C) 2018 Sebastian Dröge <sebastian@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/check/gstcheck.h>
#include <gst/check/gstharness.h>
#include <gst/video/video.h>
#define VIDEO_WIDTH 320
#define VIDEO_HEIGHT 240
#define OVERLAY_WIDTH 16
#define OVERLAY_HEIGHT 16
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define VIDEO_FORMAT_STR "BGRA"
#define VIDEO_FORMAT GST_VIDEO_FORMAT_BGRA
#else
#define VIDEO_FORMAT_STR "ARGB"
#define VIDEO_FORMAT GST_VIDEO_FORMAT_ARGB
#endif
#define VIDEO_CAPS "video/x-raw, " \
"format = (string) " VIDEO_FORMAT_STR ", " \
"width = (int) 320, " \
"height = (int) 240, " \
"framerate = (fraction) 30/1"
#define VIDEO_CAPS_WITH_META "video/x-raw(" GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION "), " \
"format = (string) " VIDEO_FORMAT_STR ", " \
"width = (int) 320, " \
"height = (int) 240, " \
"framerate = (fraction) 30/1"
static GstBuffer *
create_video_frame (void)
{
GstBuffer *buffer;
GstMapInfo map;
guint i;
buffer = gst_buffer_new_and_alloc (VIDEO_WIDTH * VIDEO_HEIGHT * 4);
gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE, VIDEO_FORMAT,
VIDEO_WIDTH, VIDEO_HEIGHT);
gst_buffer_map (buffer, &map, GST_MAP_READWRITE);
for (i = 0; i < map.size; i += 4)
GST_WRITE_UINT32_LE (map.data + i, 0xff000000);
gst_buffer_unmap (buffer, &map);
return buffer;
}
/* Allocates a VIDEO_WIDTH x VIDEO_HEIGHT buffer, attaches a GstVideoMeta
 * and fills every 32-bit pixel with @color (written little-endian).
 * NOTE(review): the buffer is video-sized even though the tests only
 * display an OVERLAY_WIDTH x OVERLAY_HEIGHT region of it. */
static GstBuffer *
create_overlay_frame (guint32 color)
{
  GstBuffer *buf;
  GstMapInfo info;
  guint offset;

  buf = gst_buffer_new_and_alloc (VIDEO_WIDTH * VIDEO_HEIGHT * 4);
  gst_buffer_add_video_meta (buf, GST_VIDEO_FRAME_FLAG_NONE, VIDEO_FORMAT,
      VIDEO_WIDTH, VIDEO_HEIGHT);

  gst_buffer_map (buf, &info, GST_MAP_READWRITE);
  for (offset = 0; offset < info.size; offset += 4)
    GST_WRITE_UINT32_LE (info.data + offset, color);
  gst_buffer_unmap (buf, &info);

  return buf;
}
/* Shared state between the caps-changed and draw signal callbacks. */
typedef struct
{
  gboolean valid;               /* TRUE once caps-changed has filled 'info' */
  GstVideoInfo info;            /* video info parsed from the negotiated caps */
  guint expected_window_width, expected_window_height;  /* checked in caps-changed */
  GstVideoOverlayComposition *comp;     /* composition returned from "draw" */
} State;
/* "caps-changed" signal handler: verifies the render window size reported
 * by the element and caches the negotiated video info for later draws. */
static void
on_caps_changed (GstElement * element, GstCaps * caps, guint window_width,
    guint window_height, State * s)
{
  fail_unless_equals_int (s->expected_window_width, window_width);
  fail_unless_equals_int (s->expected_window_height, window_height);

  fail_unless (gst_video_info_from_caps (&s->info, caps));
  s->valid = TRUE;
}
/* "draw" signal handler: returns an extra reference to the prepared
 * composition for every frame (the element consumes the ref). */
static GstVideoOverlayComposition *
on_draw (GstElement * element, GstSample * sample, State * s)
{
  /* caps-changed must have fired before the first draw */
  fail_unless (s->valid);
  fail_unless (GST_IS_SAMPLE (sample));

  return gst_video_overlay_composition_ref (s->comp);
}
/* Fallback blending path: downstream does not advertise support for
 * GstVideoOverlayCompositionMeta, so the element must blend the overlay
 * into the video pixels itself.  A 50%-alpha white 16x16 rectangle at
 * (32,32) over an opaque black frame must blend to 0xff808080. */
GST_START_TEST (render_fallback)
{
  GstHarness *h;
  GstVideoOverlayComposition *comp;
  GstVideoOverlayRectangle *rect;
  GstBuffer *buffer, *overlay;
  State s = { 0, };
  GstMapInfo map;
  guint x, y;

  h = gst_harness_new ("overlaycomposition");
  g_signal_connect (h->element, "draw", G_CALLBACK (on_draw), &s);
  g_signal_connect (h->element, "caps-changed", G_CALLBACK (on_caps_changed),
      &s);

  buffer = create_video_frame ();
  /* half-transparent white pixels (0x80 alpha) */
  overlay = create_overlay_frame (0x80ffffff);
  rect =
      gst_video_overlay_rectangle_new_raw (overlay, 32, 32, OVERLAY_WIDTH,
      OVERLAY_HEIGHT, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
  gst_buffer_unref (overlay);
  comp = gst_video_overlay_composition_new (rect);
  gst_video_overlay_rectangle_unref (rect);
  s.comp = comp;
  s.expected_window_width = VIDEO_WIDTH;
  s.expected_window_height = VIDEO_HEIGHT;

  gst_harness_set_src_caps_str (h, VIDEO_CAPS);
  buffer = gst_harness_push_and_pull (h, buffer);

  /* verify every pixel: blended grey inside the rectangle, untouched
   * black everywhere else */
  gst_buffer_map (buffer, &map, GST_MAP_READ);
  fail_unless_equals_int (map.size, VIDEO_WIDTH * VIDEO_HEIGHT * 4);
  for (y = 0; y < VIDEO_HEIGHT; y++) {
    for (x = 0; x < VIDEO_WIDTH; x++) {
      guint32 val = GST_READ_UINT32_LE (map.data + y * VIDEO_WIDTH * 4 + x * 4);
      guint32 expected_val;
      if ((x >= 32 && x < 48) && (y >= 32 && y < 48)) {
        expected_val = 0xff808080;
      } else {
        expected_val = 0xff000000;
      }
      fail_unless (val == expected_val, "Expected %08x but got %08x at (%u,%u)",
          expected_val, val, x, y);
    }
  }
  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);
  gst_video_overlay_composition_unref (s.comp);
  gst_harness_teardown (h);
}

GST_END_TEST;
/* Same fallback blending path as render_fallback, but with a composition
 * holding two fully opaque rectangles; each must replace the black frame
 * pixels at its own position without affecting the rest. */
GST_START_TEST (render_fallback_2)
{
  GstHarness *h;
  GstVideoOverlayComposition *comp;
  GstVideoOverlayRectangle *rect;
  GstBuffer *buffer, *overlay;
  State s = { 0, };
  GstMapInfo map;
  guint x, y;

  h = gst_harness_new ("overlaycomposition");
  g_signal_connect (h->element, "draw", G_CALLBACK (on_draw), &s);
  g_signal_connect (h->element, "caps-changed", G_CALLBACK (on_caps_changed),
      &s);

  /* first opaque rectangle at (32,32) */
  overlay = create_overlay_frame (0xffff0000);
  rect =
      gst_video_overlay_rectangle_new_raw (overlay, 32, 32, OVERLAY_WIDTH,
      OVERLAY_HEIGHT, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
  gst_buffer_unref (overlay);
  comp = gst_video_overlay_composition_new (rect);
  gst_video_overlay_rectangle_unref (rect);

  /* second opaque rectangle at (64,64), added to the same composition */
  overlay = create_overlay_frame (0xff0000ff);
  rect =
      gst_video_overlay_rectangle_new_raw (overlay, 64, 64, OVERLAY_WIDTH,
      OVERLAY_HEIGHT, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
  gst_buffer_unref (overlay);
  gst_video_overlay_composition_add_rectangle (comp, rect);
  gst_video_overlay_rectangle_unref (rect);
  s.comp = comp;
  s.expected_window_width = VIDEO_WIDTH;
  s.expected_window_height = VIDEO_HEIGHT;

  gst_harness_set_src_caps_str (h, VIDEO_CAPS);
  buffer = create_video_frame ();
  buffer = gst_harness_push_and_pull (h, buffer);

  /* opaque pixels pass through unchanged inside each rectangle;
   * everything else stays black */
  gst_buffer_map (buffer, &map, GST_MAP_READ);
  fail_unless_equals_int (map.size, VIDEO_WIDTH * VIDEO_HEIGHT * 4);
  for (y = 0; y < VIDEO_HEIGHT; y++) {
    for (x = 0; x < VIDEO_WIDTH; x++) {
      guint32 val = GST_READ_UINT32_LE (map.data + y * VIDEO_WIDTH * 4 + x * 4);
      guint32 expected_val;
      if ((x >= 32 && x < 48) && (y >= 32 && y < 48)) {
        expected_val = 0xffff0000;
      } else if ((x >= 64 && x < 80) && (y >= 64 && y < 80)) {
        expected_val = 0xff0000ff;
      } else {
        expected_val = 0xff000000;
      }
      fail_unless (val == expected_val, "Expected %08x but got %08x at (%u,%u)",
          expected_val, val, x, y);
    }
  }
  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);
  gst_video_overlay_composition_unref (s.comp);
  gst_harness_teardown (h);
}

GST_END_TEST;
/* Meta pass-through path: the harness advertises support for
 * GstVideoOverlayCompositionMeta in the allocation query, so the element
 * must attach the composition as a meta instead of blending it -- the
 * video pixels stay untouched and the meta carries the composition. */
GST_START_TEST (render_meta)
{
  GstHarness *h;
  GstVideoOverlayComposition *comp;
  GstVideoOverlayRectangle *rect;
  GstBuffer *buffer, *overlay;
  State s = { 0, };
  GstMapInfo map;
  guint x, y;
  GstVideoOverlayCompositionMeta *meta;

  h = gst_harness_new ("overlaycomposition");
  g_signal_connect (h->element, "draw", G_CALLBACK (on_draw), &s);
  g_signal_connect (h->element, "caps-changed", G_CALLBACK (on_caps_changed),
      &s);

  /* composition with two opaque rectangles, as in render_fallback_2 */
  overlay = create_overlay_frame (0xffff0000);
  rect =
      gst_video_overlay_rectangle_new_raw (overlay, 32, 32, OVERLAY_WIDTH,
      OVERLAY_HEIGHT, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
  gst_buffer_unref (overlay);
  comp = gst_video_overlay_composition_new (rect);
  gst_video_overlay_rectangle_unref (rect);
  overlay = create_overlay_frame (0xff0000ff);
  rect =
      gst_video_overlay_rectangle_new_raw (overlay, 64, 64, OVERLAY_WIDTH,
      OVERLAY_HEIGHT, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
  gst_buffer_unref (overlay);
  gst_video_overlay_composition_add_rectangle (comp, rect);
  gst_video_overlay_rectangle_unref (rect);
  s.comp = comp;
  s.expected_window_width = VIDEO_WIDTH;
  s.expected_window_height = VIDEO_HEIGHT;

  /* this is what switches the element into the meta (no-blend) mode */
  gst_harness_add_propose_allocation_meta (h,
      GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL);
  gst_harness_set_src_caps_str (h, VIDEO_CAPS);
  buffer = create_video_frame ();
  buffer = gst_harness_push_and_pull (h, buffer);

  /* every pixel must still be the original opaque black */
  gst_buffer_map (buffer, &map, GST_MAP_READ);
  fail_unless_equals_int (map.size, VIDEO_WIDTH * VIDEO_HEIGHT * 4);
  for (y = 0; y < VIDEO_HEIGHT; y++) {
    for (x = 0; x < VIDEO_WIDTH; x++) {
      guint32 val = GST_READ_UINT32_LE (map.data + y * VIDEO_WIDTH * 4 + x * 4);
      guint32 expected_val = 0xff000000;
      fail_unless (val == expected_val, "Expected %08x but got %08x at (%u,%u)",
          expected_val, val, x, y);
    }
  }
  gst_buffer_unmap (buffer, &map);

  /* the meta must reference the very composition returned from "draw" */
  meta = gst_buffer_get_video_overlay_composition_meta (buffer);
  fail_unless (meta);
  fail_unless (meta->overlay == s.comp);
  gst_buffer_unref (buffer);
  gst_video_overlay_composition_unref (s.comp);
  gst_harness_teardown (h);
}

GST_END_TEST;
/* Registers all overlaycomposition tests in a single "general" test case. */
static Suite *
overlaycomposition_suite (void)
{
  Suite *suite = suite_create ("overlaycomposition");
  TCase *tc_general = tcase_create ("general");

  suite_add_tcase (suite, tc_general);

  tcase_add_test (tc_general, render_fallback);
  tcase_add_test (tc_general, render_fallback_2);
  tcase_add_test (tc_general, render_meta);

  return suite;
}

GST_CHECK_MAIN (overlaycomposition);

View file

@ -43,6 +43,7 @@ base_tests = [
[ 'elements/multifdsink.c', not core_conf.has('HAVE_SYS_SOCKET_H') or not core_conf.has('HAVE_UNISTD_H') ],
# FIXME: multisocketsink test on windows/msvc
[ 'elements/multisocketsink.c', not core_conf.has('HAVE_SYS_SOCKET_H') or not core_conf.has('HAVE_UNISTD_H') ],
[ 'elements/overlaycomposition.c' ],
[ 'elements/playbin.c' ],
[ 'elements/playbin-complex.c', not ogg_dep.found() ],
[ 'elements/playsink.c' ],

View file

@ -8,8 +8,8 @@ else
GL_DIR=
endif
SUBDIRS = app audio decodebin_next dynamic fft gio $(GL_DIR) $(GTK_SUBDIRS) overlay playrec encoding
DIST_SUBDIRS = app audio dynamic decodebin_next fft gio gl playback overlay seek snapshot playrec encoding
SUBDIRS = app audio decodebin_next dynamic fft gio $(GL_DIR) $(GTK_SUBDIRS) overlay overlaycomposition playrec encoding
DIST_SUBDIRS = app audio dynamic decodebin_next fft gio gl playback overlay overlaycomposition seek snapshot playrec encoding
include $(top_srcdir)/common/parallel-subdirs.mak

View file

@ -9,6 +9,7 @@ if build_gstgl
subdir('gl')
endif
subdir('overlay')
subdir('overlaycomposition')
subdir('playback')
subdir('playrec')
subdir('seek')

View file

@ -0,0 +1 @@
overlaycomposition

View file

@ -0,0 +1,5 @@
# Example program; built for local testing only, never installed.
noinst_PROGRAMS = overlaycomposition

overlaycomposition_SOURCES = overlaycomposition.c
overlaycomposition_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(GIO_CFLAGS)
# Links libgstvideo for the overlay composition API, GIO for the
# gzip/base64 logo decoding, and libm for sin()/sqrt().
overlaycomposition_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_API_VERSION) $(GST_LIBS) $(GIO_LIBS) $(LIBM)

View file

@ -0,0 +1,5 @@
# overlaycomposition example: animates a logo over videotestsrc output.
# Built for local testing only, never installed.
# Fix: this module is gst-plugins-base, so use gst_plugins_base_args
# (gst_plugins_good_args is not defined in this build and matches the
# plugin's own meson.build which already uses the -base args).
executable('overlaycomposition', 'overlaycomposition.c',
  dependencies: [video_dep, gst_dep, gio_dep, libm],
  c_args : gst_plugins_base_args,
  include_directories : [configinc],
  install: false)

View file

@ -0,0 +1,316 @@
/* GStreamer
* Copyright (C) 2014 Tim-Philipp Müller <tim centricular com>
* Copyright (C) 2018 Sebastian Dröge <sebastian@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/video/video.h>
#include <math.h>
#include <gio/gio.h>
#define VIDEO_WIDTH 720
#define VIDEO_HEIGHT 480
#define VIDEO_FPS 50
/* GdkPixbuf RGBA C-Source image dump from gdk-pixbuf-csource --raw,
* gzipped and then base64 encoded */
const gchar gzipped_pixdata_base64[] =
"H4sICPX/Z1QAA2xvZ28ucGl4AO2dsZHrNhCG+ewK2II64ClyrhmnTtSBh4kLUOLQAUuwEhSgFtiA"
"A7agwA2wBT5AXJ5w4P5LgKLEO97ezDf2SCAIAftjFwuQ7/c///ojy/77+8eP7Jcs+/WfLMv+t/zW"
"dV32pSmK3HK6sXZbFOXV9PZfWipLbWksHdHQZxVpZP+Ee7u62/d7rt0fivIqimJnOX+w/zhauq68"
"aWjevfdUR1h3vXq/KMqzufueFN1J1FE+stf8KfCzobZ3q/ePojyT3v8gDSyB09GFND7g/N014rpl"
"41xF+Wz0/i817nwFjepP+Rb0PnDwOUOehePyQq1eurlrSkXZOkVx7OblbGK43upf+zcqyldhOT22"
"5GvV9ynKXIriQDpKye242Ldcve2KskX6deaRWVeebnpdu32Koryc4t/iaKksmm9Vvg3W3neWk+Vs"
"qS2tpSOu9NmZtLG4f7R15paS7jXct1q7XxTl2ZDd157dp1A/6q9I+1Wg+QH1g8pmcb7M0szUHseV"
"/KTTtJg39XyudH/NASmbhfzOUtqTNHmhew1cglhT9ad8O16kv7m4eFT3/pVNQ77IX8shLgW/RnsW"
"Li7V5y4UJaDo9wnOT9Sjq1fzn4oSQXHft4tZ00Vpr5jI3yiKwmO1sy/63GZKzNpS+dVyLsaYneVk"
"Ud+rbIqi32M/kC7DtaXzn6vt9Vm95ZbS0lg64rx2nynKdyHQ3oC+A06ZxdtbkztWundpqS1fKo5j"
"9OfQ8+jKJNbW987eye47QGO5WE6u/BPakJP2rt4927X7JhanNaBBzQUpEMbmU2hJk66O2ftt7lrS"
"f8vc4yl+0P7llspyWapOl3th9Hdde4yVz4m17eMD2pP8ZBXjI0l3J7oG1Rf1DlH7dyBOpKuB2+dB"
"2ZzKXS3ugw9+lnKZB49oH2bL1owGH9K4vX5P7Yg+0+O1/Uj/XSRm8ep9Ws6LclrRfe+N1+xYIxjz"
"49Jrh6D+m68inSypPeQja7pXRZp3/z1P6M7XM+xX+7eznElHc3E63Hv2xWmoo89H4yLEnpDg+orq"
"DsnJFq7oWqYtLhd7sbTg3i3dL2VO2Xlt5Oo7DzY1Uc8VtOng9eMZtP0ctpnmJa787Tcm/jbUtmHc"
"xfmG+p0bwz0R9t1hYr33WTgj/ZEfe1R7HfnJ3NNCjIbKoP8f1SBn2w2Nq3htYI9o7uBopnRI9nmO"
"rM/ZveijhWulee+DFrx2xZSHe0Gmn99ix5sd96A+1E97w88pB8bX1ABubfYK4DxG+msYPbWWksqU"
"gu5a0u/O68PQ3t99hWF8kW9vC2gQaWTyWtD2sB5UVzOhGeRLJR2y/pB0k9RHgCqxXaP2kP5Qn9TU"
"n+He7rumwO/j5oSr0FanQZf/jIp33+650qXXjSj2FP2+y58AbYVrPs5PjuyOxsTvq5H+yYb8Mofg"
"u8rgmKb2vq/C+lPtMLj2KOjBbyPSqTS3N9T2Y/D5SbAtdt1r4uapoZ9ife+VroHzlQn2ZCf0dw7K"
"7rk2Cn2VMo6z15pP1KPzt5N72LT+4/Q36hv7d+R8INN/oR2zdunZEcyvANuEPj3SNlkNmvHcIY6x"
"4eeHWbkiSfugvBT3jeLYifKdCdaHggbC+Q7Vi+YOriznW5PG8BENAj3G5FYQTsslWvcxuqqABke6"
"oXzoqGyEfcC1kunnXRRvcfOm2N9mOo4szX1df/DnB4P9BbsOMmANNXf8F9TgaB1p5LlppBehfr+/"
"0PiwuhJ+Y+hbpXpbGsNbLtTcc66LPzP31u+rDznPesJPDnnS5Ny2oMGRnSdoEOVERjpE2qTvTmAc"
"pGuQ7Yg5EyP7QDY3gjQozREzNIhiNXTv1Ni1BeOCyvvxOJqzUJtzUD6MWaX1wKd4P9HbgmfdhFwL"
"p0FOr9x6EOngmtKHpt8TGNUxcQ2yTXGeNLL/lHz4Qxo09/0tNN8gH4zainIcyK7ZuF7Q4D6iDal1"
"1kE5ZD+bfD+DsB48BuXy7L7//mEvgulrKWd3iyUi7ZPzS+J6C1wz+XyFkfMWB0D02tHrl2FfMDYX"
"yfUvitXg2QvBrlHMyM4JEW3oqC+5/orymwbEUWtr5ck65PKidVCGy4leh71AZgwlv9JN6VDQsZSP"
"QddM+iUw7nM5BHWn7jWKbRf6VsrJcjEFtGug2dr7HvntOYQa5OanTT8jY//2tMcXauxCuVBOo+9n"
"YYRxnNIhvN7g2AnGI+iamD4w6bnwqN810QctaUM6V8LlDJFPk9a7qfllzmdVEW2Yg18vmkc/xTrw"
"yTqU9uDD/fgK+b9EHcK1nTDGcF0Hrok6Gyu00c33KBZlifztYY6fPbcG2poUqwl2LeWXuXv4OVEp"
"95XUX/6Yoj5bWx8v0F8OfN3g72rS3TFWewk6RHuHXOwkjoXhfVnsOcdFc5xUJ/Jto7gKlEX5xUlN"
"x/T/RNvFvhA0GDXnCfedPY9+ZUhjvu4Wfy5P0CHKoXGaEMci1TYj7sfqJbI+uBfwSNuFelPPXkp+"
"k823BGXQWuGh3AmYRzf9zpKMOfvyYB8uEiuC8ZXO00zuZ020G83rs8Y/xU8gmwcaRPVKa+uk/LLh"
"8y1NUEbKfc+aww3eP/xS73xIBez5XbKPzwuW3H4h6Ec3j6E9ba6PkR9MGgvBNmPbLeXa58TfKRqM"
"brtJPJuTom/vGm4dwJ2lQXmsWe/cM4l7mFtBOCcjPR+B9iMGW4LnGWLHa4bdzNqbj6wj2QYEXYX+"
"JPWMKlcOvpdEaIcUu0blUA1eX7Bnbx7os02/N4iLRSMY7QuacWwSc24YjhXQgxQ/sT4s0QbQs2kO"
"dl1o+hiYs09p/8x/7kPaqw/3GZGfkPbmka9C5wiQ30RzJdp3R2cTh3MKozk6ta1bgfYGUzU42sMX"
"xsP1q4ttuLxfci7Ps18/RwfPI6f2x4QOG087/m8arRlN2vN9yP+G5yiTcpETbUBxCpo7kGZzof3D"
"OwGG56ca7/NwDkdrwc1qUNiXT4Hrx9hn1WLOj0n7wDF7Vcka9Gw39lyL9Gz51P78ebAv1G9BfWiv"
"A2kQ+mKhzbP2aEz8s8DsmWGDfbxjc/+OENDfib4b3uNUejmZqGd8g/5EY3k1CWt2cz9nONR380XM"
"+HPvHlnivU/ce19aA96FA/TsP2984q6jcpeg/f75kTyoxwfFiSW6ZkJLHDHvt8np93Hv3xliCLT2"
"OIEx3OTeILMf2EztCYKYNcYG2fMQivKdYbQUpY1s/M6ZTcbpivJMMv5Zpdhn7mdpV1GUO1l/NjTU"
"Usx7Z8Iz3ZuM0xXlFWT8s4CsDkmzJZPD0ThUUWaSye8THZ6RqOj/uf2L1f79T0XZCpn8vJJ0Pkb9"
"n6IsCOVoBp/HvS+moe83+T4dRVEUZRmyn2F9swl9yAAA";
/* Logo pixels (raw RGBA with GstVideoMeta); created once in main(). */
static GstBuffer *logo_buf;
/* Quit from the bus handler on EOS or error. */
static GMainLoop *main_loop;
/* Frame counter driving the logo's animated position. */
static gint count;
/* Decodes the embedded base64 + gzip GdkPixbuf "pixdata" dump into a
 * GstBuffer of raw pixels with a GstVideoMeta attached.  Aborts via
 * g_assert() on any decode failure (acceptable for example code).
 * Returns a new buffer owned by the caller. */
static GstBuffer *
create_overlay_buffer (void)
{
  GZlibDecompressor *decompress;
  GConverterResult decomp_res;
  guchar *gzipped_pixdata, *pixdata;
  gsize gzipped_size, bytes_read, pixdata_size;
  GstBuffer *logo_pixels;
  guint w, h, stride;

  gzipped_pixdata = g_base64_decode (gzipped_pixdata_base64, &gzipped_size);
  g_assert (gzipped_pixdata != NULL);

  /* 64 kB fixed output buffer -- assumes the embedded logo never
   * decompresses to more than that; TODO confirm if the image changes */
  pixdata = g_malloc (64 * 1024);
  decompress = g_zlib_decompressor_new (G_ZLIB_COMPRESSOR_FORMAT_GZIP);
  decomp_res = g_converter_convert (G_CONVERTER (decompress),
      gzipped_pixdata, gzipped_size, pixdata, 64 * 1024,
      G_CONVERTER_INPUT_AT_END, &bytes_read, &pixdata_size, NULL);
  g_assert (decomp_res == G_CONVERTER_FINISHED);
  g_assert (bytes_read == gzipped_size);
  g_free (gzipped_pixdata);
  g_object_unref (decompress);

  /* Parse the GdkPixbuf pixdata header (all fields big-endian):
   * 0: Pixbuf magic (0x47646b50) */
  g_assert (GST_READ_UINT32_BE (pixdata) == 0x47646b50);
  /* 4: length incl. header */
  /* 8: pixdata_type */
  /* 12: rowstride (900) */
  stride = GST_READ_UINT32_BE (pixdata + 12);
  /* 16: width (225) */
  w = GST_READ_UINT32_BE (pixdata + 16);
  /* 20: height (57) */
  h = GST_READ_UINT32_BE (pixdata + 20);
  /* 24: pixel_data */
  GST_LOG ("%dx%d @ %d", w, h, stride);
  /* we assume that the last line also has padding at the end */
  g_assert (pixdata_size - 24 >= h * stride);

  /* copy the pixel payload into a GstBuffer and describe it with a
   * video meta so it can be used as overlay rectangle pixels */
  logo_pixels = gst_buffer_new_and_alloc (h * stride);
  gst_buffer_fill (logo_pixels, 0, pixdata + 24, h * stride);
  gst_buffer_add_video_meta (logo_pixels, GST_VIDEO_FRAME_FLAG_NONE,
      GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, w, h);
  g_free (pixdata);

  return logo_pixels;
}
/* Bus watch callback: logs errors and warnings with the emitting
 * element's path, quits the main loop on error or EOS.  Always returns
 * TRUE so the signal watch stays installed. */
static gboolean
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;
      gchar *name, *debug = NULL;

      name = gst_object_get_path_string (message->src);
      gst_message_parse_error (message, &err, &debug);

      g_printerr ("ERROR: from element %s: %s\n", name, err->message);
      if (debug != NULL)
        g_printerr ("Additional debug info:\n%s\n", debug);

      g_error_free (err);
      g_free (debug);
      g_free (name);

      /* errors are fatal for this example */
      g_main_loop_quit (main_loop);
      break;
    }
    case GST_MESSAGE_WARNING:{
      GError *err = NULL;
      gchar *name, *debug = NULL;

      name = gst_object_get_path_string (message->src);
      gst_message_parse_warning (message, &err, &debug);

      /* Fix: this branch handles warnings but printed "ERROR:"
       * (copy-paste from the error branch above). */
      g_printerr ("WARNING: from element %s: %s\n", name, err->message);
      if (debug != NULL)
        g_printerr ("Additional debug info:\n%s\n", debug);

      g_error_free (err);
      g_free (debug);
      g_free (name);
      break;
    }
    case GST_MESSAGE_EOS:
      g_print ("Got EOS\n");
      g_main_loop_quit (main_loop);
      break;
    default:
      break;
  }

  return TRUE;
}
/* Shared between the caps-changed and draw callbacks. */
typedef struct
{
  gboolean valid;               /* TRUE once 'info' holds the negotiated caps */
  GstVideoInfo info;            /* video info parsed from the negotiated caps */
} OverlayState;
/* "caps-changed" signal handler: caches the negotiated video info so
 * draw_overlay() knows the frame geometry before producing compositions. */
static void
prepare_overlay (GstElement * overlay, GstCaps * caps, gint window_width,
    gint window_height, gpointer user_data)
{
  OverlayState *state = (OverlayState *) user_data;

  /* valid mirrors the parse result directly */
  state->valid = gst_video_info_from_caps (&state->info, caps);
}
#define SPEED_SCALE_FACTOR (VIDEO_FPS * 4)
/* nicked from videotestsrc's ball pattern renderer */
/* Computes the logo's top-left position (@x, @y) for frame number @n,
 * bouncing it around the video with two sine waves of irrational
 * frequency ratio so the path never repeats exactly.  The coordinates
 * may go slightly negative / past the right and bottom edges so the logo
 * slides partially off-screen. */
static void
calculate_position (gint * x, gint * y, guint logo_w, guint logo_h, guint n)
{
  guint margin_x = logo_w / 2;
  guint margin_y = logo_h / 2;
  guint span_w = VIDEO_WIDTH + logo_w;
  guint span_h = VIDEO_HEIGHT + logo_h;
  /* both phases oscillate in [0, 1] */
  gdouble phase_x = 0.5 + 0.5 * sin (2 * G_PI * n / SPEED_SCALE_FACTOR);
  gdouble phase_y =
      0.5 + 0.5 * sin (2 * G_PI * sqrt (2) * n / SPEED_SCALE_FACTOR);

  *x = margin_x + phase_x * (span_w - 2 * margin_x);
  *y = margin_y + phase_y * (span_h - 2 * margin_y);

  /* shift from center-based to top-left coordinates */
  *x -= logo_w;
  *y -= logo_h;
}
/* "draw" signal handler: builds a fresh one-rectangle composition that
 * places the logo at its animated position for this frame.  Returns NULL
 * (draw nothing) until caps have been negotiated.  The returned
 * composition reference is consumed by the element. */
static GstVideoOverlayComposition *
draw_overlay (GstElement * overlay, GstSample * sample, gpointer user_data)
{
  OverlayState *state = (OverlayState *) user_data;
  GstVideoOverlayRectangle *rectangle;
  GstVideoOverlayComposition *composition;
  GstVideoMeta *meta;
  gint pos_x, pos_y;

  if (!state->valid)
    return NULL;

  /* logo dimensions come from the meta attached in create_overlay_buffer() */
  meta = gst_buffer_get_video_meta (logo_buf);

  calculate_position (&pos_x, &pos_y, meta->width, meta->height, ++count);

  rectangle = gst_video_overlay_rectangle_new_raw (logo_buf, pos_x, pos_y,
      meta->width, meta->height, GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
  composition = gst_video_overlay_composition_new (rectangle);
  gst_video_overlay_rectangle_unref (rectangle);

  return composition;
}
/* Builds videotestsrc ! capsfilter ! overlaycomposition ! videoconvert !
 * autovideosink and animates the embedded logo over the test pattern via
 * the overlaycomposition "draw" / "caps-changed" signals.  Runs until
 * EOS or error. */
int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstElement *src, *capsfilter, *overlay, *conv, *sink;
  GstBus *bus;
  GstCaps *filter_caps;
  OverlayState overlay_state = { 0, };

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new (NULL);
  src = gst_element_factory_make ("videotestsrc", NULL);
  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  overlay = gst_element_factory_make ("overlaycomposition", NULL);
  conv = gst_element_factory_make ("videoconvert", NULL);
  sink = gst_element_factory_make ("autovideosink", NULL);
  if (!pipeline || !src || !capsfilter || !overlay || !conv || !sink) {
    g_error ("Failed to create elements");
    return -1;
  }

  gst_bin_add_many (GST_BIN (pipeline), src, capsfilter, overlay, conv, sink,
      NULL);
  if (!gst_element_link_many (src, capsfilter, overlay, conv, sink, NULL)) {
    g_error ("Failed to link elements");
    return -2;
  }

  /* restrict the source to formats the overlay blending code supports,
   * at a fixed size/framerate matching the animation constants */
  filter_caps = gst_caps_from_string ("video/x-raw, format = "
      GST_VIDEO_OVERLAY_COMPOSITION_BLEND_FORMATS);
  gst_caps_set_simple (filter_caps,
      "width", G_TYPE_INT, VIDEO_WIDTH,
      "height", G_TYPE_INT, VIDEO_HEIGHT,
      "framerate", GST_TYPE_FRACTION, VIDEO_FPS, 1, NULL);
  g_object_set (capsfilter, "caps", filter_caps, NULL);
  gst_caps_unref (filter_caps);

  g_signal_connect (overlay, "draw", G_CALLBACK (draw_overlay), &overlay_state);
  g_signal_connect (overlay, "caps-changed",
      G_CALLBACK (prepare_overlay), &overlay_state);

  count = 0;
  logo_buf = create_overlay_buffer ();

  main_loop = g_main_loop_new (NULL, FALSE);

  /* the signal watch keeps its own reference to the bus, so ours can be
   * dropped right away */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (G_OBJECT (bus), "message", G_CALLBACK (message_cb), NULL);
  gst_object_unref (GST_OBJECT (bus));

  if (gst_element_set_state (pipeline,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
    g_error ("Failed to go into PLAYING state");
    return -3;
  }

  g_main_loop_run (main_loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  /* NOTE(review): logo_buf is never unreffed and the bus signal watch is
   * not removed -- harmless at process exit in an example program */
  g_main_loop_unref (main_loop);
  gst_object_unref (pipeline);

  return 0;
}