/* Generic video mixer plugin
 * Copyright (C) 2004, 2008 Wim Taymans <wim@fluendo.com>
 * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-videomixer
 *
 * Videomixer2 can accept AYUV, ARGB and BGRA video streams. For each of the
 * requested sink pads it will compare the incoming geometry and framerate to
 * define the output parameters. Output video frames will have the geometry of
 * the biggest incoming video stream and the framerate of the fastest incoming
 * one.
 *
 * Videomixer will do colorspace conversion.
 *
 * Individual parameters for each input stream can be configured on the
 * #GstVideoMixer2Pad.
 *
 * <refsect2>
 * <title>Sample pipelines</title>
 * |[
 * gst-launch-1.0 \
 *   videotestsrc pattern=1 ! \
 *   video/x-raw,format=AYUV,framerate=\(fraction\)10/1,width=100,height=100 ! \
 *   videobox border-alpha=0 top=-70 bottom=-70 right=-220 ! \
 *   videomixer name=mix sink_0::alpha=0.7 sink_1::alpha=0.5 ! \
 *   videoconvert ! xvimagesink \
 *   videotestsrc ! \
 *   video/x-raw,format=AYUV,framerate=\(fraction\)5/1,width=320,height=240 ! mix.
 * ]| A pipeline to demonstrate videomixer used together with videobox.
 * This should show a 320x240 pixels video test source with some transparency
 * showing the background checker pattern. Another video test source with just
 * the snow pattern of 100x100 pixels is overlaid on top of the first one on
 * the left, vertically centered, with a small transparency showing the first
 * video test source behind and the checker pattern under it. Note that the
 * framerate of the output video is 10 frames per second.
 * |[
 * gst-launch-1.0 videotestsrc pattern=1 ! \
 *   video/x-raw, framerate=\(fraction\)10/1, width=100, height=100 ! \
 *   videomixer name=mix ! videoconvert ! ximagesink \
 *   videotestsrc ! \
 *   video/x-raw, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
 * ]| A pipeline to demonstrate BGRA mixing. (This does not demonstrate alpha blending.)
 * |[
 * gst-launch-1.0 videotestsrc pattern=1 ! \
 *   video/x-raw,format=I420, framerate=\(fraction\)10/1, width=100, height=100 ! \
 *   videomixer name=mix ! videoconvert ! ximagesink \
 *   videotestsrc ! \
 *   video/x-raw,format=I420, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
 * ]| A pipeline to test I420.
 * |[
 * gst-launch-1.0 videomixer name=mixer sink_1::alpha=0.5 sink_1::xpos=50 sink_1::ypos=50 ! \
 *   videoconvert ! ximagesink \
 *   videotestsrc pattern=snow timestamp-offset=3000000000 ! \
 *   "video/x-raw,format=AYUV,width=640,height=480,framerate=(fraction)30/1" ! \
 *   timeoverlay ! queue2 ! mixer. \
 *   videotestsrc pattern=smpte ! \
 *   "video/x-raw,format=AYUV,width=800,height=600,framerate=(fraction)10/1" ! \
 *   timeoverlay ! queue2 ! mixer.
 * ]| A pipeline to demonstrate synchronized mixing (the second stream starts after 3 seconds).
 * </refsect2>
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <string.h>

#include "videomixer2.h"
#include "videomixer2pad.h"

#ifdef DISABLE_ORC
#define orc_memset memset
#else
#include <orc/orcfunctions.h>
#endif

GST_DEBUG_CATEGORY_STATIC (gst_videomixer2_debug);
#define GST_CAT_DEFAULT gst_videomixer2_debug
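
/* Convenience macros for the mixer's state lock and for the separate lock
 * used to serialize src caps renegotiation (see
 * gst_videomixer2_update_src_caps below). */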
#define GST_VIDEO_MIXER2_GET_LOCK(mix) \
  (&GST_VIDEO_MIXER2(mix)->lock)
#define GST_VIDEO_MIXER2_LOCK(mix) \
  (g_mutex_lock(GST_VIDEO_MIXER2_GET_LOCK (mix)))
#define GST_VIDEO_MIXER2_UNLOCK(mix) \
  (g_mutex_unlock(GST_VIDEO_MIXER2_GET_LOCK (mix)))
#define GST_VIDEO_MIXER2_GET_SETCAPS_LOCK(mix) \
  (&GST_VIDEO_MIXER2(mix)->setcaps_lock)
#define GST_VIDEO_MIXER2_SETCAPS_LOCK(mix) \
  (g_mutex_lock(GST_VIDEO_MIXER2_GET_SETCAPS_LOCK (mix)))
#define GST_VIDEO_MIXER2_SETCAPS_UNLOCK(mix) \
  (g_mutex_unlock(GST_VIDEO_MIXER2_GET_SETCAPS_LOCK (mix)))

#define FORMATS " { AYUV, BGRA, ARGB, RGBA, ABGR, Y444, Y42B, YUY2, UYVY, "\
                " YVYU, I420, YV12, NV12, NV21, Y41B, RGB, BGR, xRGB, xBGR, "\
                " RGBx, BGRx } "

static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
    );

static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%u",
    GST_PAD_SINK,
    GST_PAD_REQUEST,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
    );

static void gst_videomixer2_child_proxy_init (gpointer g_iface,
    gpointer iface_data);
static gboolean gst_videomixer2_push_sink_event (GstVideoMixer2 * mix,
    GstEvent * event);
static void gst_videomixer2_release_pad (GstElement * element, GstPad * pad);
static void gst_videomixer2_reset_qos (GstVideoMixer2 * mix);

struct _GstVideoMixer2Collect
{
  GstCollectData collect;       /* we extend the CollectData */

  GstVideoMixer2Pad *mixpad;

  GstBuffer *queued;            /* buffer for which we don't know the end time yet */
  GstVideoInfo queued_vinfo;

  GstBuffer *buffer;            /* buffer that should be blended now */
  GstVideoInfo buffer_vinfo;

  GstClockTime start_time;
  GstClockTime end_time;
};

#define DEFAULT_PAD_ZORDER 0
#define DEFAULT_PAD_XPOS   0
#define DEFAULT_PAD_YPOS   0
#define DEFAULT_PAD_ALPHA  1.0
enum
{
  PROP_PAD_0,
  PROP_PAD_ZORDER,
  PROP_PAD_XPOS,
  PROP_PAD_YPOS,
  PROP_PAD_ALPHA
};

G_DEFINE_TYPE (GstVideoMixer2Pad, gst_videomixer2_pad, GST_TYPE_PAD);

static void
gst_videomixer2_collect_free (GstCollectData * data)
{
  GstVideoMixer2Collect *cdata = (GstVideoMixer2Collect *) data;

  gst_buffer_replace (&cdata->buffer, NULL);
}

static gboolean gst_videomixer2_src_setcaps (GstPad * pad, GstVideoMixer2 * mix,
    GstCaps * caps);

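/* Recompute the source caps from the negotiated sink pads: the output picture
 * is made large enough for the biggest input (including its x/y offset) and
 * uses the fastest input framerate, falling back to 25/1 if none is known,
 * then fixates against what downstream can accept and pushes the new caps. */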
static gboolean
gst_videomixer2_update_src_caps (GstVideoMixer2 * mix)
{
  GSList *l;
  gint best_width = -1, best_height = -1;
  gdouble best_fps = -1, cur_fps;
  gint best_fps_n = -1, best_fps_d = -1;
  gboolean ret = TRUE;

  GST_VIDEO_MIXER2_SETCAPS_LOCK (mix);
  GST_VIDEO_MIXER2_LOCK (mix);

  for (l = mix->sinkpads; l; l = l->next) {
    GstVideoMixer2Pad *mpad = l->data;
    gint this_width, this_height;
    gint fps_n, fps_d;
    gint width, height;

    fps_n = GST_VIDEO_INFO_FPS_N (&mpad->info);
    fps_d = GST_VIDEO_INFO_FPS_D (&mpad->info);
    width = GST_VIDEO_INFO_WIDTH (&mpad->info);
    height = GST_VIDEO_INFO_HEIGHT (&mpad->info);

    if (width == 0 || height == 0)
      continue;

    this_width = width + MAX (mpad->xpos, 0);
    this_height = height + MAX (mpad->ypos, 0);

    if (best_width < this_width)
      best_width = this_width;
    if (best_height < this_height)
      best_height = this_height;

    if (fps_d == 0)
      cur_fps = 0.0;
    else
      gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);

    if (best_fps < cur_fps) {
      best_fps = cur_fps;
      best_fps_n = fps_n;
      best_fps_d = fps_d;
    }
  }

  if (best_fps_n <= 0 || best_fps_d <= 0 || best_fps == 0.0) {
    best_fps_n = 25;
    best_fps_d = 1;
    best_fps = 25.0;
  }

  if (best_width > 0 && best_height > 0 && best_fps > 0) {
    GstCaps *caps, *peercaps;
    GstStructure *s;
    GstVideoInfo info;

    if (GST_VIDEO_INFO_FPS_N (&mix->info) != best_fps_n ||
        GST_VIDEO_INFO_FPS_D (&mix->info) != best_fps_d) {
      if (mix->segment.position != -1) {
        mix->ts_offset = mix->segment.position - mix->segment.start;
        mix->nframes = 0;
      }
    }
    gst_video_info_init (&info);
    gst_video_info_set_format (&info, GST_VIDEO_INFO_FORMAT (&mix->info),
        best_width, best_height);
    info.fps_n = best_fps_n;
    info.fps_d = best_fps_d;
    info.par_n = GST_VIDEO_INFO_PAR_N (&mix->info);
    info.par_d = GST_VIDEO_INFO_PAR_D (&mix->info);

    caps = gst_video_info_to_caps (&info);

    peercaps = gst_pad_peer_query_caps (mix->srcpad, NULL);
    if (peercaps && !gst_caps_can_intersect (peercaps, caps)) {
      GstCaps *tmp;

      s = gst_caps_get_structure (caps, 0);
      gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT, "height",
          GST_TYPE_INT_RANGE, 1, G_MAXINT, "framerate", GST_TYPE_FRACTION_RANGE,
          0, 1, G_MAXINT, 1, NULL);

      tmp = gst_caps_intersect (caps, peercaps);
      gst_caps_unref (caps);
      gst_caps_unref (peercaps);
      caps = tmp;
      if (gst_caps_is_empty (caps)) {
        GST_DEBUG_OBJECT (mix, "empty caps");
        ret = FALSE;
        GST_VIDEO_MIXER2_UNLOCK (mix);
        goto done;
      }

      caps = gst_caps_truncate (caps);
      s = gst_caps_get_structure (caps, 0);
      gst_structure_fixate_field_nearest_int (s, "width", best_width);
      gst_structure_fixate_field_nearest_int (s, "height", best_height);
      gst_structure_fixate_field_nearest_fraction (s, "framerate", best_fps_n,
          best_fps_d);

      gst_structure_get_int (s, "width", &info.width);
      gst_structure_get_int (s, "height", &info.height);
      gst_structure_get_fraction (s, "framerate", &info.fps_n, &info.fps_d);
    }

    gst_caps_unref (caps);
    caps = gst_video_info_to_caps (&info);

    GST_VIDEO_MIXER2_UNLOCK (mix);
    ret = gst_videomixer2_src_setcaps (mix->srcpad, mix, caps);
    gst_caps_unref (caps);
  } else {
    GST_VIDEO_MIXER2_UNLOCK (mix);
  }

done:
  GST_VIDEO_MIXER2_SETCAPS_UNLOCK (mix);

  return ret;
}

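/* Pick the common output format for all sink pads and (re)create the per-pad
 * converters. The format chosen is the downstream-compatible input format that
 * occurs most often; as soon as one input has alpha, only alpha-capable
 * formats are considered. Pads whose format, colorimetry or chroma siting
 * differ from the chosen output get a GstVideoConverter, the others none. */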
static gboolean
gst_videomixer2_update_converters (GstVideoMixer2 * mix)
{
  GSList *tmp;
  GstVideoFormat best_format;
  GstVideoInfo best_info;
  GstVideoMixer2Pad *pad;
  gboolean need_alpha = FALSE;
  gboolean at_least_one_alpha = FALSE;
  GstCaps *downstream_caps;
  GstCaps *possible_caps;
  gchar *best_colorimetry;
  const gchar *best_chroma;
  GHashTable *formats_table;
  gint best_format_number = 0;

  best_format = GST_VIDEO_FORMAT_UNKNOWN;
  gst_video_info_init (&best_info);

  downstream_caps = gst_pad_get_allowed_caps (mix->srcpad);

  if (!downstream_caps || gst_caps_is_empty (downstream_caps)) {
    if (downstream_caps)
      gst_caps_unref (downstream_caps);
    return FALSE;
  }

  formats_table = g_hash_table_new (g_direct_hash, g_direct_equal);

  /* first find new preferred format */
  for (tmp = mix->sinkpads; tmp; tmp = tmp->next) {
    GstStructure *s;
    gint format_number;

    pad = tmp->data;

    if (!pad->info.finfo)
      continue;

    if (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
      at_least_one_alpha = TRUE;

    /* If we want alpha, disregard all the other formats */
    if (need_alpha && !(pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA))
      continue;

    /* This can happen if we release a pad and another pad hasn't been negotiated yet */
    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    possible_caps = gst_video_info_to_caps (&pad->info);

    s = gst_caps_get_structure (possible_caps, 0);
    gst_structure_remove_fields (s, "width", "height", "framerate",
        "pixel-aspect-ratio", "interlace-mode", NULL);

    /* Can downstream accept this format ? */
    if (!gst_caps_can_intersect (downstream_caps, possible_caps)) {
      gst_caps_unref (possible_caps);
      continue;
    }

    gst_caps_unref (possible_caps);

    format_number =
        GPOINTER_TO_INT (g_hash_table_lookup (formats_table,
            GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info))));
    format_number += 1;

    g_hash_table_replace (formats_table,
        GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info)),
        GINT_TO_POINTER (format_number));

    /* If that pad is the first with alpha, set it as the new best format */
    if (!need_alpha && (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
      need_alpha = TRUE;
      best_format = GST_VIDEO_INFO_FORMAT (&pad->info);
      best_info = pad->info;
      best_format_number = format_number;
    } else if (format_number > best_format_number) {
      best_format = GST_VIDEO_INFO_FORMAT (&pad->info);
      best_info = pad->info;
      best_format_number = format_number;
    }
  }

  g_hash_table_unref (formats_table);

  if (best_format == GST_VIDEO_FORMAT_UNKNOWN) {
    downstream_caps = gst_caps_fixate (downstream_caps);
    gst_video_info_from_caps (&best_info, downstream_caps);
    best_format = GST_VIDEO_INFO_FORMAT (&best_info);
  }

  gst_caps_unref (downstream_caps);

  if (at_least_one_alpha
      && !(best_info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
    GST_ELEMENT_ERROR (mix, CORE, NEGOTIATION,
        ("At least one of the input pads contains alpha, but downstream can't support alpha."),
        ("Either convert your inputs to not contain alpha or add a videoconvert after the mixer"));
    return FALSE;
  }

  best_colorimetry = gst_video_colorimetry_to_string (&(best_info.colorimetry));
  best_chroma = gst_video_chroma_to_string (best_info.chroma_site);

  if (GST_VIDEO_INFO_FPS_N (&mix->info) != GST_VIDEO_INFO_FPS_N (&best_info) ||
      GST_VIDEO_INFO_FPS_D (&mix->info) != GST_VIDEO_INFO_FPS_D (&best_info)) {
    if (mix->segment.position != -1) {
      mix->ts_offset = mix->segment.position - mix->segment.start;
      mix->nframes = 0;
    } else {
      mix->ts_offset += gst_util_uint64_scale_round (mix->nframes,
          GST_SECOND * GST_VIDEO_INFO_FPS_D (&mix->info),
          GST_VIDEO_INFO_FPS_N (&mix->info));
      mix->nframes = 0;
    }
  }

  mix->info = best_info;

  GST_DEBUG_OBJECT (mix,
      "The output format will now be : %d with colorimetry : %s and chroma : %s",
      best_format, best_colorimetry, best_chroma);

  /* Then browse the sinks once more, setting or unsetting conversion if needed */
  for (tmp = mix->sinkpads; tmp; tmp = tmp->next) {
    gchar *colorimetry;
    const gchar *chroma;

    pad = tmp->data;

    if (!pad->info.finfo)
      continue;

    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    if (pad->convert)
      gst_video_converter_free (pad->convert);

    pad->convert = NULL;

    colorimetry = gst_video_colorimetry_to_string (&(pad->info.colorimetry));
    chroma = gst_video_chroma_to_string (pad->info.chroma_site);

    if (best_format != GST_VIDEO_INFO_FORMAT (&pad->info) ||
        g_strcmp0 (colorimetry, best_colorimetry) ||
        g_strcmp0 (chroma, best_chroma)) {
      GstVideoInfo tmp_info = pad->info;
      tmp_info.finfo = best_info.finfo;
      tmp_info.chroma_site = best_info.chroma_site;
      tmp_info.colorimetry = best_info.colorimetry;

      GST_DEBUG_OBJECT (pad, "This pad will be converted from %d to %d",
          GST_VIDEO_INFO_FORMAT (&pad->info),
          GST_VIDEO_INFO_FORMAT (&best_info));
      pad->convert = gst_video_converter_new (&pad->info, &tmp_info, NULL);
      pad->need_conversion_update = TRUE;
      if (!pad->convert) {
        g_free (colorimetry);
        g_free (best_colorimetry);
        GST_WARNING ("No path found for conversion");
        return FALSE;
      }
    } else {
      GST_DEBUG_OBJECT (pad, "This pad will not need conversion");
    }
    g_free (colorimetry);
  }

  g_free (best_colorimetry);
  return TRUE;
}

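/* Sink pad setcaps: store the new per-pad video info, reject inputs whose
 * pixel-aspect-ratio or interlace mode conflicts with the current output, and
 * then refresh the converters and the source caps. */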
static gboolean
gst_videomixer2_pad_sink_setcaps (GstPad * pad, GstObject * parent,
    GstCaps * caps)
{
  GstVideoMixer2 *mix;
  GstVideoMixer2Pad *mixpad;
  GstVideoInfo info;
  gboolean ret = FALSE;

  GST_INFO_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, caps);

  mix = GST_VIDEO_MIXER2 (parent);
  mixpad = GST_VIDEO_MIXER2_PAD (pad);

  if (!gst_video_info_from_caps (&info, caps)) {
    GST_ERROR_OBJECT (pad, "Failed to parse caps");
    goto beach;
  }

  GST_VIDEO_MIXER2_LOCK (mix);
  if (GST_VIDEO_INFO_FORMAT (&mix->info) != GST_VIDEO_FORMAT_UNKNOWN) {
    if (GST_VIDEO_INFO_PAR_N (&mix->info) != GST_VIDEO_INFO_PAR_N (&info)
        || GST_VIDEO_INFO_PAR_D (&mix->info) != GST_VIDEO_INFO_PAR_D (&info) ||
        GST_VIDEO_INFO_INTERLACE_MODE (&mix->info) !=
        GST_VIDEO_INFO_INTERLACE_MODE (&info)) {
      GST_DEBUG_OBJECT (pad,
          "got input caps %" GST_PTR_FORMAT ", but " "current caps are %"
          GST_PTR_FORMAT, caps, mix->current_caps);
      GST_VIDEO_MIXER2_UNLOCK (mix);
      return FALSE;
    }
  }

  mixpad->info = info;

  GST_COLLECT_PADS_STREAM_LOCK (mix->collect);

  ret = gst_videomixer2_update_converters (mix);

  GST_VIDEO_MIXER2_UNLOCK (mix);
  if (ret)
    ret = gst_videomixer2_update_src_caps (mix);
  GST_COLLECT_PADS_STREAM_UNLOCK (mix->collect);

beach:
  return ret;
}

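/* Sink pad caps query: start from the current (or template) src caps, open up
 * width, height and framerate to full ranges, drop the format, colorimetry and
 * chroma-site fields, and intersect with the optional filter and the template. */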
static GstCaps *
gst_videomixer2_pad_sink_getcaps (GstPad * pad, GstVideoMixer2 * mix,
    GstCaps * filter)
{
  GstCaps *srccaps;
  GstCaps *template_caps;
  GstCaps *filtered_caps;
  GstCaps *returned_caps;
  GstStructure *s;
  gboolean had_current_caps = TRUE;
  gint i, n;

  template_caps = gst_pad_get_pad_template_caps (GST_PAD (mix->srcpad));

  srccaps = gst_pad_get_current_caps (GST_PAD (mix->srcpad));
  if (srccaps == NULL) {
    had_current_caps = FALSE;
    srccaps = template_caps;
  }

  srccaps = gst_caps_make_writable (srccaps);

  n = gst_caps_get_size (srccaps);
  for (i = 0; i < n; i++) {
    s = gst_caps_get_structure (srccaps, i);
    gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
        "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
        "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    if (!gst_structure_has_field (s, "pixel-aspect-ratio"))
      gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
          NULL);

    gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
        NULL);
  }

  filtered_caps = srccaps;
  if (filter)
    filtered_caps = gst_caps_intersect (srccaps, filter);
  returned_caps = gst_caps_intersect (filtered_caps, template_caps);

  gst_caps_unref (srccaps);
  if (filter)
    gst_caps_unref (filtered_caps);
  if (had_current_caps)
    gst_caps_unref (template_caps);

  return returned_caps;
}

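/* Sink pad accept-caps: relax the current (or template) src caps in the same
 * way as the caps query above and check whether the offered caps intersect. */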
static gboolean
gst_videomixer2_pad_sink_acceptcaps (GstPad * pad, GstVideoMixer2 * mix,
    GstCaps * caps)
{
  gboolean ret;
  GstCaps *modified_caps;
  GstCaps *accepted_caps;
  GstCaps *template_caps;
  gboolean had_current_caps = TRUE;
  gint i, n;
  GstStructure *s;

  GST_DEBUG_OBJECT (pad, "%" GST_PTR_FORMAT, caps);

  accepted_caps = gst_pad_get_current_caps (GST_PAD (mix->srcpad));

  template_caps = gst_pad_get_pad_template_caps (GST_PAD (mix->srcpad));

  if (accepted_caps == NULL) {
    accepted_caps = template_caps;
    had_current_caps = FALSE;
  }

  accepted_caps = gst_caps_make_writable (accepted_caps);

  GST_LOG_OBJECT (pad, "src caps %" GST_PTR_FORMAT, accepted_caps);

  n = gst_caps_get_size (accepted_caps);
  for (i = 0; i < n; i++) {
    s = gst_caps_get_structure (accepted_caps, i);
    gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
        "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
        "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    if (!gst_structure_has_field (s, "pixel-aspect-ratio"))
      gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
          NULL);

    gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
        NULL);
  }

  modified_caps = gst_caps_intersect (accepted_caps, template_caps);

  ret = gst_caps_can_intersect (caps, accepted_caps);
  GST_DEBUG_OBJECT (pad, "%saccepted caps %" GST_PTR_FORMAT,
      (ret ? "" : "not "), caps);
  GST_DEBUG_OBJECT (pad, "acceptable caps are %" GST_PTR_FORMAT, accepted_caps);
  gst_caps_unref (accepted_caps);
  gst_caps_unref (modified_caps);
  if (had_current_caps)
    gst_caps_unref (template_caps);
  return ret;
}

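/* Per-pad query handler installed on the collect pads: answer CAPS and
 * ACCEPT_CAPS with the helpers above, defer everything else to the
 * GstCollectPads default handler. */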
static gboolean
gst_videomixer2_sink_query (GstCollectPads * pads, GstCollectData * cdata,
    GstQuery * query, GstVideoMixer2 * mix)
{
  GstVideoMixer2Pad *pad = GST_VIDEO_MIXER2_PAD (cdata->pad);
  gboolean ret = FALSE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:
    {
      GstCaps *filter, *caps;

      gst_query_parse_caps (query, &filter);
      caps = gst_videomixer2_pad_sink_getcaps (GST_PAD (pad), mix, filter);
      gst_query_set_caps_result (query, caps);
      gst_caps_unref (caps);
      ret = TRUE;
      break;
    }
    case GST_QUERY_ACCEPT_CAPS:
    {
      GstCaps *caps;

      gst_query_parse_accept_caps (query, &caps);
      ret = gst_videomixer2_pad_sink_acceptcaps (GST_PAD (pad), mix, caps);
      gst_query_set_accept_caps_result (query, ret);
      ret = TRUE;
      break;
    }
    default:
      ret = gst_collect_pads_query_default (pads, cdata, query, FALSE);
      break;
  }
  return ret;
}

static void
gst_videomixer2_pad_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstVideoMixer2Pad *pad = GST_VIDEO_MIXER2_PAD (object);

  switch (prop_id) {
    case PROP_PAD_ZORDER:
      g_value_set_uint (value, pad->zorder);
      break;
    case PROP_PAD_XPOS:
      g_value_set_int (value, pad->xpos);
      break;
    case PROP_PAD_YPOS:
      g_value_set_int (value, pad->ypos);
      break;
    case PROP_PAD_ALPHA:
      g_value_set_double (value, pad->alpha);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static int
pad_zorder_compare (const GstVideoMixer2Pad * pad1,
    const GstVideoMixer2Pad * pad2)
{
  return pad1->zorder - pad2->zorder;
}

static void
gst_videomixer2_pad_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoMixer2Pad *pad = GST_VIDEO_MIXER2_PAD (object);
  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (GST_PAD (pad)));

  switch (prop_id) {
    case PROP_PAD_ZORDER:
      GST_VIDEO_MIXER2_LOCK (mix);
      pad->zorder = g_value_get_uint (value);

      mix->sinkpads = g_slist_sort (mix->sinkpads,
          (GCompareFunc) pad_zorder_compare);
      GST_VIDEO_MIXER2_UNLOCK (mix);
      break;
    case PROP_PAD_XPOS:
      pad->xpos = g_value_get_int (value);
      break;
    case PROP_PAD_YPOS:
      pad->ypos = g_value_get_int (value);
      break;
    case PROP_PAD_ALPHA:
      pad->alpha = g_value_get_double (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }

  gst_object_unref (mix);
}

static void
gst_videomixer2_pad_class_init (GstVideoMixer2PadClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;

  gobject_class->set_property = gst_videomixer2_pad_set_property;
  gobject_class->get_property = gst_videomixer2_pad_get_property;

  g_object_class_install_property (gobject_class, PROP_PAD_ZORDER,
      g_param_spec_uint ("zorder", "Z-Order", "Z Order of the picture",
          0, 10000, DEFAULT_PAD_ZORDER,
          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PAD_XPOS,
      g_param_spec_int ("xpos", "X Position", "X Position of the picture",
          G_MININT, G_MAXINT, DEFAULT_PAD_XPOS,
          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PAD_YPOS,
      g_param_spec_int ("ypos", "Y Position", "Y Position of the picture",
          G_MININT, G_MAXINT, DEFAULT_PAD_YPOS,
          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PAD_ALPHA,
      g_param_spec_double ("alpha", "Alpha", "Alpha of the picture", 0.0, 1.0,
          DEFAULT_PAD_ALPHA,
          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
}

static void
gst_videomixer2_pad_init (GstVideoMixer2Pad * mixerpad)
{
  mixerpad->zorder = DEFAULT_PAD_ZORDER;
  mixerpad->xpos = DEFAULT_PAD_XPOS;
  mixerpad->ypos = DEFAULT_PAD_YPOS;
  mixerpad->alpha = DEFAULT_PAD_ALPHA;
  mixerpad->convert = NULL;
  mixerpad->need_conversion_update = FALSE;
}

/* GstVideoMixer2 */
#define DEFAULT_BACKGROUND VIDEO_MIXER2_BACKGROUND_CHECKER
enum
{
  PROP_0,
  PROP_BACKGROUND
};

#define GST_TYPE_VIDEO_MIXER2_BACKGROUND (gst_videomixer2_background_get_type())
static GType
gst_videomixer2_background_get_type (void)
{
  static GType video_mixer_background_type = 0;

  static const GEnumValue video_mixer_background[] = {
    {VIDEO_MIXER2_BACKGROUND_CHECKER, "Checker pattern", "checker"},
    {VIDEO_MIXER2_BACKGROUND_BLACK, "Black", "black"},
    {VIDEO_MIXER2_BACKGROUND_WHITE, "White", "white"},
    {VIDEO_MIXER2_BACKGROUND_TRANSPARENT,
        "Transparent Background to enable further mixing", "transparent"},
    {0, NULL, NULL},
  };

  if (!video_mixer_background_type) {
    video_mixer_background_type =
        g_enum_register_static ("GstVideoMixer2Background",
        video_mixer_background);
  }
  return video_mixer_background_type;
}

#define gst_videomixer2_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstVideoMixer2, gst_videomixer2, GST_TYPE_ELEMENT,
    G_IMPLEMENT_INTERFACE (GST_TYPE_CHILD_PROXY,
        gst_videomixer2_child_proxy_init));

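/* Record the latest QoS observation from downstream: the proportion and the
 * earliest time before which rendering is pointless. For non-live pipelines a
 * positive diff pushes the earliest time further ahead (by an extra diff plus
 * one frame duration) so that we can catch up by dropping frames. */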
static void
gst_videomixer2_update_qos (GstVideoMixer2 * mix, gdouble proportion,
    GstClockTimeDiff diff, GstClockTime timestamp)
{
  GST_DEBUG_OBJECT (mix,
      "Updating QoS: proportion %lf, diff %s%" GST_TIME_FORMAT ", timestamp %"
      GST_TIME_FORMAT, proportion, (diff < 0) ? "-" : "",
      GST_TIME_ARGS (ABS (diff)), GST_TIME_ARGS (timestamp));

  GST_OBJECT_LOCK (mix);
  mix->proportion = proportion;
  if (G_LIKELY (timestamp != GST_CLOCK_TIME_NONE)) {
    if (!mix->live && G_UNLIKELY (diff > 0))
      mix->earliest_time =
          timestamp + 2 * diff + gst_util_uint64_scale_int_round (GST_SECOND,
          GST_VIDEO_INFO_FPS_D (&mix->info), GST_VIDEO_INFO_FPS_N (&mix->info));
    else
      mix->earliest_time = timestamp + diff;
  } else {
    mix->earliest_time = GST_CLOCK_TIME_NONE;
  }
  GST_OBJECT_UNLOCK (mix);
}

static void
gst_videomixer2_reset_qos (GstVideoMixer2 * mix)
{
  gst_videomixer2_update_qos (mix, 0.5, 0, GST_CLOCK_TIME_NONE);
  mix->qos_processed = mix->qos_dropped = 0;
}

static void
gst_videomixer2_read_qos (GstVideoMixer2 * mix, gdouble * proportion,
    GstClockTime * time)
{
  GST_OBJECT_LOCK (mix);
  *proportion = mix->proportion;
  *time = mix->earliest_time;
  GST_OBJECT_UNLOCK (mix);
}

static void
gst_videomixer2_reset (GstVideoMixer2 * mix)
{
  GSList *l;

  gst_video_info_init (&mix->info);
  mix->ts_offset = 0;
  mix->nframes = 0;

  gst_segment_init (&mix->segment, GST_FORMAT_TIME);
  mix->segment.position = -1;

  gst_videomixer2_reset_qos (mix);

  for (l = mix->sinkpads; l; l = l->next) {
    GstVideoMixer2Pad *p = l->data;
    GstVideoMixer2Collect *mixcol = p->mixcol;

    gst_buffer_replace (&mixcol->buffer, NULL);
    mixcol->start_time = -1;
    mixcol->end_time = -1;

    gst_video_info_init (&p->info);
  }

  mix->newseg_pending = TRUE;
}

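/* Gather, for every sink pad, the buffer that overlaps the requested output
 * interval: clip queued buffers to their segment, convert them to running
 * time and keep them in the pad's collect data until they have been blended.
 * Return codes are listed below. */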
/* 1 == OK
 * 0 == need more data
 * -1 == EOS
 * -2 == error
 */
static gint
gst_videomixer2_fill_queues (GstVideoMixer2 * mix,
    GstClockTime output_start_time, GstClockTime output_end_time)
{
  GSList *l;
  gboolean eos = TRUE;
  gboolean need_more_data = FALSE;

  for (l = mix->sinkpads; l; l = l->next) {
    GstVideoMixer2Pad *pad = l->data;
    GstVideoMixer2Collect *mixcol = pad->mixcol;
    GstSegment *segment = &pad->mixcol->collect.segment;
    GstBuffer *buf;
    GstVideoInfo *vinfo;

    buf = gst_collect_pads_peek (mix->collect, &mixcol->collect);
    if (buf) {
      GstClockTime start_time, end_time;

      start_time = GST_BUFFER_TIMESTAMP (buf);
      if (start_time == -1) {
        gst_buffer_unref (buf);
        GST_ERROR_OBJECT (pad, "Need timestamped buffers!");
        return -2;
      }

      vinfo = &pad->info;

      /* FIXME: Make all this work with negative rates */

      if ((mixcol->buffer && start_time < GST_BUFFER_TIMESTAMP (mixcol->buffer))
          || (mixcol->queued
              && start_time < GST_BUFFER_TIMESTAMP (mixcol->queued))) {
        GST_WARNING_OBJECT (pad, "Buffer from the past, dropping");
        gst_buffer_unref (buf);
        buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
        gst_buffer_unref (buf);
        need_more_data = TRUE;
        continue;
      }

      if (mixcol->queued) {
        end_time = start_time - GST_BUFFER_TIMESTAMP (mixcol->queued);
        start_time = GST_BUFFER_TIMESTAMP (mixcol->queued);
        gst_buffer_unref (buf);
        buf = gst_buffer_ref (mixcol->queued);
        vinfo = &mixcol->queued_vinfo;
      } else {
        end_time = GST_BUFFER_DURATION (buf);

        if (end_time == -1) {
          mixcol->queued = buf;
          buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
          gst_buffer_unref (buf);
          mixcol->queued_vinfo = pad->info;
          need_more_data = TRUE;
          continue;
        }
      }

      g_assert (start_time != -1 && end_time != -1);
      end_time += start_time;   /* convert from duration to position */

      /* Check if it's inside the segment */
      if (start_time >= segment->stop || end_time < segment->start) {
        GST_DEBUG_OBJECT (pad, "Buffer outside the segment");

        if (buf == mixcol->queued) {
          gst_buffer_unref (buf);
          gst_buffer_replace (&mixcol->queued, NULL);
        } else {
          gst_buffer_unref (buf);
          buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
          gst_buffer_unref (buf);
        }

        need_more_data = TRUE;
        continue;
      }

      /* Clip to segment and convert to running time */
      start_time = MAX (start_time, segment->start);
      if (segment->stop != -1)
        end_time = MIN (end_time, segment->stop);
      start_time =
          gst_segment_to_running_time (segment, GST_FORMAT_TIME, start_time);
      end_time =
          gst_segment_to_running_time (segment, GST_FORMAT_TIME, end_time);
      g_assert (start_time != -1 && end_time != -1);

      /* Convert to the output segment rate */
      if (ABS (mix->segment.rate) != 1.0) {
        start_time *= ABS (mix->segment.rate);
        end_time *= ABS (mix->segment.rate);
      }

      if (mixcol->end_time != -1 && mixcol->end_time > end_time) {
        GST_DEBUG_OBJECT (pad, "Buffer from the past, dropping");
        if (buf == mixcol->queued) {
          gst_buffer_unref (buf);
          gst_buffer_replace (&mixcol->queued, NULL);
        } else {
          gst_buffer_unref (buf);
          buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
          gst_buffer_unref (buf);
        }

        need_more_data = TRUE;
        continue;
      }

      if (end_time >= output_start_time && start_time < output_end_time) {
        GST_DEBUG_OBJECT (pad,
            "Taking new buffer with start time %" GST_TIME_FORMAT,
            GST_TIME_ARGS (start_time));
        gst_buffer_replace (&mixcol->buffer, buf);
        mixcol->buffer_vinfo = *vinfo;
        mixcol->start_time = start_time;
        mixcol->end_time = end_time;

        if (buf == mixcol->queued) {
          gst_buffer_unref (buf);
          gst_buffer_replace (&mixcol->queued, NULL);
        } else {
          gst_buffer_unref (buf);
          buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
          gst_buffer_unref (buf);
        }
        eos = FALSE;
      } else if (start_time >= output_end_time) {
        GST_DEBUG_OBJECT (pad, "Keeping buffer until %" GST_TIME_FORMAT,
            GST_TIME_ARGS (start_time));
        gst_buffer_unref (buf);
        eos = FALSE;
      } else {
        GST_DEBUG_OBJECT (pad, "Too old buffer -- dropping");
        if (buf == mixcol->queued) {
          gst_buffer_unref (buf);
          gst_buffer_replace (&mixcol->queued, NULL);
        } else {
          gst_buffer_unref (buf);
          buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
          gst_buffer_unref (buf);
        }

        need_more_data = TRUE;
        continue;
      }
    } else {
      if (mixcol->end_time != -1) {
        if (mixcol->end_time <= output_start_time) {
          gst_buffer_replace (&mixcol->buffer, NULL);
          mixcol->start_time = mixcol->end_time = -1;
          if (!GST_COLLECT_PADS_STATE_IS_SET (mixcol,
                  GST_COLLECT_PADS_STATE_EOS))
            need_more_data = TRUE;
        } else if (!GST_COLLECT_PADS_STATE_IS_SET (mixcol,
                GST_COLLECT_PADS_STATE_EOS)) {
          eos = FALSE;
        }
      }
    }
  }

  if (need_more_data)
    return 0;
  if (eos)
    return -1;

  return 1;
}

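/* Produce one output frame for the given interval: allocate the output buffer,
 * paint the selected background, then composite every pad's pending buffer
 * (converting it to the output format first when a converter is set) using
 * either the blend or the overlay function. */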
static GstFlowReturn
gst_videomixer2_blend_buffers (GstVideoMixer2 * mix,
    GstClockTime output_start_time, GstClockTime output_end_time,
    GstBuffer ** outbuf)
{
  GSList *l;
  guint outsize;
  BlendFunction composite;
  GstVideoFrame outframe;
  static GstAllocationParams params = { 0, 15, 0, 0, };

  outsize = GST_VIDEO_INFO_SIZE (&mix->info);

  *outbuf = gst_buffer_new_allocate (NULL, outsize, &params);
  GST_BUFFER_TIMESTAMP (*outbuf) = output_start_time;
  GST_BUFFER_DURATION (*outbuf) = output_end_time - output_start_time;

  gst_video_frame_map (&outframe, &mix->info, *outbuf, GST_MAP_READWRITE);

  /* default to blending */
  composite = mix->blend;
  switch (mix->background) {
    case VIDEO_MIXER2_BACKGROUND_CHECKER:
      mix->fill_checker (&outframe);
      break;
    case VIDEO_MIXER2_BACKGROUND_BLACK:
      mix->fill_color (&outframe, 16, 128, 128);
      break;
    case VIDEO_MIXER2_BACKGROUND_WHITE:
      mix->fill_color (&outframe, 240, 128, 128);
      break;
    case VIDEO_MIXER2_BACKGROUND_TRANSPARENT:
    {
      guint i, plane, num_planes, height;

      num_planes = GST_VIDEO_FRAME_N_PLANES (&outframe);
      for (plane = 0; plane < num_planes; ++plane) {
        guint8 *pdata;
        gsize rowsize, plane_stride;

        pdata = GST_VIDEO_FRAME_PLANE_DATA (&outframe, plane);
        plane_stride = GST_VIDEO_FRAME_PLANE_STRIDE (&outframe, plane);
        rowsize = GST_VIDEO_FRAME_COMP_WIDTH (&outframe, plane)
            * GST_VIDEO_FRAME_COMP_PSTRIDE (&outframe, plane);
        height = GST_VIDEO_FRAME_COMP_HEIGHT (&outframe, plane);
        for (i = 0; i < height; ++i) {
          memset (pdata, 0, rowsize);
          pdata += plane_stride;
        }
      }

      /* use overlay to keep background transparent */
      composite = mix->overlay;
      break;
    }
  }

  for (l = mix->sinkpads; l; l = l->next) {
    GstVideoMixer2Pad *pad = l->data;
    GstVideoMixer2Collect *mixcol = pad->mixcol;

    if (mixcol->buffer != NULL) {
      GstClockTime timestamp;
      gint64 stream_time;
      GstSegment *seg;
      GstVideoFrame converted_frame;
      GstBuffer *converted_buf = NULL;
      GstVideoFrame frame;

      seg = &mixcol->collect.segment;

      timestamp = GST_BUFFER_TIMESTAMP (mixcol->buffer);

      stream_time =
          gst_segment_to_stream_time (seg, GST_FORMAT_TIME, timestamp);

      /* sync object properties on stream time */
      if (GST_CLOCK_TIME_IS_VALID (stream_time))
        gst_object_sync_values (GST_OBJECT (pad), stream_time);

      gst_video_frame_map (&frame, &mixcol->buffer_vinfo, mixcol->buffer,
          GST_MAP_READ);

      if (pad->convert) {
        gint converted_size;

        /* We wait until here to set the conversion infos, in case mix->info changed */
        if (pad->need_conversion_update) {
          pad->conversion_info = mix->info;
          gst_video_info_set_format (&(pad->conversion_info),
              GST_VIDEO_INFO_FORMAT (&mix->info), pad->info.width,
              pad->info.height);
          pad->need_conversion_update = FALSE;
        }

        converted_size = pad->conversion_info.size;
        converted_size = converted_size > outsize ? converted_size : outsize;
        converted_buf = gst_buffer_new_allocate (NULL, converted_size, &params);

        gst_video_frame_map (&converted_frame, &(pad->conversion_info),
            converted_buf, GST_MAP_READWRITE);
        gst_video_converter_frame (pad->convert, &frame, &converted_frame);
        gst_video_frame_unmap (&frame);
      } else {
        converted_frame = frame;
      }

      composite (&converted_frame, pad->xpos, pad->ypos, pad->alpha, &outframe);

      if (pad->convert)
        gst_buffer_unref (converted_buf);

      gst_video_frame_unmap (&converted_frame);
    }
  }
  gst_video_frame_unmap (&outframe);

  return GST_FLOW_OK;
}

/* Perform QoS calculations before processing the next frame. Returns the
|
|
|
|
* jitter: a value <= 0 means the frame should be processed, a value > 0 means it can be dropped entirely */
|
|
|
|
static gint64
|
|
|
|
gst_videomixer2_do_qos (GstVideoMixer2 * mix, GstClockTime timestamp)
|
|
|
|
{
|
|
|
|
GstClockTime qostime, earliest_time;
|
|
|
|
gdouble proportion;
|
|
|
|
gint64 jitter;
|
|
|
|
|
|
|
|
/* no timestamp, can't do QoS => process frame */
|
|
|
|
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
|
|
|
|
GST_LOG_OBJECT (mix, "invalid timestamp, can't do QoS, process frame");
|
|
|
|
return -1;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* get latest QoS observation values */
|
|
|
|
gst_videomixer2_read_qos (mix, &proportion, &earliest_time);
|
|
|
|
|
|
|
|
/* skip qos if we have no observation (yet) => process frame */
|
|
|
|
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) {
|
|
|
|
GST_LOG_OBJECT (mix, "no observation yet, process frame");
|
|
|
|
return -1;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* qos is done on running time */
|
|
|
|
qostime =
|
|
|
|
gst_segment_to_running_time (&mix->segment, GST_FORMAT_TIME, timestamp);
|
|
|
|
|
|
|
|
/* see how our next timestamp relates to the latest qos timestamp */
|
|
|
|
GST_LOG_OBJECT (mix, "qostime %" GST_TIME_FORMAT ", earliest %"
|
|
|
|
GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));
|
|
|
|
|
|
|
|
jitter = GST_CLOCK_DIFF (qostime, earliest_time);
|
|
|
|
if (qostime != GST_CLOCK_TIME_NONE && jitter > 0) {
|
|
|
|
GST_DEBUG_OBJECT (mix, "we are late, drop frame");
|
|
|
|
return jitter;
|
|
|
|
}
|
|
|
|
|
|
|
|
GST_LOG_OBJECT (mix, "process frame");
|
|
|
|
return jitter;
|
|
|
|
}
|
|
|
|
|
|
|
|
static GstFlowReturn
|
2012-04-17 13:14:27 +00:00
|
|
|
gst_videomixer2_collected (GstCollectPads * pads, GstVideoMixer2 * mix)
|
2010-07-26 14:07:15 +00:00
|
|
|
{
|
|
|
|
GstFlowReturn ret;
|
|
|
|
GstClockTime output_start_time, output_end_time;
|
|
|
|
GstBuffer *outbuf = NULL;
|
|
|
|
gint res;
|
|
|
|
gint64 jitter;
|
|
|
|
|
|
|
|
/* If we're not negotiated yet... */
|
2012-01-26 17:15:51 +00:00
|
|
|
if (GST_VIDEO_INFO_FORMAT (&mix->info) == GST_VIDEO_FORMAT_UNKNOWN)
|
2010-07-26 14:07:15 +00:00
|
|
|
return GST_FLOW_NOT_NEGOTIATED;
|
|
|
|
|
2013-04-25 18:59:52 +00:00
|
|
|
if (mix->send_stream_start) {
|
|
|
|
gchar s_id[32];
|
|
|
|
|
|
|
|
/* stream-start (FIXME: create id based on input ids) */
|
|
|
|
g_snprintf (s_id, sizeof (s_id), "mix-%08x", g_random_int ());
|
|
|
|
if (!gst_pad_push_event (mix->srcpad, gst_event_new_stream_start (s_id))) {
|
|
|
|
GST_WARNING_OBJECT (mix->srcpad, "Sending stream start event failed");
|
|
|
|
}
|
|
|
|
mix->send_stream_start = FALSE;
|
|
|
|
}
|
|
|
|
|
2013-10-03 19:36:34 +00:00
|
|
|
if (gst_pad_check_reconfigure (mix->srcpad))
|
|
|
|
gst_videomixer2_update_src_caps (mix);
|
|
|
|
|
2013-05-18 18:36:39 +00:00
|
|
|
if (mix->send_caps) {
|
|
|
|
if (!gst_pad_push_event (mix->srcpad,
|
|
|
|
gst_event_new_caps (mix->current_caps))) {
|
|
|
|
GST_WARNING_OBJECT (mix->srcpad, "Sending caps event failed");
|
|
|
|
}
|
|
|
|
mix->send_caps = FALSE;
|
|
|
|
}
|
|
|
|
|
2010-07-26 14:07:15 +00:00
|
|
|
GST_VIDEO_MIXER2_LOCK (mix);
|
|
|
|
|
|
|
|
if (mix->newseg_pending) {
|
|
|
|
GST_DEBUG_OBJECT (mix, "Sending NEWSEGMENT event");
|
2013-05-23 01:00:45 +00:00
|
|
|
GST_VIDEO_MIXER2_UNLOCK (mix);
|
2012-01-26 17:15:51 +00:00
|
|
|
if (!gst_pad_push_event (mix->srcpad,
|
|
|
|
gst_event_new_segment (&mix->segment))) {
|
2010-07-26 14:07:15 +00:00
|
|
|
ret = GST_FLOW_ERROR;
|
2013-05-23 01:00:45 +00:00
|
|
|
goto done_unlocked;
|
2010-07-26 14:07:15 +00:00
|
|
|
}
|
2013-05-23 01:00:45 +00:00
|
|
|
GST_VIDEO_MIXER2_LOCK (mix);
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->newseg_pending = FALSE;
|
|
|
|
}
|
|
|
|
|
2012-01-25 17:40:03 +00:00
|
|
|
if (mix->segment.position == -1)
|
2010-07-26 14:07:15 +00:00
|
|
|
output_start_time = mix->segment.start;
|
|
|
|
else
|
2012-01-25 17:40:03 +00:00
|
|
|
output_start_time = mix->segment.position;
|
2010-07-26 14:07:15 +00:00
|
|
|
|
2013-08-06 09:58:38 +00:00
|
|
|
output_end_time =
|
|
|
|
mix->ts_offset + gst_util_uint64_scale_round (mix->nframes + 1,
|
|
|
|
GST_SECOND * GST_VIDEO_INFO_FPS_D (&mix->info),
|
|
|
|
GST_VIDEO_INFO_FPS_N (&mix->info)) + mix->segment.start;
|
|
|
|
|
2013-08-07 17:32:07 +00:00
|
|
|
if (output_end_time >= mix->segment.stop) {
|
2010-07-26 14:07:15 +00:00
|
|
|
GST_DEBUG_OBJECT (mix, "Segment done");
|
2013-08-07 17:32:07 +00:00
|
|
|
if (!(mix->segment.flags & GST_SEGMENT_FLAG_SEGMENT)) {
|
2014-07-04 19:22:10 +00:00
|
|
|
GST_VIDEO_MIXER2_UNLOCK (mix);
|
2013-08-07 17:32:07 +00:00
|
|
|
gst_pad_push_event (mix->srcpad, gst_event_new_eos ());
|
|
|
|
|
|
|
|
ret = GST_FLOW_EOS;
|
|
|
|
goto done_unlocked;
|
|
|
|
}
|
2010-07-26 14:07:15 +00:00
|
|
|
}
|
|
|
|
|
2013-09-14 01:27:09 +00:00
|
|
|
if (G_UNLIKELY (mix->pending_tags)) {
|
|
|
|
gst_pad_push_event (mix->srcpad, gst_event_new_tag (mix->pending_tags));
|
|
|
|
mix->pending_tags = NULL;
|
|
|
|
}
|
|
|
|
|
2010-07-26 14:07:15 +00:00
|
|
|
if (mix->segment.stop != -1)
|
|
|
|
output_end_time = MIN (output_end_time, mix->segment.stop);
|
|
|
|
|
|
|
|
res = gst_videomixer2_fill_queues (mix, output_start_time, output_end_time);
|
|
|
|
|
|
|
|
if (res == 0) {
|
|
|
|
GST_DEBUG_OBJECT (mix, "Need more data for decisions");
|
|
|
|
ret = GST_FLOW_OK;
|
|
|
|
goto done;
|
|
|
|
} else if (res == -1) {
|
2013-05-23 01:00:45 +00:00
|
|
|
GST_VIDEO_MIXER2_UNLOCK (mix);
|
2013-06-12 16:23:46 +00:00
|
|
|
GST_DEBUG_OBJECT (mix, "All sinkpads are EOS -- forwarding");
|
2010-07-26 14:07:15 +00:00
|
|
|
gst_pad_push_event (mix->srcpad, gst_event_new_eos ());
|
2012-01-04 09:01:48 +00:00
|
|
|
ret = GST_FLOW_EOS;
|
2013-05-23 01:00:45 +00:00
|
|
|
goto done_unlocked;
|
2010-07-26 14:07:15 +00:00
|
|
|
} else if (res == -2) {
|
|
|
|
GST_ERROR_OBJECT (mix, "Error collecting buffers");
|
|
|
|
ret = GST_FLOW_ERROR;
|
|
|
|
goto done;
|
|
|
|
}
|
|
|
|
|
|
|
|
jitter = gst_videomixer2_do_qos (mix, output_start_time);
|
|
|
|
if (jitter <= 0) {
|
|
|
|
ret =
|
|
|
|
gst_videomixer2_blend_buffers (mix, output_start_time,
|
|
|
|
output_end_time, &outbuf);
|
|
|
|
mix->qos_processed++;
|
|
|
|
} else {
|
|
|
|
GstMessage *msg;
|
|
|
|
|
|
|
|
mix->qos_dropped++;
|
|
|
|
|
|
|
|
/* TODO: live */
|
|
|
|
msg =
|
|
|
|
gst_message_new_qos (GST_OBJECT_CAST (mix), FALSE,
|
|
|
|
gst_segment_to_running_time (&mix->segment, GST_FORMAT_TIME,
|
|
|
|
output_start_time), gst_segment_to_stream_time (&mix->segment,
|
|
|
|
GST_FORMAT_TIME, output_start_time), output_start_time,
|
|
|
|
output_end_time - output_start_time);
|
|
|
|
gst_message_set_qos_values (msg, jitter, mix->proportion, 1000000);
|
|
|
|
gst_message_set_qos_stats (msg, GST_FORMAT_BUFFERS, mix->qos_processed,
|
|
|
|
mix->qos_dropped);
|
|
|
|
gst_element_post_message (GST_ELEMENT_CAST (mix), msg);
|
|
|
|
|
|
|
|
ret = GST_FLOW_OK;
|
|
|
|
}
|
|
|
|
|
2012-01-25 17:40:03 +00:00
|
|
|
mix->segment.position = output_end_time;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->nframes++;
|
|
|
|
|
|
|
|
GST_VIDEO_MIXER2_UNLOCK (mix);
|
|
|
|
if (outbuf) {
|
|
|
|
GST_LOG_OBJECT (mix,
|
|
|
|
"Pushing buffer with ts %" GST_TIME_FORMAT " and duration %"
|
|
|
|
GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
|
|
|
|
ret = gst_pad_push (mix->srcpad, outbuf);
|
|
|
|
}
|
2013-05-23 01:00:45 +00:00
|
|
|
goto done_unlocked;
|
2010-07-26 14:07:15 +00:00
|
|
|
|
|
|
|
done:
|
|
|
|
GST_VIDEO_MIXER2_UNLOCK (mix);
|
|
|
|
|
2013-05-23 01:00:45 +00:00
|
|
|
done_unlocked:
|
2010-07-26 14:07:15 +00:00
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
2013-05-30 12:48:42 +00:00
|
|
|
/* FIXME, the duration query should reflect how long you will produce
|
|
|
|
* data, that is the amount of stream time until you will emit EOS.
|
|
|
|
*
|
|
|
|
* For synchronized mixing this is always the max of all the durations
|
|
|
|
* of upstream since we emit EOS when all of them have finished.
|
|
|
|
*
|
|
|
|
* We don't do synchronized mixing so this really depends on where the
|
|
|
|
* streams were punched in and what their relative offsets are against
|
|
|
|
* each other, which we can get from the first timestamps we see.
|
|
|
|
*
|
|
|
|
* When we add a new stream (or remove a stream) the duration might
|
|
|
|
* also become invalid again and we need to post a new DURATION
|
|
|
|
* message to notify this fact to the parent.
|
|
|
|
* For now we take the max of all the upstream elements so the simple
|
|
|
|
* cases work at least somewhat.
|
|
|
|
*/
|
2010-07-26 14:07:15 +00:00
|
|
|
static gboolean
|
|
|
|
gst_videomixer2_query_duration (GstVideoMixer2 * mix, GstQuery * query)
|
|
|
|
{
|
2012-01-26 17:15:51 +00:00
|
|
|
GValue item = { 0 };
|
2010-07-26 14:07:15 +00:00
|
|
|
gint64 max;
|
|
|
|
gboolean res;
|
|
|
|
GstFormat format;
|
|
|
|
GstIterator *it;
|
|
|
|
gboolean done;
|
|
|
|
|
|
|
|
/* parse format */
|
|
|
|
gst_query_parse_duration (query, &format, NULL);
|
|
|
|
|
|
|
|
max = -1;
|
|
|
|
res = TRUE;
|
|
|
|
done = FALSE;
|
|
|
|
|
|
|
|
/* Take maximum of all durations */
|
|
|
|
it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
|
|
|
|
while (!done) {
|
2012-01-26 17:15:51 +00:00
|
|
|
switch (gst_iterator_next (it, &item)) {
|
2010-07-26 14:07:15 +00:00
|
|
|
case GST_ITERATOR_DONE:
|
|
|
|
done = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_ITERATOR_OK:
|
|
|
|
{
|
2012-01-26 17:15:51 +00:00
|
|
|
GstPad *pad;
|
2010-07-26 14:07:15 +00:00
|
|
|
gint64 duration;
|
|
|
|
|
2012-01-26 17:15:51 +00:00
|
|
|
pad = g_value_get_object (&item);
|
|
|
|
|
2010-07-26 14:07:15 +00:00
|
|
|
/* ask sink peer for duration */
|
2012-01-26 17:15:51 +00:00
|
|
|
res &= gst_pad_peer_query_duration (pad, format, &duration);
|
2010-07-26 14:07:15 +00:00
|
|
|
/* take max from all valid return values */
|
|
|
|
if (res) {
|
|
|
|
/* valid unknown length, stop searching */
|
|
|
|
if (duration == -1) {
|
|
|
|
max = duration;
|
|
|
|
done = TRUE;
|
|
|
|
}
|
|
|
|
/* else see if bigger than current max */
|
|
|
|
else if (duration > max)
|
|
|
|
max = duration;
|
|
|
|
}
|
2012-01-26 17:15:51 +00:00
|
|
|
g_value_reset (&item);
|
2010-07-26 14:07:15 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case GST_ITERATOR_RESYNC:
|
|
|
|
max = -1;
|
|
|
|
res = TRUE;
|
|
|
|
gst_iterator_resync (it);
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
res = FALSE;
|
|
|
|
done = TRUE;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
2012-01-26 17:15:51 +00:00
|
|
|
g_value_unset (&item);
|
2010-07-26 14:07:15 +00:00
|
|
|
gst_iterator_free (it);
|
|
|
|
|
|
|
|
if (res) {
|
|
|
|
/* and store the max */
|
|
|
|
GST_DEBUG_OBJECT (mix, "Total duration in format %s: %"
|
|
|
|
GST_TIME_FORMAT, gst_format_get_name (format), GST_TIME_ARGS (max));
|
|
|
|
gst_query_set_duration (query, format, max);
|
|
|
|
}
|
|
|
|
|
|
|
|
return res;
|
|
|
|
}
|
|
|
|
|
|
|
|
static gboolean
|
|
|
|
gst_videomixer2_query_latency (GstVideoMixer2 * mix, GstQuery * query)
|
|
|
|
{
|
|
|
|
GstClockTime min, max;
|
|
|
|
gboolean live;
|
|
|
|
gboolean res;
|
|
|
|
GstIterator *it;
|
|
|
|
gboolean done;
|
2012-01-26 17:15:51 +00:00
|
|
|
GValue item = { 0 };
|
2010-07-26 14:07:15 +00:00
|
|
|
|
|
|
|
res = TRUE;
|
|
|
|
done = FALSE;
|
|
|
|
live = FALSE;
|
|
|
|
min = 0;
|
|
|
|
max = GST_CLOCK_TIME_NONE;
|
|
|
|
|
|
|
|
/* Take maximum of all latency values */
|
|
|
|
it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
|
|
|
|
while (!done) {
|
2012-01-26 17:15:51 +00:00
|
|
|
switch (gst_iterator_next (it, &item)) {
|
2010-07-26 14:07:15 +00:00
|
|
|
case GST_ITERATOR_DONE:
|
|
|
|
done = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_ITERATOR_OK:
|
|
|
|
{
|
2012-01-26 17:15:51 +00:00
|
|
|
GstPad *pad = g_value_get_object (&item);
|
2010-07-26 14:07:15 +00:00
|
|
|
GstQuery *peerquery;
|
|
|
|
GstClockTime min_cur, max_cur;
|
|
|
|
gboolean live_cur;
|
|
|
|
|
|
|
|
peerquery = gst_query_new_latency ();
|
|
|
|
|
|
|
|
/* Ask peer for latency */
|
|
|
|
res &= gst_pad_peer_query (pad, peerquery);
|
|
|
|
|
|
|
|
/* take max from all valid return values */
|
|
|
|
if (res) {
|
|
|
|
gst_query_parse_latency (peerquery, &live_cur, &min_cur, &max_cur);
|
|
|
|
|
2015-02-11 12:53:02 +00:00
|
|
|
if (live_cur) {
|
|
|
|
if (min_cur > min)
|
|
|
|
min = min_cur;
|
2010-07-26 14:07:15 +00:00
|
|
|
|
2015-02-11 12:53:02 +00:00
|
|
|
if (max == GST_CLOCK_TIME_NONE)
|
|
|
|
max = max_cur;
|
|
|
|
else if (max_cur < max)
|
|
|
|
max = max_cur;
|
2010-07-26 14:07:15 +00:00
|
|
|
|
2015-02-11 12:53:02 +00:00
|
|
|
live = TRUE;
|
|
|
|
}
|
2010-07-26 14:07:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
gst_query_unref (peerquery);
|
2012-01-26 17:15:51 +00:00
|
|
|
g_value_reset (&item);
|
2010-07-26 14:07:15 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case GST_ITERATOR_RESYNC:
|
|
|
|
live = FALSE;
|
|
|
|
min = 0;
|
|
|
|
max = GST_CLOCK_TIME_NONE;
|
|
|
|
res = TRUE;
|
|
|
|
gst_iterator_resync (it);
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
res = FALSE;
|
|
|
|
done = TRUE;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
2012-01-26 17:15:51 +00:00
|
|
|
g_value_unset (&item);
|
2010-07-26 14:07:15 +00:00
|
|
|
gst_iterator_free (it);
|
|
|
|
|
2014-11-17 12:16:03 +00:00
|
|
|
mix->live = live;
|
|
|
|
|
2010-07-26 14:07:15 +00:00
|
|
|
if (res) {
|
|
|
|
/* store the results */
|
|
|
|
GST_DEBUG_OBJECT (mix, "Calculated total latency: live %s, min %"
|
|
|
|
GST_TIME_FORMAT ", max %" GST_TIME_FORMAT,
|
|
|
|
(live ? "yes" : "no"), GST_TIME_ARGS (min), GST_TIME_ARGS (max));
|
|
|
|
gst_query_set_latency (query, live, min, max);
|
|
|
|
}
|
|
|
|
|
|
|
|
return res;
|
|
|
|
}
|
|
|
|
|
|
|
|
static gboolean
|
2012-01-26 17:15:51 +00:00
|
|
|
gst_videomixer2_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
|
2010-07-26 14:07:15 +00:00
|
|
|
{
|
2012-01-26 17:15:51 +00:00
|
|
|
GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (parent);
|
2010-07-26 14:07:15 +00:00
|
|
|
gboolean res = FALSE;
|
|
|
|
|
|
|
|
switch (GST_QUERY_TYPE (query)) {
|
|
|
|
case GST_QUERY_POSITION:
|
|
|
|
{
|
|
|
|
GstFormat format;
|
|
|
|
|
|
|
|
gst_query_parse_position (query, &format, NULL);
|
|
|
|
|
|
|
|
switch (format) {
|
|
|
|
case GST_FORMAT_TIME:
|
|
|
|
gst_query_set_position (query, format,
|
|
|
|
gst_segment_to_stream_time (&mix->segment, GST_FORMAT_TIME,
|
2012-01-25 17:40:03 +00:00
|
|
|
mix->segment.position));
|
2010-07-26 14:07:15 +00:00
|
|
|
res = TRUE;
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case GST_QUERY_DURATION:
|
|
|
|
res = gst_videomixer2_query_duration (mix, query);
|
|
|
|
break;
|
|
|
|
case GST_QUERY_LATENCY:
|
|
|
|
res = gst_videomixer2_query_latency (mix, query);
|
|
|
|
break;
|
2012-01-26 17:15:51 +00:00
|
|
|
case GST_QUERY_CAPS:
|
2013-07-26 17:40:53 +00:00
|
|
|
res = gst_pad_query_default (pad, parent, query);
|
2012-01-26 17:15:51 +00:00
|
|
|
break;
|
2010-07-26 14:07:15 +00:00
|
|
|
default:
|
|
|
|
/* FIXME, needs a custom query handler because we have multiple
|
|
|
|
* sinkpads */
|
|
|
|
res = FALSE;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
return res;
|
|
|
|
}
|
|
|
|
|
|
|
|
static gboolean
|
2012-01-26 17:15:51 +00:00
|
|
|
gst_videomixer2_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
|
2010-07-26 14:07:15 +00:00
|
|
|
{
|
2012-01-26 17:15:51 +00:00
|
|
|
GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (parent);
|
2010-07-26 14:07:15 +00:00
|
|
|
gboolean result;
|
|
|
|
|
|
|
|
switch (GST_EVENT_TYPE (event)) {
|
2012-01-26 17:15:51 +00:00
|
|
|
case GST_EVENT_QOS:
|
|
|
|
{
|
|
|
|
GstQOSType type;
|
2010-07-26 14:07:15 +00:00
|
|
|
GstClockTimeDiff diff;
|
|
|
|
GstClockTime timestamp;
|
|
|
|
gdouble proportion;
|
|
|
|
|
2012-01-26 17:15:51 +00:00
|
|
|
gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
|
2010-07-26 14:07:15 +00:00
|
|
|
|
|
|
|
gst_videomixer2_update_qos (mix, proportion, diff, timestamp);
|
|
|
|
|
|
|
|
result = gst_videomixer2_push_sink_event (mix, event);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case GST_EVENT_SEEK:
|
|
|
|
{
|
|
|
|
gdouble rate;
|
|
|
|
GstFormat fmt;
|
|
|
|
GstSeekFlags flags;
|
|
|
|
GstSeekType start_type, stop_type;
|
|
|
|
gint64 start, stop;
|
|
|
|
GSList *l;
|
|
|
|
gdouble abs_rate;
|
|
|
|
|
|
|
|
/* parse the seek parameters */
|
|
|
|
gst_event_parse_seek (event, &rate, &fmt, &flags, &start_type,
|
|
|
|
&start, &stop_type, &stop);
|
|
|
|
|
|
|
|
if (rate <= 0.0) {
|
|
|
|
GST_ERROR_OBJECT (mix, "Negative rates not supported yet");
|
|
|
|
result = FALSE;
|
|
|
|
gst_event_unref (event);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
GST_DEBUG_OBJECT (mix, "Handling SEEK event");
|
|
|
|
|
|
|
|
abs_rate = ABS (rate);
|
|
|
|
|
|
|
|
GST_VIDEO_MIXER2_LOCK (mix);
|
|
|
|
for (l = mix->sinkpads; l; l = l->next) {
|
|
|
|
GstVideoMixer2Pad *p = l->data;
|
|
|
|
|
|
|
|
if (flags & GST_SEEK_FLAG_FLUSH) {
|
|
|
|
gst_buffer_replace (&p->mixcol->buffer, NULL);
|
|
|
|
p->mixcol->start_time = p->mixcol->end_time = -1;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Convert to the output segment rate */
|
2012-01-25 17:40:03 +00:00
|
|
|
if (ABS (mix->segment.rate) != abs_rate) {
|
|
|
|
if (ABS (mix->segment.rate) != 1.0 && p->mixcol->buffer) {
|
|
|
|
p->mixcol->start_time /= ABS (mix->segment.rate);
|
|
|
|
p->mixcol->end_time /= ABS (mix->segment.rate);
|
2010-07-26 14:07:15 +00:00
|
|
|
}
|
|
|
|
if (abs_rate != 1.0 && p->mixcol->buffer) {
|
|
|
|
p->mixcol->start_time *= abs_rate;
|
|
|
|
p->mixcol->end_time *= abs_rate;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
GST_VIDEO_MIXER2_UNLOCK (mix);
|
|
|
|
|
2012-01-26 17:15:51 +00:00
|
|
|
gst_segment_do_seek (&mix->segment, rate, fmt, flags, start_type, start,
|
2010-07-26 14:07:15 +00:00
|
|
|
stop_type, stop, NULL);
|
2012-01-25 17:40:03 +00:00
|
|
|
mix->segment.position = -1;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->ts_offset = 0;
|
|
|
|
mix->nframes = 0;
|
|
|
|
mix->newseg_pending = TRUE;
|
|
|
|
|
|
|
|
gst_videomixer2_reset_qos (mix);
|
|
|
|
|
2014-02-19 02:04:03 +00:00
|
|
|
result = gst_collect_pads_src_event_default (mix->collect, pad, event);
|
2010-07-26 14:07:15 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case GST_EVENT_NAVIGATION:
|
|
|
|
/* navigation is rather pointless. */
|
|
|
|
result = FALSE;
|
|
|
|
gst_event_unref (event);
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
/* just forward the rest for now */
|
|
|
|
result = gst_videomixer2_push_sink_event (mix, event);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
static gboolean
|
2012-01-26 17:15:51 +00:00
|
|
|
gst_videomixer2_src_setcaps (GstPad * pad, GstVideoMixer2 * mix, GstCaps * caps)
|
2010-07-26 14:07:15 +00:00
|
|
|
{
|
|
|
|
gboolean ret = FALSE;
|
2012-01-26 17:15:51 +00:00
|
|
|
GstVideoInfo info;
|
2010-07-26 14:07:15 +00:00
|
|
|
|
|
|
|
GST_INFO_OBJECT (pad, "set src caps: %" GST_PTR_FORMAT, caps);
|
|
|
|
|
2012-01-26 17:15:51 +00:00
|
|
|
if (!gst_video_info_from_caps (&info, caps))
|
2010-07-26 14:07:15 +00:00
|
|
|
goto done;
|
|
|
|
|
|
|
|
GST_VIDEO_MIXER2_LOCK (mix);
|
|
|
|
|
2014-03-13 09:35:30 +00:00
|
|
|
mix->blend = NULL;
|
|
|
|
mix->overlay = NULL;
|
|
|
|
mix->fill_checker = NULL;
|
|
|
|
mix->fill_color = NULL;
|
|
|
|
|
2012-01-26 17:15:51 +00:00
|
|
|
if (GST_VIDEO_INFO_FPS_N (&mix->info) != GST_VIDEO_INFO_FPS_N (&info) ||
|
|
|
|
GST_VIDEO_INFO_FPS_D (&mix->info) != GST_VIDEO_INFO_FPS_D (&info)) {
|
2012-01-25 17:40:03 +00:00
|
|
|
if (mix->segment.position != -1) {
|
|
|
|
mix->ts_offset = mix->segment.position - mix->segment.start;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->nframes = 0;
|
|
|
|
}
|
|
|
|
gst_videomixer2_reset_qos (mix);
|
|
|
|
}
|
|
|
|
|
2012-01-26 17:15:51 +00:00
|
|
|
mix->info = info;
|
2010-07-26 14:07:15 +00:00
|
|
|
|
2012-01-26 17:15:51 +00:00
|
|
|
switch (GST_VIDEO_INFO_FORMAT (&mix->info)) {
|
2010-07-26 14:07:15 +00:00
|
|
|
case GST_VIDEO_FORMAT_AYUV:
|
|
|
|
mix->blend = gst_video_mixer_blend_ayuv;
|
videomixer2: Add transparent background option for alpha channel formats
This option allows the videomixer2 element to output a valid alpha
channel when the inputs contain a valid alpha channel. This allows
mixing to occur in multiple stages serially.
The following pipeline shows an example of such a pipeline:
gst-launch videotestsrc background-color=0x000000 pattern=ball ! video/x-raw-yuv,format=\(fourcc\)AYUV ! videomixer2 background=transparent name=mix1 ! videomixer2 name=mix2 ! ffmpegcolorspace ! autovideosink videotestsrc ! video/x-raw-yuv,format=\(fourcc\)AYUV ! mix2.
The first videotestsrc in this pipeline creates a moving ball on a
transparent background. It is then passed to the first videomixer2.
Previously, this videomixer2 would have forced the alpha channel to
1.0 and given a background of checker, black, or white to the
stream. With this patch, however, you can now specify the background
as transparent, and the alpha channel of the input will be
preserved. This allows for further mixing downstream, as is shown in
the above pipeline where a second videomixer2 is used to mix in a
background of an smpte videotestsrc. So the result is a ball hovering
over the smpte test source. This could, of course, have been
accomplished with a single mixer element, but staged mixing is useful
when it is not convenient to mix all video at once (e.g. a pipeline
where a foreground and background bin exist and are mixed at the final
output, but the foreground bin needs an internal mixer to create
transitions between clips).
Fixes bug #639994.
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = gst_video_mixer_overlay_ayuv;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_ayuv;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_ayuv;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_ARGB:
|
|
|
|
mix->blend = gst_video_mixer_blend_argb;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = gst_video_mixer_overlay_argb;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_argb;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_argb;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_BGRA:
|
|
|
|
mix->blend = gst_video_mixer_blend_bgra;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = gst_video_mixer_overlay_bgra;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_bgra;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_bgra;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_ABGR:
|
|
|
|
mix->blend = gst_video_mixer_blend_abgr;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = gst_video_mixer_overlay_abgr;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_abgr;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_abgr;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_RGBA:
|
|
|
|
mix->blend = gst_video_mixer_blend_rgba;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = gst_video_mixer_overlay_rgba;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_rgba;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_rgba;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_Y444:
|
|
|
|
mix->blend = gst_video_mixer_blend_y444;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_y444;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_y444;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_Y42B:
|
|
|
|
mix->blend = gst_video_mixer_blend_y42b;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_y42b;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_y42b;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_YUY2:
|
|
|
|
mix->blend = gst_video_mixer_blend_yuy2;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_yuy2;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_yuy2;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_UYVY:
|
|
|
|
mix->blend = gst_video_mixer_blend_uyvy;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_uyvy;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_uyvy;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_YVYU:
|
|
|
|
mix->blend = gst_video_mixer_blend_yvyu;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_yvyu;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_yvyu;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_I420:
|
|
|
|
mix->blend = gst_video_mixer_blend_i420;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_i420;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_i420;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_YV12:
|
|
|
|
mix->blend = gst_video_mixer_blend_yv12;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_yv12;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_yv12;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
2012-09-07 21:20:57 +00:00
|
|
|
case GST_VIDEO_FORMAT_NV12:
|
|
|
|
mix->blend = gst_video_mixer_blend_nv12;
|
|
|
|
mix->overlay = mix->blend;
|
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_nv12;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_nv12;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_NV21:
|
|
|
|
mix->blend = gst_video_mixer_blend_nv21;
|
|
|
|
mix->overlay = mix->blend;
|
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_nv21;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_nv21;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
2010-07-26 14:07:15 +00:00
|
|
|
case GST_VIDEO_FORMAT_Y41B:
|
|
|
|
mix->blend = gst_video_mixer_blend_y41b;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_y41b;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_y41b;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_RGB:
|
|
|
|
mix->blend = gst_video_mixer_blend_rgb;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_rgb;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_rgb;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_BGR:
|
|
|
|
mix->blend = gst_video_mixer_blend_bgr;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_bgr;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_bgr;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_xRGB:
|
|
|
|
mix->blend = gst_video_mixer_blend_xrgb;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_xrgb;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_xrgb;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_xBGR:
|
|
|
|
mix->blend = gst_video_mixer_blend_xbgr;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_xbgr;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_xbgr;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_RGBx:
|
|
|
|
mix->blend = gst_video_mixer_blend_rgbx;
|
2011-01-19 19:07:17 +00:00
|
|
|
mix->overlay = mix->blend;
|
2010-07-26 14:07:15 +00:00
|
|
|
mix->fill_checker = gst_video_mixer_fill_checker_rgbx;
|
|
|
|
mix->fill_color = gst_video_mixer_fill_color_rgbx;
|
|
|
|
ret = TRUE;
|
|
|
|
break;
|
|
|
|
case GST_VIDEO_FORMAT_BGRx:
|
|
|
|
mix->blend = gst_video_mixer_blend_bgrx;
|
2011-01-19 19:07:17 +00:00
      mix->overlay = mix->blend;
      mix->fill_checker = gst_video_mixer_fill_checker_bgrx;
      mix->fill_color = gst_video_mixer_fill_color_bgrx;
      ret = TRUE;
      break;
    default:
      break;
  }
  GST_VIDEO_MIXER2_UNLOCK (mix);

  if (mix->current_caps == NULL ||
      gst_caps_is_equal (caps, mix->current_caps) == FALSE) {
    gst_caps_replace (&mix->current_caps, caps);
    mix->send_caps = TRUE;
  }

done:
  return ret;
}

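/* CollectPads clip callback: converts the buffer timestamps to running time
 * in the pad's segment (scaled by the absolute output segment rate) and
 * drops buffers that end before the buffer already queued on the pad. */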
static GstFlowReturn
gst_videomixer2_sink_clip (GstCollectPads * pads,
    GstCollectData * data, GstBuffer * buf, GstBuffer ** outbuf,
    GstVideoMixer2 * mix)
{
  GstVideoMixer2Pad *pad = GST_VIDEO_MIXER2_PAD (data->pad);
  GstVideoMixer2Collect *mixcol = pad->mixcol;
  GstClockTime start_time, end_time;

  start_time = GST_BUFFER_TIMESTAMP (buf);
  if (start_time == -1) {
    GST_ERROR_OBJECT (pad, "Timestamped buffers required!");
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }

  end_time = GST_BUFFER_DURATION (buf);
  if (end_time == -1 && GST_VIDEO_INFO_FPS_N (&pad->info) != 0)
    end_time =
        gst_util_uint64_scale_int_round (GST_SECOND,
        GST_VIDEO_INFO_FPS_D (&pad->info), GST_VIDEO_INFO_FPS_N (&pad->info));
  if (end_time == -1) {
    *outbuf = buf;
    return GST_FLOW_OK;
  }

  start_time = MAX (start_time, mixcol->collect.segment.start);
  start_time =
      gst_segment_to_running_time (&mixcol->collect.segment,
      GST_FORMAT_TIME, start_time);

  end_time += GST_BUFFER_TIMESTAMP (buf);
  if (mixcol->collect.segment.stop != -1)
    end_time = MIN (end_time, mixcol->collect.segment.stop);
  end_time =
      gst_segment_to_running_time (&mixcol->collect.segment,
      GST_FORMAT_TIME, end_time);

  /* Convert to the output segment rate */
  if (ABS (mix->segment.rate) != 1.0) {
    start_time *= ABS (mix->segment.rate);
    end_time *= ABS (mix->segment.rate);
  }

  if (mixcol->buffer != NULL && end_time < mixcol->end_time) {
    gst_buffer_unref (buf);
    *outbuf = NULL;
    return GST_FLOW_OK;
  }

  *outbuf = buf;
  return GST_FLOW_OK;
}

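/* CollectPads flush callback: drops any tags that were collected but not
 * yet pushed downstream. */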
static void
gst_videomixer2_flush (GstCollectPads * pads, GstVideoMixer2 * mix)
{
  if (mix->pending_tags) {
    gst_tag_list_unref (mix->pending_tags);
    mix->pending_tags = NULL;
  }
}

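/* CollectPads event callback: handles CAPS, SEGMENT, FLUSH_STOP and TAG
 * events from the sink pads; anything else is passed on to the default
 * CollectPads handler. */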
static gboolean
gst_videomixer2_sink_event (GstCollectPads * pads, GstCollectData * cdata,
    GstEvent * event, GstVideoMixer2 * mix)
{
  GstVideoMixer2Pad *pad = GST_VIDEO_MIXER2_PAD (cdata->pad);
  gboolean ret = TRUE, discard = FALSE;

  GST_DEBUG_OBJECT (pad, "Got %s event: %" GST_PTR_FORMAT,
      GST_EVENT_TYPE_NAME (event), event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret =
          gst_videomixer2_pad_sink_setcaps (GST_PAD (pad), GST_OBJECT (mix),
          caps);
      gst_event_unref (event);
      event = NULL;
      break;
    }
    case GST_EVENT_SEGMENT:{
      GstSegment seg;

      gst_event_copy_segment (event, &seg);

      g_assert (seg.format == GST_FORMAT_TIME);
      gst_videomixer2_reset_qos (mix);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      mix->newseg_pending = TRUE;

      gst_videomixer2_reset_qos (mix);
      gst_buffer_replace (&pad->mixcol->buffer, NULL);
      pad->mixcol->start_time = -1;
      pad->mixcol->end_time = -1;

      mix->segment.position = -1;
      mix->ts_offset = 0;
      mix->nframes = 0;
      break;
    case GST_EVENT_TAG:
    {
      /* collect tags here so we can push them out when we collect data */
      GstTagList *tags;

      gst_event_parse_tag (event, &tags);
      tags = gst_tag_list_merge (mix->pending_tags, tags, GST_TAG_MERGE_APPEND);
      if (mix->pending_tags)
        gst_tag_list_unref (mix->pending_tags);
      mix->pending_tags = tags;
      event = NULL;
      break;
    }
    default:
      break;
  }

  if (event != NULL)
    return gst_collect_pads_event_default (pads, cdata, event, discard);

  return ret;
}

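/* GstIterator fold function: pushes one event to a single sink pad and
 * records a failure in the fold return value. */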
static gboolean
forward_event_func (GValue * item, GValue * ret, GstEvent * event)
{
  GstPad *pad = g_value_get_object (item);

  gst_event_ref (event);
  GST_LOG_OBJECT (pad, "About to send event %s", GST_EVENT_TYPE_NAME (event));
  if (!gst_pad_push_event (pad, event)) {
    g_value_set_boolean (ret, FALSE);
    GST_WARNING_OBJECT (pad, "Sending event %p (%s) failed.",
        event, GST_EVENT_TYPE_NAME (event));
  } else {
    GST_LOG_OBJECT (pad, "Sent event %p (%s).",
        event, GST_EVENT_TYPE_NAME (event));
  }
  return TRUE;
}

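/* Forwards an event to all sink pads; returns FALSE if any pad failed to
 * handle it. */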
static gboolean
gst_videomixer2_push_sink_event (GstVideoMixer2 * mix, GstEvent * event)
{
  GstIterator *it;
  GValue vret = { 0 };

  GST_LOG_OBJECT (mix, "Forwarding event %p (%s)", event,
      GST_EVENT_TYPE_NAME (event));

  g_value_init (&vret, G_TYPE_BOOLEAN);
  g_value_set_boolean (&vret, TRUE);
  it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
  gst_iterator_fold (it, (GstIteratorFoldFunction) forward_event_func, &vret,
      event);
  gst_iterator_free (it);
  gst_event_unref (event);

  return g_value_get_boolean (&vret);
}

/* GstElement vmethods */
static GstStateChangeReturn
gst_videomixer2_change_state (GstElement * element, GstStateChange transition)
{
  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (element);
  GstStateChangeReturn ret;

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      mix->send_stream_start = TRUE;
      mix->send_caps = TRUE;
      gst_segment_init (&mix->segment, GST_FORMAT_TIME);
      gst_caps_replace (&mix->current_caps, NULL);
      GST_LOG_OBJECT (mix, "starting collectpads");
      gst_collect_pads_start (mix->collect);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      GST_LOG_OBJECT (mix, "stopping collectpads");
      gst_collect_pads_stop (mix->collect);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_videomixer2_reset (mix);
      break;
    default:
      break;
  }

  return ret;
}

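/* Creates a new sink_%u request pad, adds it to the CollectPads and keeps
 * the internal sink pad list sorted by zorder. */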
static GstPad *
gst_videomixer2_request_new_pad (GstElement * element,
    GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
{
  GstVideoMixer2 *mix;
  GstVideoMixer2Pad *mixpad;
  GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);

  mix = GST_VIDEO_MIXER2 (element);

  if (templ == gst_element_class_get_pad_template (klass, "sink_%u")) {
    guint serial = 0;
    gchar *name = NULL;
    GstVideoMixer2Collect *mixcol = NULL;

    GST_VIDEO_MIXER2_LOCK (mix);
    if (req_name == NULL || strlen (req_name) < 6
        || !g_str_has_prefix (req_name, "sink_")) {
      /* no name given when requesting the pad, use next available int */
      serial = mix->next_sinkpad++;
    } else {
      /* parse serial number from requested padname */
      serial = g_ascii_strtoull (&req_name[5], NULL, 10);
      if (serial >= mix->next_sinkpad)
        mix->next_sinkpad = serial + 1;
    }
    /* create new pad with the name */
    name = g_strdup_printf ("sink_%u", serial);
    mixpad = g_object_new (GST_TYPE_VIDEO_MIXER2_PAD, "name", name, "direction",
        templ->direction, "template", templ, NULL);
    g_free (name);

    mixpad->zorder = mix->numpads;
    mixpad->xpos = DEFAULT_PAD_XPOS;
    mixpad->ypos = DEFAULT_PAD_YPOS;
    mixpad->alpha = DEFAULT_PAD_ALPHA;

    mixcol = (GstVideoMixer2Collect *)
        gst_collect_pads_add_pad (mix->collect, GST_PAD (mixpad),
        sizeof (GstVideoMixer2Collect),
        (GstCollectDataDestroyNotify) gst_videomixer2_collect_free, TRUE);

    /* Keep track of each other */
    mixcol->mixpad = mixpad;
    mixpad->mixcol = mixcol;

    mixcol->start_time = -1;
    mixcol->end_time = -1;

    /* Keep an internal list of mixpads for zordering */
    mix->sinkpads = g_slist_insert_sorted (mix->sinkpads, mixpad,
        (GCompareFunc) pad_zorder_compare);
    mix->numpads++;
    GST_VIDEO_MIXER2_UNLOCK (mix);
  } else {
    return NULL;
  }

  GST_DEBUG_OBJECT (element, "Adding pad %s", GST_PAD_NAME (mixpad));

  /* add the pad to the element */
  gst_element_add_pad (element, GST_PAD (mixpad));
  gst_child_proxy_child_added (GST_CHILD_PROXY (mix), G_OBJECT (mixpad),
      GST_OBJECT_NAME (mixpad));

  return GST_PAD (mixpad);
}

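/* Releases a request pad: frees its converter, removes it from the sink pad
 * list and the CollectPads, and updates the source caps if a format had
 * already been negotiated. */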
static void
gst_videomixer2_release_pad (GstElement * element, GstPad * pad)
{
  GstVideoMixer2 *mix = NULL;
  GstVideoMixer2Pad *mixpad;
  gboolean update_caps;

  mix = GST_VIDEO_MIXER2 (element);

  GST_VIDEO_MIXER2_LOCK (mix);
  if (G_UNLIKELY (g_slist_find (mix->sinkpads, pad) == NULL)) {
    g_warning ("Unknown pad %s", GST_PAD_NAME (pad));
    goto error;
  }

  mixpad = GST_VIDEO_MIXER2_PAD (pad);

  if (mixpad->convert)
    gst_video_converter_free (mixpad->convert);
  mixpad->convert = NULL;

  mix->sinkpads = g_slist_remove (mix->sinkpads, pad);
  gst_child_proxy_child_removed (GST_CHILD_PROXY (mix), G_OBJECT (mixpad),
      GST_OBJECT_NAME (mixpad));
  mix->numpads--;

  GST_COLLECT_PADS_STREAM_LOCK (mix->collect);
  gst_videomixer2_update_converters (mix);
  GST_COLLECT_PADS_STREAM_UNLOCK (mix->collect);

  update_caps = GST_VIDEO_INFO_FORMAT (&mix->info) != GST_VIDEO_FORMAT_UNKNOWN;
  GST_VIDEO_MIXER2_UNLOCK (mix);

  gst_collect_pads_remove_pad (mix->collect, pad);

  if (update_caps)
    gst_videomixer2_update_src_caps (mix);

  gst_element_remove_pad (element, pad);
  return;

error:
  GST_VIDEO_MIXER2_UNLOCK (mix);
}

/* GObject vmethods */
static void
gst_videomixer2_finalize (GObject * o)
{
  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (o);

  gst_object_unref (mix->collect);
  g_mutex_clear (&mix->lock);
  g_mutex_clear (&mix->setcaps_lock);

  G_OBJECT_CLASS (parent_class)->finalize (o);
}

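/* Frees the per-pad converters, pending tags and cached output caps. */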
static void
gst_videomixer2_dispose (GObject * o)
{
  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (o);
  GSList *tmp;

  for (tmp = mix->sinkpads; tmp; tmp = tmp->next) {
    GstVideoMixer2Pad *mixpad = tmp->data;

    if (mixpad->convert)
      gst_video_converter_free (mixpad->convert);
    mixpad->convert = NULL;
  }

  if (mix->pending_tags) {
    gst_tag_list_unref (mix->pending_tags);
    mix->pending_tags = NULL;
  }

  gst_caps_replace (&mix->current_caps, NULL);

  G_OBJECT_CLASS (parent_class)->dispose (o);
}

static void
gst_videomixer2_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (object);

  switch (prop_id) {
    case PROP_BACKGROUND:
      g_value_set_enum (value, mix->background);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_videomixer2_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (object);

  switch (prop_id) {
    case PROP_BACKGROUND:
      mix->background = g_value_get_enum (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

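/* The per-pad properties (alpha, xpos, ypos, zorder) are exposed to
 * applications through the GstChildProxy interface implemented below.
 * Illustrative application-side sketch (values are examples only, cleanup
 * and error handling omitted):
 *
 *   GstPad *pad = gst_element_get_request_pad (mixer, "sink_%u");
 *   g_object_set (pad, "alpha", 0.5, "xpos", 100, NULL);
 */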
/* GstChildProxy implementation */
static GObject *
gst_videomixer2_child_proxy_get_child_by_index (GstChildProxy * child_proxy,
    guint index)
{
  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (child_proxy);
  GObject *obj;

  GST_VIDEO_MIXER2_LOCK (mix);
  if ((obj = g_slist_nth_data (mix->sinkpads, index)))
    g_object_ref (obj);
  GST_VIDEO_MIXER2_UNLOCK (mix);
  return obj;
}

static guint
gst_videomixer2_child_proxy_get_children_count (GstChildProxy * child_proxy)
{
  guint count = 0;
  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (child_proxy);

  GST_VIDEO_MIXER2_LOCK (mix);
  count = mix->numpads;
  GST_VIDEO_MIXER2_UNLOCK (mix);
  GST_INFO_OBJECT (mix, "Children Count: %d", count);
  return count;
}

static void
gst_videomixer2_child_proxy_init (gpointer g_iface, gpointer iface_data)
{
  GstChildProxyInterface *iface = g_iface;

  GST_INFO ("initializing child proxy interface");
  iface->get_child_by_index = gst_videomixer2_child_proxy_get_child_by_index;
  iface->get_children_count = gst_videomixer2_child_proxy_get_children_count;
}

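/* Names the CollectPads object after the element so its debug output is
 * easier to attribute. */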
static void
gst_videomixer2_constructed (GObject * obj)
{
  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (obj);
  gchar *cp_name;

  cp_name = g_strconcat (GST_OBJECT_NAME (obj), "-collectpads", NULL);
  gst_object_set_name (GST_OBJECT (mix->collect), cp_name);
  g_free (cp_name);

  G_OBJECT_CLASS (gst_videomixer2_parent_class)->constructed (obj);
}

/* GObject boilerplate */
static void
gst_videomixer2_class_init (GstVideoMixer2Class * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *gstelement_class = (GstElementClass *) klass;

  gobject_class->constructed = gst_videomixer2_constructed;
  gobject_class->finalize = gst_videomixer2_finalize;
  gobject_class->dispose = gst_videomixer2_dispose;

  gobject_class->get_property = gst_videomixer2_get_property;
  gobject_class->set_property = gst_videomixer2_set_property;

  g_object_class_install_property (gobject_class, PROP_BACKGROUND,
      g_param_spec_enum ("background", "Background", "Background type",
          GST_TYPE_VIDEO_MIXER2_BACKGROUND,
          DEFAULT_BACKGROUND, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gstelement_class->request_new_pad =
      GST_DEBUG_FUNCPTR (gst_videomixer2_request_new_pad);
  gstelement_class->release_pad =
      GST_DEBUG_FUNCPTR (gst_videomixer2_release_pad);
  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_videomixer2_change_state);

  gst_element_class_add_pad_template (gstelement_class,
      gst_static_pad_template_get (&src_factory));
  gst_element_class_add_pad_template (gstelement_class,
      gst_static_pad_template_get (&sink_factory));

  gst_element_class_set_static_metadata (gstelement_class, "Video mixer 2",
      "Filter/Editor/Video/Compositor",
      "Mix multiple video streams", "Wim Taymans <wim@fluendo.com>, "
      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");

  /* Register the pad class */
  g_type_class_ref (GST_TYPE_VIDEO_MIXER2_PAD);
}

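/* Instance init: creates the source pad, sets up the CollectPads callbacks
 * (collect, event, query, clip, flush) and resets the element state. */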
static void
gst_videomixer2_init (GstVideoMixer2 * mix)
{
  GstElementClass *klass = GST_ELEMENT_GET_CLASS (mix);

  mix->srcpad =
      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
          "src"), "src");
  gst_pad_set_query_function (GST_PAD (mix->srcpad),
      GST_DEBUG_FUNCPTR (gst_videomixer2_src_query));
  gst_pad_set_event_function (GST_PAD (mix->srcpad),
      GST_DEBUG_FUNCPTR (gst_videomixer2_src_event));
  gst_element_add_pad (GST_ELEMENT (mix), mix->srcpad);

  mix->collect = gst_collect_pads_new ();
  gst_collect_pads_set_flush_function (mix->collect,
      (GstCollectPadsFlushFunction) gst_videomixer2_flush, mix);
  mix->background = DEFAULT_BACKGROUND;
  mix->current_caps = NULL;
  mix->pending_tags = NULL;

  gst_collect_pads_set_function (mix->collect,
      (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_videomixer2_collected),
      mix);
  gst_collect_pads_set_event_function (mix->collect,
      (GstCollectPadsEventFunction) gst_videomixer2_sink_event, mix);
  gst_collect_pads_set_query_function (mix->collect,
      (GstCollectPadsQueryFunction) gst_videomixer2_sink_query, mix);
  gst_collect_pads_set_clip_function (mix->collect,
      (GstCollectPadsClipFunction) gst_videomixer2_sink_clip, mix);

  g_mutex_init (&mix->lock);
  g_mutex_init (&mix->setcaps_lock);
  /* initialize variables */
  gst_videomixer2_reset (mix);
}

/* Element registration */
static gboolean
plugin_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT (gst_videomixer2_debug, "videomixer", 0,
      "video mixer");

  gst_video_mixer_init_blend ();

  return gst_element_register (plugin, "videomixer", GST_RANK_PRIMARY,
      GST_TYPE_VIDEO_MIXER2);
}

GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    videomixer,
    "Video mixer", plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME,
    GST_PACKAGE_ORIGIN)