videomixer: port to 0.11

It builds and gst-inspect-0.11 works; otherwise untested.
Wim Taymans 2012-01-26 18:15:51 +01:00
parent bd032ae3bf
commit df260a2ec6
5 changed files with 193 additions and 134 deletions
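
For orientation, a recurring change throughout the diff below is that the element's separate format/width/height/fps/par fields are replaced by a single GstVideoInfo that is filled from caps and read back through the GST_VIDEO_INFO_* accessors. A minimal sketch of that 0.11 idiom (illustrative only, not taken from the patch; the helper name is made up):

#include <gst/video/video.h>

/* Illustrative only: parse caps once into a GstVideoInfo and read the
 * individual fields back through the GST_VIDEO_INFO_* accessors. */
static gboolean
print_video_info (GstCaps * caps)
{
  GstVideoInfo info;

  gst_video_info_init (&info);
  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;

  g_print ("format %d, %dx%d, %d/%d fps, frame size %" G_GSIZE_FORMAT "\n",
      GST_VIDEO_INFO_FORMAT (&info),
      GST_VIDEO_INFO_WIDTH (&info), GST_VIDEO_INFO_HEIGHT (&info),
      GST_VIDEO_INFO_FPS_N (&info), GST_VIDEO_INFO_FPS_D (&info),
      GST_VIDEO_INFO_SIZE (&info));
  return TRUE;
}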

configure.ac

@@ -313,7 +313,7 @@ dnl Non ported plugins (non-dependant, then dependant)
 dnl Make sure you have a space before and after all plugins
 GST_PLUGINS_NONPORTED="deinterlace interleave flx goom2k1 \
 imagefreeze interleave monoscope smpte \
-videobox videomixer \
+videobox \
 cairo cairo_gobject dv1394 gdk_pixbuf libdv \
 oss oss4 shout2 \
 taglib wavpack \

blend.c

@@ -44,14 +44,16 @@ GST_DEBUG_CATEGORY_STATIC (gst_videomixer_blend_debug);
 #define BLEND_A32(name, method, LOOP) \
 static void \
 method##_ ##name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
-gint src_width, gint src_height, gdouble src_alpha, \
-GstVideoFrame * destframe) \
+gdouble src_alpha, GstVideoFrame * destframe) \
 { \
 guint s_alpha; \
 gint src_stride, dest_stride; \
 gint dest_width, dest_height; \
 guint8 *src, *dest; \
+gint src_width, src_height; \
 \
+src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
 src = GST_VIDEO_FRAME_PLANE_DATA (srcframe, 0); \
 src_stride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 0); \
 dest = GST_VIDEO_FRAME_PLANE_DATA (destframe, 0); \
@@ -229,13 +231,12 @@ _blend_##format_name (const guint8 * src, guint8 * dest, \
 \
 static void \
 blend_##format_name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
-gint src_width, gint src_height, gdouble src_alpha, \
-GstVideoFrame * destframe) \
+gdouble src_alpha, GstVideoFrame * destframe) \
 { \
 const guint8 *b_src; \
 guint8 *b_dest; \
-gint b_src_width = src_width; \
-gint b_src_height = src_height; \
+gint b_src_width; \
+gint b_src_height; \
 gint xoffset = 0; \
 gint yoffset = 0; \
 gint src_comp_rowstride, dest_comp_rowstride; \
@@ -245,6 +246,10 @@ blend_##format_name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
 gint comp_yoffset, comp_xoffset; \
 gint dest_width, dest_height; \
 const GstVideoFormatInfo *info; \
+gint src_width, src_height; \
+\
+src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
 \
 info = srcframe->info.finfo; \
 dest_width = GST_VIDEO_FRAME_WIDTH (destframe); \
@@ -253,6 +258,9 @@ blend_##format_name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
 xpos = x_round (xpos); \
 ypos = y_round (ypos); \
 \
+b_src_width = src_width; \
+b_src_height = src_height; \
+\
 /* adjust src pointers for negative sizes */ \
 if (xpos < 0) { \
 xoffset = -xpos; \
@@ -439,14 +447,17 @@ PLANAR_YUV_FILL_COLOR (y41b, GST_VIDEO_FORMAT_Y41B, memset);
 #define RGB_BLEND(name, bpp, MEMCPY, BLENDLOOP) \
 static void \
 blend_##name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
-gint src_width, gint src_height, gdouble src_alpha, \
-GstVideoFrame * destframe) \
+gdouble src_alpha, GstVideoFrame * destframe) \
 { \
 gint b_alpha; \
 gint i; \
 gint src_stride, dest_stride; \
 gint dest_width, dest_height; \
 guint8 *dest, *src; \
+gint src_width, src_height; \
+\
+src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
 \
 src = GST_VIDEO_FRAME_PLANE_DATA (srcframe, 0); \
 dest = GST_VIDEO_FRAME_PLANE_DATA (destframe, 0); \
@@ -602,14 +613,17 @@ RGB_FILL_COLOR (bgrx, 4, _memset_bgrx);
 #define PACKED_422_BLEND(name, MEMCPY, BLENDLOOP) \
 static void \
 blend_##name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
-gint src_width, gint src_height, gdouble src_alpha, \
-GstVideoFrame * destframe) \
+gdouble src_alpha, GstVideoFrame * destframe) \
 { \
 gint b_alpha; \
 gint i; \
 gint src_stride, dest_stride; \
 gint dest_width, dest_height; \
 guint8 *src, *dest; \
+gint src_width, src_height; \
+\
+src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
 \
 dest_width = GST_VIDEO_FRAME_WIDTH (destframe); \
 dest_height = GST_VIDEO_FRAME_HEIGHT (destframe); \

blend.h

@@ -23,7 +23,7 @@
 #include <gst/gst.h>
 #include <gst/video/video.h>
-typedef void (*BlendFunction) (GstVideoFrame *srcframe, gint xpos, gint ypos, gint src_width, gint src_height, gdouble src_alpha, GstVideoFrame * destframe);
+typedef void (*BlendFunction) (GstVideoFrame *srcframe, gint xpos, gint ypos, gdouble src_alpha, GstVideoFrame * destframe);
 typedef void (*FillCheckerFunction) (GstVideoFrame * frame);
 typedef void (*FillColorFunction) (GstVideoFrame * frame, gint c1, gint c2, gint c3);
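
The BlendFunction typedef above loses its explicit src_width/src_height arguments because a mapped GstVideoFrame already carries its own dimensions and strides. A hedged sketch of how a caller of this header would now drive a blend function (the helper and its parameters are illustrative, not part of the patch; the header name is assumed to be blend.h):

#include <gst/video/video.h>
#include "blend.h"              /* assumed header name for BlendFunction */

/* Illustrative helper: map input and output buffers as video frames and
 * hand them to a BlendFunction; the sizes travel inside the frames.
 * Return values of gst_video_frame_map () are ignored for brevity. */
static void
blend_one_input (BlendFunction blend,
    GstBuffer * inbuf, GstVideoInfo * in_info,
    GstBuffer * outbuf, GstVideoInfo * out_info,
    gint xpos, gint ypos, gdouble alpha)
{
  GstVideoFrame src, dest;

  gst_video_frame_map (&src, in_info, inbuf, GST_MAP_READ);
  gst_video_frame_map (&dest, out_info, outbuf, GST_MAP_READWRITE);

  blend (&src, xpos, ypos, alpha, &dest);

  gst_video_frame_unmap (&src);
  gst_video_frame_unmap (&dest);
}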

videomixer2.c

@@ -101,7 +101,7 @@ GST_DEBUG_CATEGORY_STATIC (gst_videomixer2_debug);
 #define GST_CAT_DEFAULT gst_videomixer2_debug
 #define GST_VIDEO_MIXER2_GET_LOCK(mix) \
-(GST_VIDEO_MIXER2(mix)->lock)
+(&GST_VIDEO_MIXER2(mix)->lock)
 #define GST_VIDEO_MIXER2_LOCK(mix) \
 (g_mutex_lock(GST_VIDEO_MIXER2_GET_LOCK (mix)))
 #define GST_VIDEO_MIXER2_UNLOCK(mix) \
@@ -165,6 +165,9 @@ gst_videomixer2_collect_free (GstCollectData2 * data)
 gst_buffer_replace (&cdata->buffer, NULL);
 }
+static gboolean gst_videomixer2_src_setcaps (GstPad * pad, GstVideoMixer2 * mix,
+GstCaps * caps);
 static gboolean
 gst_videomixer2_update_src_caps (GstVideoMixer2 * mix)
 {
@@ -271,7 +274,7 @@ gst_videomixer2_update_src_caps (GstVideoMixer2 * mix)
 mix->info = info;
 GST_VIDEO_MIXER2_UNLOCK (mix);
-ret = gst_pad_set_caps (mix->srcpad, caps);
+ret = gst_videomixer2_src_setcaps (mix->srcpad, mix, caps);
 gst_caps_unref (caps);
 } else {
 GST_VIDEO_MIXER2_UNLOCK (mix);
@@ -283,7 +286,8 @@ done:
 static gboolean
-gst_videomixer2_pad_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_videomixer2_pad_sink_setcaps (GstPad * pad, GstObject * parent,
+GstCaps * caps)
 {
 GstVideoMixer2 *mix;
 GstVideoMixer2Pad *mixpad;
@@ -292,7 +296,7 @@ gst_videomixer2_pad_sink_setcaps (GstPad * pad, GstCaps * caps)
 GST_INFO_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, caps);
-mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
+mix = GST_VIDEO_MIXER2 (parent);
 mixpad = GST_VIDEO_MIXER2_PAD (pad);
 if (gst_video_info_from_caps (&info, caps)) {
@@ -319,22 +323,47 @@ gst_videomixer2_pad_sink_setcaps (GstPad * pad, GstCaps * caps)
 ret = gst_videomixer2_update_src_caps (mix);
 beach:
-gst_object_unref (mix);
+return ret;
+}
+static gboolean
+gst_videomixer2_pad_sink_event (GstPad * pad, GstObject * parent,
+GstEvent * event)
+{
+gboolean ret = FALSE;
+switch (GST_EVENT_TYPE (event)) {
+case GST_EVENT_CAPS:
+{
+GstCaps *caps;
+gst_event_parse_caps (event, &caps);
+ret = gst_videomixer2_pad_sink_setcaps (pad, parent, caps);
+gst_event_unref (event);
+break;
+}
+default:
+ret = gst_pad_event_default (pad, parent, event);
+break;
+}
 return ret;
 }
 static GstCaps *
-gst_videomixer2_pad_sink_getcaps (GstPad * pad)
+gst_videomixer2_pad_sink_getcaps (GstPad * pad, GstObject * parent,
+GstCaps * filter)
 {
 GstVideoMixer2 *mix;
 GstCaps *srccaps;
 GstStructure *s;
 gint i, n;
-mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
+mix = GST_VIDEO_MIXER2 (parent);
 srccaps = gst_pad_get_current_caps (GST_PAD (mix->srcpad));
+if (srccaps == NULL)
+srccaps = gst_pad_get_pad_template_caps (GST_PAD (mix->srcpad));
 srccaps = gst_caps_make_writable (srccaps);
 n = gst_caps_get_size (srccaps);
@@ -350,13 +379,12 @@ gst_videomixer2_pad_sink_getcaps (GstPad * pad)
 GST_DEBUG_OBJECT (pad, "Returning %" GST_PTR_FORMAT, srccaps);
-gst_object_unref (mix);
 return srccaps;
 }
 static gboolean
-gst_videomixer2_pad_sink_acceptcaps (GstPad * pad, GstCaps * caps)
+gst_videomixer2_pad_sink_acceptcaps (GstPad * pad, GstObject * parent,
+GstCaps * caps)
 {
 gboolean ret;
 GstVideoMixer2 *mix;
@@ -364,10 +392,13 @@ gst_videomixer2_pad_sink_acceptcaps (GstPad * pad, GstCaps * caps)
 gint i, n;
 GstStructure *s;
-mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
+mix = GST_VIDEO_MIXER2 (parent);
 GST_DEBUG_OBJECT (pad, "%" GST_PTR_FORMAT, caps);
 accepted_caps = gst_pad_get_current_caps (GST_PAD (mix->srcpad));
+if (accepted_caps == NULL)
+accepted_caps = gst_pad_get_pad_template_caps (GST_PAD (mix->srcpad));
 accepted_caps = gst_caps_make_writable (accepted_caps);
 GST_LOG_OBJECT (pad, "src caps %" GST_PTR_FORMAT, accepted_caps);
@@ -386,10 +417,43 @@ gst_videomixer2_pad_sink_acceptcaps (GstPad * pad, GstCaps * caps)
 GST_INFO_OBJECT (pad, "%saccepted caps %" GST_PTR_FORMAT, (ret ? "" : "not "),
 caps);
 GST_INFO_OBJECT (pad, "acceptable caps are %" GST_PTR_FORMAT, accepted_caps);
 gst_caps_unref (accepted_caps);
-gst_object_unref (mix);
+return ret;
+}
+static gboolean
+gst_videomixer2_pad_sink_query (GstPad * pad, GstObject * parent,
+GstQuery * query)
+{
+gboolean ret = FALSE;
+switch (GST_QUERY_TYPE (query)) {
+case GST_QUERY_CAPS:
+{
+GstCaps *filter, *caps;
+gst_query_parse_caps (query, &filter);
+caps = gst_videomixer2_pad_sink_getcaps (pad, parent, filter);
+gst_query_set_caps_result (query, caps);
+gst_caps_unref (caps);
+ret = TRUE;
+break;
+}
+case GST_QUERY_ACCEPT_CAPS:
+{
+GstCaps *caps;
+gst_query_parse_accept_caps (query, &caps);
+ret = gst_videomixer2_pad_sink_acceptcaps (pad, parent, caps);
+gst_query_set_accept_caps_result (query, ret);
+ret = TRUE;
+break;
+}
+default:
+ret = gst_pad_query_default (pad, parent, query);
+break;
+}
 return ret;
 }
@@ -488,12 +552,10 @@ static void
 gst_videomixer2_pad_init (GstVideoMixer2Pad * mixerpad)
 {
 /* setup some pad functions */
-gst_pad_set_setcaps_function (GST_PAD (mixerpad),
-gst_videomixer2_pad_sink_setcaps);
-gst_pad_set_acceptcaps_function (GST_PAD (mixerpad),
-GST_DEBUG_FUNCPTR (gst_videomixer2_pad_sink_acceptcaps));
-gst_pad_set_getcaps_function (GST_PAD (mixerpad),
-gst_videomixer2_pad_sink_getcaps);
+gst_pad_set_event_function (GST_PAD (mixerpad),
+gst_videomixer2_pad_sink_event);
+gst_pad_set_query_function (GST_PAD (mixerpad),
+gst_videomixer2_pad_sink_query);
 mixerpad->zorder = DEFAULT_PAD_ZORDER;
 mixerpad->xpos = DEFAULT_PAD_XPOS;
@@ -583,10 +645,7 @@ gst_videomixer2_reset (GstVideoMixer2 * mix)
 {
 GSList *l;
-mix->format = GST_VIDEO_FORMAT_UNKNOWN;
-mix->width = mix->height = 0;
-mix->fps_n = mix->fps_d = 0;
-mix->par_n = mix->par_d = 0;
+gst_video_info_init (&mix->info);
 mix->ts_offset = 0;
 mix->nframes = 0;
@@ -603,8 +662,7 @@ gst_videomixer2_reset (GstVideoMixer2 * mix)
 mixcol->start_time = -1;
 mixcol->end_time = -1;
-p->fps_n = p->fps_d = 0;
-p->width = p->height = 0;
+gst_video_info_init (&p->info);
 }
 mix->newseg_pending = TRUE;
@@ -785,37 +843,32 @@ gst_videomixer2_blend_buffers (GstVideoMixer2 * mix,
 GstBuffer ** outbuf)
 {
 GSList *l;
-GstFlowReturn ret;
 guint outsize;
 BlendFunction composite;
+GstVideoFrame outframe;
-outsize = gst_video_format_get_size (mix->format, mix->width, mix->height);
-ret = gst_pad_alloc_buffer_and_set_caps (mix->srcpad, GST_BUFFER_OFFSET_NONE,
-outsize, GST_PAD_CAPS (mix->srcpad), outbuf);
-if (ret != GST_FLOW_OK)
-return ret;
+outsize = GST_VIDEO_INFO_SIZE (&mix->info);
+*outbuf = gst_buffer_new_allocate (NULL, outsize, 15);
 GST_BUFFER_TIMESTAMP (*outbuf) = output_start_time;
 GST_BUFFER_DURATION (*outbuf) = output_end_time - output_start_time;
+gst_video_frame_map (&outframe, &mix->info, *outbuf, GST_MAP_READWRITE);
 /* default to blending */
 composite = mix->blend;
 switch (mix->background) {
 case VIDEO_MIXER2_BACKGROUND_CHECKER:
-mix->fill_checker (GST_BUFFER_DATA (*outbuf), mix->width, mix->height);
+mix->fill_checker (&outframe);
 break;
 case VIDEO_MIXER2_BACKGROUND_BLACK:
-mix->fill_color (GST_BUFFER_DATA (*outbuf), mix->width,
-mix->height, 16, 128, 128);
+mix->fill_color (&outframe, 16, 128, 128);
 break;
 case VIDEO_MIXER2_BACKGROUND_WHITE:
-mix->fill_color (GST_BUFFER_DATA (*outbuf), mix->width,
-mix->height, 240, 128, 128);
+mix->fill_color (&outframe, 240, 128, 128);
 break;
 case VIDEO_MIXER2_BACKGROUND_TRANSPARENT:
-orc_memset (GST_BUFFER_DATA (*outbuf), 0,
-gst_video_format_get_row_stride (mix->format, 0,
-mix->width) * mix->height);
+gst_buffer_memset (*outbuf, 0, 0, outsize);
 /* use overlay to keep background transparent */
 composite = mix->overlay;
 break;
@@ -829,6 +882,7 @@ gst_videomixer2_blend_buffers (GstVideoMixer2 * mix,
 GstClockTime timestamp;
 gint64 stream_time;
 GstSegment *seg;
+GstVideoFrame frame;
 seg = &mixcol->collect.segment;
@@ -841,11 +895,14 @@ gst_videomixer2_blend_buffers (GstVideoMixer2 * mix,
 if (GST_CLOCK_TIME_IS_VALID (stream_time))
 gst_object_sync_values (GST_OBJECT (pad), stream_time);
-composite (GST_BUFFER_DATA (mixcol->buffer),
-pad->xpos, pad->ypos, pad->width, pad->height, pad->alpha,
-GST_BUFFER_DATA (*outbuf), mix->width, mix->height);
+gst_video_frame_map (&frame, &pad->info, mixcol->buffer, GST_MAP_READ);
+composite (&frame, pad->xpos, pad->ypos, pad->alpha, &outframe);
+gst_video_frame_unmap (&frame);
 }
 }
+gst_video_frame_unmap (&outframe);
 return GST_FLOW_OK;
 }
@@ -902,22 +959,20 @@ gst_videomixer2_collected (GstCollectPads2 * pads, GstVideoMixer2 * mix)
 gint64 jitter;
 /* If we're not negotiated yet... */
-if (mix->format == GST_VIDEO_FORMAT_UNKNOWN)
+if (GST_VIDEO_INFO_FORMAT (&mix->info) == GST_VIDEO_FORMAT_UNKNOWN)
 return GST_FLOW_NOT_NEGOTIATED;
 if (g_atomic_int_compare_and_exchange (&mix->flush_stop_pending, TRUE, FALSE)) {
 GST_DEBUG_OBJECT (mix, "pending flush stop");
-gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop ());
+gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop (TRUE));
 }
 GST_VIDEO_MIXER2_LOCK (mix);
 if (mix->newseg_pending) {
 GST_DEBUG_OBJECT (mix, "Sending NEWSEGMENT event");
-if (!gst_pad_push_event (mix->srcpad, gst_event_new_new_segment_full (FALSE,
-mix->segment.rate, mix->segment.applied_rate,
-mix->segment.format, mix->segment.start, mix->segment.stop,
-mix->segment.time))) {
+if (!gst_pad_push_event (mix->srcpad,
+gst_event_new_segment (&mix->segment))) {
 ret = GST_FLOW_ERROR;
 goto done;
 }
@@ -938,7 +993,8 @@ gst_videomixer2_collected (GstCollectPads2 * pads, GstVideoMixer2 * mix)
 output_end_time =
 mix->ts_offset + gst_util_uint64_scale (mix->nframes + 1,
-GST_SECOND * mix->fps_d, mix->fps_n);
+GST_SECOND * GST_VIDEO_INFO_FPS_D (&mix->info),
+GST_VIDEO_INFO_FPS_N (&mix->info));
 if (mix->segment.stop != -1)
 output_end_time = MIN (output_end_time, mix->segment.stop);
@@ -1004,39 +1060,43 @@ done:
 return ret;
 }
-static GstCaps *
-gst_videomixer2_src_getcaps (GstPad * pad)
+static gboolean
+gst_videomixer2_query_caps (GstPad * pad, GstObject * parent, GstQuery * query)
 {
-GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
-GstCaps *caps;
+GstCaps *filter, *caps;
+GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (parent);
 GstStructure *s;
 gint n;
-if (mix->format != GST_VIDEO_FORMAT_UNKNOWN) {
-caps = gst_caps_copy (GST_PAD_CAPS (mix->srcpad));
+gst_query_parse_caps (query, &filter);
+if (GST_VIDEO_INFO_FORMAT (&mix->info) != GST_VIDEO_FORMAT_UNKNOWN) {
+caps = gst_pad_get_current_caps (mix->srcpad);
 } else {
-caps = gst_caps_copy (gst_pad_get_pad_template_caps (mix->srcpad));
+caps = gst_pad_get_pad_template_caps (mix->srcpad);
 }
+caps = gst_caps_make_writable (caps);
 n = gst_caps_get_size (caps) - 1;
 for (; n >= 0; n--) {
 s = gst_caps_get_structure (caps, n);
 gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
 "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
-if (mix->fps_d != 0) {
+if (GST_VIDEO_INFO_FPS_D (&mix->info) != 0) {
 gst_structure_set (s,
 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
 }
 }
-gst_object_unref (mix);
-return caps;
+gst_query_set_caps_result (query, caps);
+return TRUE;
 }
 static gboolean
 gst_videomixer2_query_duration (GstVideoMixer2 * mix, GstQuery * query)
 {
+GValue item = { 0 };
 gint64 max;
 gboolean res;
 GstFormat format;
@@ -1053,21 +1113,19 @@ gst_videomixer2_query_duration (GstVideoMixer2 * mix, GstQuery * query)
 /* Take maximum of all durations */
 it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
 while (!done) {
-GstIteratorResult ires;
-gpointer item;
-ires = gst_iterator_next (it, &item);
-switch (ires) {
+switch (gst_iterator_next (it, &item)) {
 case GST_ITERATOR_DONE:
 done = TRUE;
 break;
 case GST_ITERATOR_OK:
 {
-GstPad *pad = GST_PAD_CAST (item);
+GstPad *pad;
 gint64 duration;
+pad = g_value_get_object (&item);
 /* ask sink peer for duration */
-res &= gst_pad_query_peer_duration (pad, &format, &duration);
+res &= gst_pad_peer_query_duration (pad, format, &duration);
 /* take max from all valid return values */
 if (res) {
 /* valid unknown length, stop searching */
@@ -1079,7 +1137,7 @@ gst_videomixer2_query_duration (GstVideoMixer2 * mix, GstQuery * query)
 else if (duration > max)
 max = duration;
 }
-gst_object_unref (pad);
+g_value_reset (&item);
 break;
 }
 case GST_ITERATOR_RESYNC:
@@ -1093,6 +1151,7 @@ gst_videomixer2_query_duration (GstVideoMixer2 * mix, GstQuery * query)
 break;
 }
 }
+g_value_unset (&item);
 gst_iterator_free (it);
 if (res) {
@@ -1113,6 +1172,7 @@ gst_videomixer2_query_latency (GstVideoMixer2 * mix, GstQuery * query)
 gboolean res;
 GstIterator *it;
 gboolean done;
+GValue item = { 0 };
 res = TRUE;
 done = FALSE;
@@ -1123,17 +1183,13 @@ gst_videomixer2_query_latency (GstVideoMixer2 * mix, GstQuery * query)
 /* Take maximum of all latency values */
 it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
 while (!done) {
-GstIteratorResult ires;
-gpointer item;
-ires = gst_iterator_next (it, &item);
-switch (ires) {
+switch (gst_iterator_next (it, &item)) {
 case GST_ITERATOR_DONE:
 done = TRUE;
 break;
 case GST_ITERATOR_OK:
 {
-GstPad *pad = GST_PAD_CAST (item);
+GstPad *pad = g_value_get_object (&item);
 GstQuery *peerquery;
 GstClockTime min_cur, max_cur;
 gboolean live_cur;
@@ -1159,7 +1215,7 @@ gst_videomixer2_query_latency (GstVideoMixer2 * mix, GstQuery * query)
 }
 gst_query_unref (peerquery);
-gst_object_unref (pad);
+g_value_reset (&item);
 break;
 }
 case GST_ITERATOR_RESYNC:
@@ -1175,6 +1231,7 @@ gst_videomixer2_query_latency (GstVideoMixer2 * mix, GstQuery * query)
 break;
 }
 }
+g_value_unset (&item);
 gst_iterator_free (it);
 if (res) {
@@ -1189,9 +1246,9 @@ gst_videomixer2_query_latency (GstVideoMixer2 * mix, GstQuery * query)
 }
 static gboolean
-gst_videomixer2_src_query (GstPad * pad, GstQuery * query)
+gst_videomixer2_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
 {
-GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
+GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (parent);
 gboolean res = FALSE;
 switch (GST_QUERY_TYPE (query)) {
@@ -1219,6 +1276,9 @@ gst_videomixer2_src_query (GstPad * pad, GstQuery * query)
 case GST_QUERY_LATENCY:
 res = gst_videomixer2_query_latency (mix, query);
 break;
+case GST_QUERY_CAPS:
+res = gst_videomixer2_query_caps (pad, parent, query);
+break;
 default:
 /* FIXME, needs a custom query handler because we have multiple
 * sinkpads */
@@ -1226,24 +1286,24 @@ gst_videomixer2_src_query (GstPad * pad, GstQuery * query)
 gst_query_unref (query);
 break;
 }
-gst_object_unref (mix);
 return res;
 }
 static gboolean
-gst_videomixer2_src_event (GstPad * pad, GstEvent * event)
+gst_videomixer2_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
 {
-GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (pad));
+GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (parent);
 gboolean result;
 switch (GST_EVENT_TYPE (event)) {
-case GST_EVENT_QOS:{
+case GST_EVENT_QOS:
+{
+GstQOSType type;
 GstClockTimeDiff diff;
 GstClockTime timestamp;
 gdouble proportion;
-gst_event_parse_qos (event, &proportion, &diff, &timestamp);
+gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
 gst_videomixer2_update_qos (mix, proportion, diff, timestamp);
@@ -1313,7 +1373,7 @@ gst_videomixer2_src_event (GstPad * pad, GstEvent * event)
 }
 GST_VIDEO_MIXER2_UNLOCK (mix);
-gst_segment_set_seek (&mix->segment, rate, fmt, flags, start_type, start,
+gst_segment_do_seek (&mix->segment, rate, fmt, flags, start_type, start,
 stop_type, stop, NULL);
 mix->segment.position = -1;
 mix->ts_offset = 0;
@@ -1341,7 +1401,7 @@ gst_videomixer2_src_event (GstPad * pad, GstEvent * event)
 if (g_atomic_int_compare_and_exchange (&mix->flush_stop_pending, TRUE,
 FALSE)) {
 GST_DEBUG_OBJECT (mix, "pending flush stop");
-gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop ());
+gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop (TRUE));
 }
 break;
@@ -1356,20 +1416,15 @@ gst_videomixer2_src_event (GstPad * pad, GstEvent * event)
 result = gst_videomixer2_push_sink_event (mix, event);
 break;
 }
-gst_object_unref (mix);
 return result;
 }
 static gboolean
-gst_videomixer2_src_setcaps (GstPad * pad, GstCaps * caps)
+gst_videomixer2_src_setcaps (GstPad * pad, GstVideoMixer2 * mix, GstCaps * caps)
 {
-GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (gst_pad_get_parent_element (pad));
 gboolean ret = FALSE;
-GstVideoFormat fmt;
-gint width, height;
-gint fps_n, fps_d;
-gint par_n, par_d;
+GstVideoInfo info;
 GST_INFO_OBJECT (pad, "set src caps: %" GST_PTR_FORMAT, caps);
@@ -1378,14 +1433,13 @@ gst_videomixer2_src_setcaps (GstPad * pad, GstCaps * caps)
 mix->fill_checker = NULL;
 mix->fill_color = NULL;
-if (!gst_video_format_parse_caps (caps, &fmt, &width, &height) ||
-!gst_video_parse_caps_framerate (caps, &fps_n, &fps_d) ||
-!gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d))
+if (!gst_video_info_from_caps (&info, caps))
 goto done;
 GST_VIDEO_MIXER2_LOCK (mix);
-if (mix->fps_n != fps_n || mix->fps_d != fps_d) {
+if (GST_VIDEO_INFO_FPS_N (&mix->info) != GST_VIDEO_INFO_FPS_N (&info) ||
+GST_VIDEO_INFO_FPS_D (&mix->info) != GST_VIDEO_INFO_FPS_D (&info)) {
 if (mix->segment.position != -1) {
 mix->ts_offset = mix->segment.position - mix->segment.start;
 mix->nframes = 0;
@@ -1393,15 +1447,9 @@ gst_videomixer2_src_setcaps (GstPad * pad, GstCaps * caps)
 gst_videomixer2_reset_qos (mix);
 }
-mix->format = fmt;
-mix->width = width;
-mix->height = height;
-mix->fps_n = fps_n;
-mix->fps_d = fps_d;
-mix->par_n = par_n;
-mix->par_d = par_d;
-switch (mix->format) {
+mix->info = info;
+switch (GST_VIDEO_INFO_FORMAT (&mix->info)) {
 case GST_VIDEO_FORMAT_AYUV:
 mix->blend = gst_video_mixer_blend_ayuv;
 mix->overlay = gst_video_mixer_overlay_ayuv;
@@ -1540,8 +1588,8 @@ gst_videomixer2_src_setcaps (GstPad * pad, GstCaps * caps)
 }
 GST_VIDEO_MIXER2_UNLOCK (mix);
+ret = gst_pad_set_caps (pad, caps);
 done:
-gst_object_unref (mix);
 return ret;
 }
@@ -1564,7 +1612,9 @@ gst_videomixer2_sink_clip (GstCollectPads2 * pads,
 end_time = GST_BUFFER_DURATION (buf);
 if (end_time == -1)
-end_time = gst_util_uint64_scale_int (GST_SECOND, pad->fps_d, pad->fps_n);
+end_time =
+gst_util_uint64_scale_int (GST_SECOND,
+GST_VIDEO_INFO_FPS_D (&pad->info), GST_VIDEO_INFO_FPS_N (&pad->info));
 if (end_time == -1) {
 *outbuf = buf;
 return GST_FLOW_OK;
@@ -1610,12 +1660,12 @@ gst_videomixer2_sink_event (GstCollectPads2 * pads, GstCollectData2 * cdata,
 /* return FALSE => event will be forwarded */
 switch (GST_EVENT_TYPE (event)) {
-case GST_EVENT_NEWSEGMENT:{
-GstFormat fmt;
-gst_event_parse_new_segment (event, NULL, NULL, &fmt, NULL, NULL, NULL);
-g_assert (fmt == GST_FORMAT_TIME);
-/* eat NEWSEGMENT events */
+case GST_EVENT_SEGMENT:{
+GstSegment seg;
+gst_event_copy_segment (event, &seg);
+g_assert (seg.format == GST_FORMAT_TIME);
+/* eat SEGMENT events */
 ret = TRUE;
 gst_event_unref (event);
 break;
@@ -1639,7 +1689,6 @@ gst_videomixer2_sink_event (GstCollectPads2 * pads, GstCollectData2 * cdata,
 ret = gst_pad_event_default (cdata->pad, GST_OBJECT (mix), event);
 break;
 case GST_EVENT_EOS:
-case GST_EVENT_SEGMENT:
 gst_event_unref (event);
 ret = TRUE;
 break;
@@ -1720,7 +1769,7 @@ gst_videomixer2_change_state (GstElement * element, GstStateChange transition)
 static GstPad *
 gst_videomixer2_request_new_pad (GstElement * element,
-GstPadTemplate * templ, const gchar * req_name)
+GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
 {
 GstVideoMixer2 *mix;
 GstVideoMixer2Pad *mixpad;
@@ -1805,7 +1854,7 @@ gst_videomixer2_release_pad (GstElement * element, GstPad * pad)
 gst_child_proxy_child_removed (GST_OBJECT (mix), GST_OBJECT (mixpad));
 mix->numpads--;
-update_caps = mix->format != GST_VIDEO_FORMAT_UNKNOWN;
+update_caps = GST_VIDEO_INFO_FORMAT (&mix->info) != GST_VIDEO_FORMAT_UNKNOWN;
 GST_VIDEO_MIXER2_UNLOCK (mix);
 gst_collect_pads2_remove_pad (mix->collect, pad);
@@ -1826,7 +1875,7 @@ gst_videomixer2_finalize (GObject * o)
 GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (o);
 gst_object_unref (mix->collect);
-g_mutex_free (mix->lock);
+g_mutex_clear (&mix->lock);
 G_OBJECT_CLASS (parent_class)->finalize (o);
 }
@@ -1947,10 +1996,6 @@ gst_videomixer2_init (GstVideoMixer2 * mix)
 mix->srcpad =
 gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
 "src"), "src");
-gst_pad_set_getcaps_function (GST_PAD (mix->srcpad),
-GST_DEBUG_FUNCPTR (gst_videomixer2_src_getcaps));
-gst_pad_set_setcaps_function (GST_PAD (mix->srcpad),
-GST_DEBUG_FUNCPTR (gst_videomixer2_src_setcaps));
 gst_pad_set_query_function (GST_PAD (mix->srcpad),
 GST_DEBUG_FUNCPTR (gst_videomixer2_src_query));
 gst_pad_set_event_function (GST_PAD (mix->srcpad),
@@ -1968,7 +2013,7 @@ gst_videomixer2_init (GstVideoMixer2 * mix)
 gst_collect_pads2_set_clip_function (mix->collect,
 (GstCollectPads2ClipFunction) gst_videomixer2_sink_clip, mix);
-mix->lock = g_mutex_new ();
+g_mutex_init (&mix->lock);
 /* initialize variables */
 gst_videomixer2_reset (mix);
 }
@@ -1977,7 +2022,7 @@ gst_videomixer2_init (GstVideoMixer2 * mix)
 static gboolean
 plugin_init (GstPlugin * plugin)
 {
-GST_DEBUG_CATEGORY_INIT (gst_videomixer_debug, "videomixer", 0,
+GST_DEBUG_CATEGORY_INIT (gst_videomixer2_debug, "videomixer", 0,
 "video mixer");
 gst_video_mixer_init_blend ();
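
Another 0.11 change visible in the query_duration and query_latency hunks above: gst_iterator_next() now fills a GValue instead of handing out a reffed gpointer, so the pad is fetched with g_value_get_object() and released with g_value_reset()/g_value_unset() rather than gst_object_unref(). A self-contained sketch of that iteration idiom (the for_each_sink_pad helper is illustrative, not part of the patch):

#include <gst/gst.h>

/* Illustrative helper: visit every sink pad of an element with the
 * GValue-based iterator API used by the ported code above. */
static void
for_each_sink_pad (GstElement * element, void (*func) (GstPad * pad))
{
  GstIterator *it;
  GValue item = { 0, };
  gboolean done = FALSE;

  it = gst_element_iterate_sink_pads (element);
  while (!done) {
    switch (gst_iterator_next (it, &item)) {
      case GST_ITERATOR_OK:
        /* the GValue holds the reference; no gst_object_unref () needed */
        func (g_value_get_object (&item));
        g_value_reset (&item);
        break;
      case GST_ITERATOR_RESYNC:
        gst_iterator_resync (it);
        break;
      case GST_ITERATOR_ERROR:
      case GST_ITERATOR_DONE:
        done = TRUE;
        break;
    }
  }
  g_value_unset (&item);
  gst_iterator_free (it);
}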

videomixer2.h

@@ -75,7 +75,7 @@ struct _GstVideoMixer2
 GstPad *srcpad;
 /* Lock to prevent the state to change while blending */
-GMutex *lock;
+GMutex lock;
 /* Sink pads using Collect Pads 2*/
 GstCollectPads2 *collect;
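
With the GLib API targeted by 0.11, the mutex is embedded in the instance struct instead of being heap-allocated, so it is set up with g_mutex_init() and torn down with g_mutex_clear(), matching the videomixer2.c changes above. A minimal sketch of that pattern (the MixerState struct is illustrative, not part of the patch):

#include <glib.h>

/* Illustrative struct: the mutex lives inside the instance, as in
 * GstVideoMixer2 above, instead of being a heap-allocated GMutex *. */
typedef struct
{
  GMutex lock;
  gint frames_mixed;
} MixerState;

static void
mixer_state_init (MixerState * state)
{
  g_mutex_init (&state->lock);  /* replaces mix->lock = g_mutex_new () */
  state->frames_mixed = 0;
}

static void
mixer_state_clear (MixerState * state)
{
  g_mutex_clear (&state->lock); /* replaces g_mutex_free (mix->lock) */
}

static void
mixer_state_add_frame (MixerState * state)
{
  g_mutex_lock (&state->lock);
  state->frames_mixed++;
  g_mutex_unlock (&state->lock);
}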