smpte: port to 0.11

Mark Nauwelaerts 2012-03-22 18:21:52 +01:00
parent cc32b51ddc
commit bcf5f38b16
5 changed files with 324 additions and 212 deletions


@ -312,7 +312,6 @@ dnl *** plug-ins to include ***
dnl Non ported plugins (non-dependent, then dependent)
dnl Make sure you have a space before and after all plugins
GST_PLUGINS_NONPORTED="deinterlace interleave flx \
smpte \
videobox \
cairo cairo_gobject dv1394 gdk_pixbuf \
oss oss4 \


@ -44,7 +44,6 @@
#endif
#include <string.h>
#include "gstsmpte.h"
#include <gst/video/video.h>
#include "paint.h"
GST_DEBUG_CATEGORY_STATIC (gst_smpte_debug);
@ -54,7 +53,7 @@ static GstStaticPadTemplate gst_smpte_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420")
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420")
)
);
@ -62,7 +61,7 @@ static GstStaticPadTemplate gst_smpte_sink1_template =
GST_STATIC_PAD_TEMPLATE ("sink1",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420")
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420")
)
);
@ -70,7 +69,7 @@ static GstStaticPadTemplate gst_smpte_sink2_template =
GST_STATIC_PAD_TEMPLATE ("sink2",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420")
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420")
)
);
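The caps template hunks above are the core of the 0.11 caps rework: the format-specific macros (GST_VIDEO_CAPS_YUV, GST_VIDEO_CAPS_ARGB, ...) that produced "video/x-raw-yuv"/"video/x-raw-rgb" caps are replaced by the single GST_VIDEO_CAPS_MAKE (format) macro, which produces "video/x-raw" caps with a format field plus full width/height/framerate ranges. A minimal sketch of a template built that way (hypothetical element, not part of this commit):

#include <gst/video/video.h>

static GstStaticPadTemplate example_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    /* several formats can still be offered by joining the macro with ";" */
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420") ";"
        GST_VIDEO_CAPS_MAKE ("AYUV"))
    );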
@ -148,7 +147,6 @@ gst_smpte_transition_type_get_type (void)
static void gst_smpte_class_init (GstSMPTEClass * klass);
static void gst_smpte_base_init (GstSMPTEClass * klass);
static void gst_smpte_init (GstSMPTE * smpte);
static void gst_smpte_finalize (GstSMPTE * smpte);
@ -175,7 +173,7 @@ gst_smpte_get_type (void)
if (!smpte_type) {
static const GTypeInfo smpte_info = {
sizeof (GstSMPTEClass),
(GBaseInitFunc) gst_smpte_base_init,
NULL,
NULL,
(GClassInitFunc) gst_smpte_class_init,
NULL,
@ -191,23 +189,6 @@ gst_smpte_get_type (void)
return smpte_type;
}
static void
gst_smpte_base_init (GstSMPTEClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_smpte_sink1_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_smpte_sink2_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_smpte_src_template));
gst_element_class_set_details_simple (element_class, "SMPTE transitions",
"Filter/Editor/Video",
"Apply the standard SMPTE transitions on video images",
"Wim Taymans <wim.taymans@chello.be>");
}
static void
gst_smpte_class_init (GstSMPTEClass * klass)
{
@ -251,6 +232,17 @@ gst_smpte_class_init (GstSMPTEClass * klass)
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_smpte_change_state);
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_smpte_sink1_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_smpte_sink2_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_smpte_src_template));
gst_element_class_set_details_simple (gstelement_class, "SMPTE transitions",
"Filter/Editor/Video",
"Apply the standard SMPTE transitions on video images",
"Wim Taymans <wim.taymans@chello.be>");
}
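The hunks above show the other recurring piece of boilerplate in this port: base_init is gone (its GTypeInfo slot is simply NULL now), so the pad templates and the element details are registered once from class_init. A compact sketch of that registration for a hypothetical GstFoo element:

#include <gst/gst.h>

static GstStaticPadTemplate foo_src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS_ANY);

static void
gst_foo_register_metadata (GstElementClass * element_class)
{
  /* in 0.10 code this lived in the base_init function */
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&foo_src_template));
  gst_element_class_set_details_simple (element_class, "Foo",
      "Filter/Video", "Example element", "Nobody <nobody@example.org>");
}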
/* wht yel cya grn mag red blu blk -I Q */
@ -307,20 +299,20 @@ static gboolean
gst_smpte_setcaps (GstPad * pad, GstCaps * caps)
{
GstSMPTE *smpte;
GstStructure *structure;
gboolean ret;
GstVideoInfo vinfo;
smpte = GST_SMPTE (GST_PAD_PARENT (pad));
structure = gst_caps_get_structure (caps, 0);
ret = gst_structure_get_int (structure, "width", &smpte->width);
ret &= gst_structure_get_int (structure, "height", &smpte->height);
ret &= gst_structure_get_fraction (structure, "framerate",
&smpte->fps_num, &smpte->fps_denom);
if (!ret)
gst_video_info_init (&vinfo);
if (!gst_video_info_from_caps (&vinfo, caps))
return FALSE;
smpte->width = GST_VIDEO_INFO_WIDTH (&vinfo);
smpte->height = GST_VIDEO_INFO_HEIGHT (&vinfo);
smpte->fps_num = GST_VIDEO_INFO_FPS_N (&vinfo);
smpte->fps_denom = GST_VIDEO_INFO_FPS_D (&vinfo);
/* for backward compat, we store these here */
smpte->fps = ((gdouble) smpte->fps_num) / smpte->fps_denom;
@ -334,6 +326,42 @@ gst_smpte_setcaps (GstPad * pad, GstCaps * caps)
gst_smpte_update_mask (smpte, smpte->type, smpte->invert, smpte->depth,
smpte->width, smpte->height);
if (pad == smpte->sinkpad1) {
GST_DEBUG_OBJECT (smpte, "setting pad1 info");
smpte->vinfo1 = vinfo;
} else {
GST_DEBUG_OBJECT (smpte, "setting pad2 info");
smpte->vinfo2 = vinfo;
}
return ret;
}
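Instead of pulling width, height and framerate out of the GstStructure field by field, the ported setcaps fills a GstVideoInfo from the caps and reads the parsed values back. A minimal sketch of that pattern in isolation:

#include <gst/video/video.h>

static gboolean
parse_video_caps (GstCaps * caps, gint * width, gint * height,
    gint * fps_n, gint * fps_d)
{
  GstVideoInfo info;

  gst_video_info_init (&info);
  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;               /* not (valid) raw video caps */

  *width = GST_VIDEO_INFO_WIDTH (&info);
  *height = GST_VIDEO_INFO_HEIGHT (&info);
  *fps_n = GST_VIDEO_INFO_FPS_N (&info);
  *fps_d = GST_VIDEO_INFO_FPS_D (&info);
  return TRUE;
}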
static gboolean
gst_smpte_sink_event (GstCollectPads2 * pads,
GstCollectData2 * data, GstEvent * event, gpointer user_data)
{
GstPad *pad;
gboolean ret = FALSE;
pad = data->pad;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
ret = gst_smpte_setcaps (pad, caps);
gst_event_unref (event);
event = NULL;
break;
}
default:
ret = gst_pad_event_default (pad,
GST_OBJECT_CAST (GST_PAD_PARENT (pad)), event);
}
return ret;
}
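There is no setcaps pad function anymore: caps travel as a serialized GST_EVENT_CAPS event, which is why the element installs a GstCollectPads2 event callback and handles the CAPS case there. For reference, a minimal sketch of the same idea on a plain pad (outside of collectpads, hypothetical handler):

#include <gst/gst.h>

static gboolean
example_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean ret;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:{
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);      /* caps stay owned by the event */
      GST_INFO_OBJECT (pad, "new caps %" GST_PTR_FORMAT, caps);
      /* ... reconfigure the element from caps here ... */
      gst_event_unref (event);
      ret = TRUE;
      break;
    }
    default:
      ret = gst_pad_event_default (pad, parent, event);
      break;
  }
  return ret;
}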
@ -342,18 +370,12 @@ gst_smpte_init (GstSMPTE * smpte)
{
smpte->sinkpad1 =
gst_pad_new_from_static_template (&gst_smpte_sink1_template, "sink1");
gst_pad_set_setcaps_function (smpte->sinkpad1,
GST_DEBUG_FUNCPTR (gst_smpte_setcaps));
gst_pad_set_getcaps_function (smpte->sinkpad1,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
GST_PAD_SET_PROXY_CAPS (smpte->sinkpad1);
gst_element_add_pad (GST_ELEMENT (smpte), smpte->sinkpad1);
smpte->sinkpad2 =
gst_pad_new_from_static_template (&gst_smpte_sink2_template, "sink2");
gst_pad_set_setcaps_function (smpte->sinkpad2,
GST_DEBUG_FUNCPTR (gst_smpte_setcaps));
gst_pad_set_getcaps_function (smpte->sinkpad2,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
GST_PAD_SET_PROXY_CAPS (smpte->sinkpad2);
gst_element_add_pad (GST_ELEMENT (smpte), smpte->sinkpad2);
smpte->srcpad =
@ -363,6 +385,8 @@ gst_smpte_init (GstSMPTE * smpte)
smpte->collect = gst_collect_pads2_new ();
gst_collect_pads2_set_function (smpte->collect,
(GstCollectPads2Function) GST_DEBUG_FUNCPTR (gst_smpte_collected), smpte);
gst_collect_pads2_set_event_function (smpte->collect,
GST_DEBUG_FUNCPTR (gst_smpte_sink_event), smpte);
gst_collect_pads2_add_pad (smpte->collect, smpte->sinkpad1,
sizeof (GstCollectData2));
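The per-pad setcaps/getcaps functions removed above are replaced by caps proxying: setting the PROXY_CAPS flag makes caps queries and events on the pad be answered from the element's other pads, which is what gst_pad_proxy_getcaps() used to do by hand. A small sketch:

#include <gst/gst.h>

static void
add_proxying_sink_pad (GstElement * element, GstStaticPadTemplate * templ)
{
  GstPad *sinkpad;

  sinkpad = gst_pad_new_from_static_template (templ, "sink");
  /* replaces gst_pad_set_getcaps_function (pad, gst_pad_proxy_getcaps) */
  GST_PAD_SET_PROXY_CAPS (sinkpad);
  gst_element_add_pad (element, sinkpad);
}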
@ -399,15 +423,15 @@ gst_smpte_reset (GstSMPTE * smpte)
}
static void
gst_smpte_blend_i420 (guint8 * in1, guint8 * in2, guint8 * out, GstMask * mask,
gint width, gint height, gint border, gint pos)
gst_smpte_blend_i420 (GstVideoFrame * frame1, GstVideoFrame * frame2,
GstVideoFrame * oframe, GstMask * mask, gint border, gint pos)
{
guint32 *maskp;
gint value;
gint i, j;
gint min, max;
guint8 *in1u, *in1v, *in2u, *in2v, *outu, *outv;
gint uoffset, voffset, ystr, ustr, vstr;
guint8 *in1, *in2, *out, *in1u, *in1v, *in2u, *in2v, *outu, *outv;
gint width, height;
if (border == 0)
border++;
@ -415,19 +439,19 @@ gst_smpte_blend_i420 (guint8 * in1, guint8 * in2, guint8 * out, GstMask * mask,
min = pos - border;
max = pos;
uoffset = I420_U_OFFSET (width, height);
voffset = I420_V_OFFSET (width, height);
width = GST_VIDEO_FRAME_WIDTH (frame1);
height = GST_VIDEO_FRAME_HEIGHT (frame1);
ystr = I420_Y_ROWSTRIDE (width);
ustr = I420_U_ROWSTRIDE (width);
vstr = I420_V_ROWSTRIDE (width);
in1 = GST_VIDEO_FRAME_COMP_DATA (frame1, 0);
in2 = GST_VIDEO_FRAME_COMP_DATA (frame2, 0);
out = GST_VIDEO_FRAME_COMP_DATA (oframe, 0);
in1u = in1 + uoffset;
in1v = in1 + voffset;
in2u = in2 + uoffset;
in2v = in2 + voffset;
outu = out + uoffset;
outv = out + voffset;
in1u = GST_VIDEO_FRAME_COMP_DATA (frame1, 1);
in1v = GST_VIDEO_FRAME_COMP_DATA (frame1, 2);
in2u = GST_VIDEO_FRAME_COMP_DATA (frame2, 1);
in2v = GST_VIDEO_FRAME_COMP_DATA (frame2, 2);
outu = GST_VIDEO_FRAME_COMP_DATA (oframe, 1);
outv = GST_VIDEO_FRAME_COMP_DATA (oframe, 2);
maskp = mask->data;
@ -444,16 +468,18 @@ gst_smpte_blend_i420 (guint8 * in1, guint8 * in2, guint8 * out, GstMask * mask,
((in1v[j / 2] * value) + (in2v[j / 2] * (256 - value))) >> 8;
}
}
out += ystr;
in1 += ystr;
in2 += ystr;
in1 += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 0);
in2 += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 0);
out += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 0);
if (!(i & 1)) {
outu += ustr;
in1u += ustr;
in2u += ustr;
outv += vstr;
in1v += vstr;
in2v += vstr;
in1u += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 1);
in2u += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 1);
in1v += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 2);
in2v += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 2);
outu += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 1);
outv += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 2);
}
}
}
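The blend function no longer computes I420 plane offsets and row strides itself (I420_U_OFFSET, I420_Y_ROWSTRIDE, ...); the mapped GstVideoFrame already exposes per-component data pointers, strides and sizes. A minimal sketch of walking one component that way:

#include <gst/video/video.h>

static void
dim_luma (GstVideoFrame * frame)
{
  guint8 *y = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  gint stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  gint width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  gint height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
  gint i, j;

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++)
      y[j] /= 2;
    y += stride;                /* strides include any padding */
  }
}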
@ -465,11 +491,14 @@ gst_smpte_collected (GstCollectPads2 * pads, GstSMPTE * smpte)
GstClockTime ts;
GstBuffer *in1 = NULL, *in2 = NULL;
GSList *collected;
GstMapInfo map;
GstVideoFrame frame1, frame2, oframe;
if (G_UNLIKELY (smpte->fps_num == 0))
goto not_negotiated;
if (!GST_PAD_CAPS (smpte->sinkpad1) || !GST_PAD_CAPS (smpte->sinkpad2))
if (!gst_pad_has_current_caps (smpte->sinkpad1) ||
!gst_pad_has_current_caps (smpte->sinkpad2))
goto not_negotiated;
ts = gst_util_uint64_scale_int (smpte->position * GST_SECOND,
@ -489,23 +518,31 @@ gst_smpte_collected (GstCollectPads2 * pads, GstSMPTE * smpte)
if (in1 == NULL) {
/* if no input, make picture black */
in1 = gst_buffer_new_and_alloc (I420_SIZE (smpte->width, smpte->height));
fill_i420 (GST_BUFFER_DATA (in1), smpte->width, smpte->height, 7);
gst_buffer_map (in1, &map, GST_MAP_WRITE);
fill_i420 (map.data, smpte->width, smpte->height, 7);
gst_buffer_unmap (in1, &map);
}
if (in2 == NULL) {
/* if no input, make picture white */
in2 = gst_buffer_new_and_alloc (I420_SIZE (smpte->width, smpte->height));
fill_i420 (GST_BUFFER_DATA (in2), smpte->width, smpte->height, 0);
gst_buffer_map (in2, &map, GST_MAP_WRITE);
fill_i420 (map.data, smpte->width, smpte->height, 0);
gst_buffer_unmap (in2, &map);
}
if (GST_BUFFER_SIZE (in1) != GST_BUFFER_SIZE (in2))
if (GST_VIDEO_INFO_WIDTH (&smpte->vinfo1) !=
GST_VIDEO_INFO_WIDTH (&smpte->vinfo2) ||
GST_VIDEO_INFO_HEIGHT (&smpte->vinfo1) !=
GST_VIDEO_INFO_HEIGHT (&smpte->vinfo2))
goto input_formats_do_not_match;
if (smpte->position < smpte->end_position) {
outbuf = gst_buffer_new_and_alloc (I420_SIZE (smpte->width, smpte->height));
/* set caps if not done yet */
if (!GST_PAD_CAPS (smpte->srcpad)) {
if (!gst_pad_has_current_caps (smpte->srcpad)) {
GstCaps *caps;
GstSegment segment;
caps =
gst_caps_copy (gst_static_caps_get
@ -516,21 +553,20 @@ gst_smpte_collected (GstCollectPads2 * pads, GstSMPTE * smpte)
gst_pad_set_caps (smpte->srcpad, caps);
gst_pad_push_event (smpte->srcpad,
gst_event_new_new_segment_full (FALSE,
1.0, 1.0, GST_FORMAT_TIME, 0, -1, 0));
gst_segment_init (&segment, GST_FORMAT_TIME);
gst_pad_push_event (smpte->srcpad, gst_event_new_segment (&segment));
}
gst_buffer_set_caps (outbuf, GST_PAD_CAPS (smpte->srcpad));
gst_smpte_blend_i420 (GST_BUFFER_DATA (in1),
GST_BUFFER_DATA (in2),
GST_BUFFER_DATA (outbuf),
smpte->mask, smpte->width, smpte->height,
smpte->border,
gst_video_frame_map (&frame1, &smpte->vinfo1, in1, GST_MAP_READ);
gst_video_frame_map (&frame2, &smpte->vinfo2, in2, GST_MAP_READ);
/* re-use either info, now that we know they are essentially identical */
gst_video_frame_map (&oframe, &smpte->vinfo1, outbuf, GST_MAP_WRITE);
gst_smpte_blend_i420 (&frame1, &frame2, &oframe, smpte->mask, smpte->border,
((1 << smpte->depth) + smpte->border) *
smpte->position / smpte->end_position);
gst_video_frame_unmap (&frame1);
gst_video_frame_unmap (&frame2);
gst_video_frame_unmap (&oframe);
} else {
outbuf = in2;
gst_buffer_ref (in2);
@ -556,9 +592,15 @@ not_negotiated:
}
input_formats_do_not_match:
{
GstCaps *caps1, *caps2;
caps1 = gst_pad_get_current_caps (smpte->sinkpad1);
caps2 = gst_pad_get_current_caps (smpte->sinkpad2);
GST_ELEMENT_ERROR (smpte, CORE, NEGOTIATION, (NULL),
("input formats don't match: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
GST_PAD_CAPS (smpte->sinkpad1), GST_PAD_CAPS (smpte->sinkpad2)));
caps1, caps2));
gst_caps_unref (caps1);
gst_caps_unref (caps2);
return GST_FLOW_ERROR;
}
}
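The rest of this file follows the 0.11 memory and caps access rules: GST_BUFFER_DATA is replaced by explicit gst_buffer_map()/gst_buffer_unmap() (or gst_video_frame_map() when the video layout matters), and GST_PAD_CAPS by gst_pad_has_current_caps()/gst_pad_get_current_caps(), the latter returning a reference that must be dropped. A compact sketch of these patterns together:

#include <string.h>
#include <gst/video/video.h>

static void
access_patterns (GstPad * pad, GstBuffer * buf, GstVideoInfo * vinfo)
{
  GstMapInfo map;
  GstVideoFrame frame;

  /* raw byte access replaces GST_BUFFER_DATA/GST_BUFFER_SIZE */
  if (gst_buffer_map (buf, &map, GST_MAP_WRITE)) {
    memset (map.data, 0, map.size);
    gst_buffer_unmap (buf, &map);
  }

  /* video-aware access: map the buffer as a frame described by vinfo */
  if (gst_video_frame_map (&frame, vinfo, buf, GST_MAP_READ))
    gst_video_frame_unmap (&frame);

  /* current caps come with a reference, unlike the old GST_PAD_CAPS macro */
  if (gst_pad_has_current_caps (pad)) {
    GstCaps *caps = gst_pad_get_current_caps (pad);

    GST_INFO_OBJECT (pad, "caps %" GST_PTR_FORMAT, caps);
    gst_caps_unref (caps);
  }
}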


@ -23,6 +23,7 @@
#include <gst/gst.h>
#include <gst/base/gstcollectpads2.h>
#include <gst/video/video.h>
G_BEGIN_DECLS
@ -65,6 +66,8 @@ struct _GstSMPTE {
gdouble fps;
gint fps_num;
gint fps_denom;
GstVideoInfo vinfo1;
GstVideoInfo vinfo2;
/* state of the effect */
gint position;


@ -63,19 +63,20 @@ static GstStaticPadTemplate gst_smpte_alpha_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";"
GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";"
GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_ARGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("AYUV") ";"
GST_VIDEO_CAPS_MAKE ("ARGB") ";" GST_VIDEO_CAPS_MAKE ("BGRA") ";"
GST_VIDEO_CAPS_MAKE ("RGBA") ";" GST_VIDEO_CAPS_MAKE ("ARGB"))
);
static GstStaticPadTemplate gst_smpte_alpha_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420") ";" GST_VIDEO_CAPS_YUV ("YV12")
";" GST_VIDEO_CAPS_YUV ("AYUV")
";" GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_BGRA ";" GST_VIDEO_CAPS_RGBA
";" GST_VIDEO_CAPS_ARGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420") ";"
GST_VIDEO_CAPS_MAKE ("YV12")
";" GST_VIDEO_CAPS_MAKE ("AYUV")
";" GST_VIDEO_CAPS_MAKE ("ARGB") ";" GST_VIDEO_CAPS_MAKE ("BGRA")
";" GST_VIDEO_CAPS_MAKE ("RGBA") ";" GST_VIDEO_CAPS_MAKE ("ARGB"))
);
/* SMPTE signals and properties */
@ -142,40 +143,26 @@ static void gst_smpte_alpha_set_property (GObject * object, guint prop_id,
static void gst_smpte_alpha_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_smpte_alpha_setcaps (GstBaseTransform * btrans,
GstCaps * incaps, GstCaps * outcaps);
static gboolean gst_smpte_alpha_get_unit_size (GstBaseTransform * btrans,
GstCaps * caps, guint * size);
static GstFlowReturn gst_smpte_alpha_transform (GstBaseTransform * trans,
GstBuffer * in, GstBuffer * out);
static gboolean gst_smpte_alpha_set_info (GstVideoFilter * vfilter,
GstCaps * incaps, GstVideoInfo * in_info,
GstCaps * outcaps, GstVideoInfo * out_info);
static GstFlowReturn gst_smpte_alpha_transform_frame (GstVideoFilter * vfilter,
GstVideoFrame * in_frame, GstVideoFrame * out_frame);
static void gst_smpte_alpha_before_transform (GstBaseTransform * trans,
GstBuffer * buf);
static GstCaps *gst_smpte_alpha_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * from);
GstPadDirection direction, GstCaps * from, GstCaps * filter);
GST_BOILERPLATE (GstSMPTEAlpha, gst_smpte_alpha, GstVideoFilter,
GST_TYPE_VIDEO_FILTER);
static void
gst_smpte_alpha_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_smpte_alpha_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_smpte_alpha_src_template));
gst_element_class_set_details_simple (element_class, "SMPTE transitions",
"Filter/Editor/Video",
"Apply the standard SMPTE transitions as alpha on video images",
"Wim Taymans <wim.taymans@gmail.com>");
}
#define gst_smpte_alpha_parent_class parent_class
G_DEFINE_TYPE (GstSMPTEAlpha, gst_smpte_alpha, GST_TYPE_VIDEO_FILTER);
static void
gst_smpte_alpha_class_init (GstSMPTEAlphaClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *element_class = (GstElementClass *) (klass);
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_smpte_alpha_set_property;
gobject_class->get_property = gst_smpte_alpha_get_property;
@ -212,14 +199,23 @@ gst_smpte_alpha_class_init (GstSMPTEAlphaClass * klass)
"Invert transition mask", DEFAULT_PROP_POSITION,
GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_smpte_alpha_setcaps);
trans_class->get_unit_size =
GST_DEBUG_FUNCPTR (gst_smpte_alpha_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_smpte_alpha_transform);
trans_class->before_transform =
GST_DEBUG_FUNCPTR (gst_smpte_alpha_before_transform);
trans_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_smpte_alpha_transform_caps);
vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_smpte_alpha_set_info);
vfilter_class->transform_frame =
GST_DEBUG_FUNCPTR (gst_smpte_alpha_transform_frame);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_smpte_alpha_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_smpte_alpha_src_template));
gst_element_class_set_details_simple (element_class, "SMPTE transitions",
"Filter/Editor/Video",
"Apply the standard SMPTE transitions as alpha on video images",
"Wim Taymans <wim.taymans@gmail.com>");
}
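In this file the port goes one step further: instead of implementing set_caps, get_unit_size and transform on GstBaseTransform, the element now fills in the GstVideoFilter vmethods set_info and transform_frame, which already receive parsed GstVideoInfo and mapped GstVideoFrame arguments. A minimal sketch of such a subclass fragment (hypothetical names):

#include <gst/video/gstvideofilter.h>

static gboolean
my_filter_set_info (GstVideoFilter * filter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  /* caps are already parsed; just sanity-check what we care about */
  return GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_INFO_FORMAT (out_info);
}

static GstFlowReturn
my_filter_transform_frame (GstVideoFilter * filter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  /* frames arrive mapped; no gst_buffer_map() or get_unit_size needed */
  if (!gst_video_frame_copy (out_frame, in_frame))
    return GST_FLOW_ERROR;
  return GST_FLOW_OK;
}

static void
my_filter_wire_vmethods (GstVideoFilterClass * vfilter_class)
{
  vfilter_class->set_info = GST_DEBUG_FUNCPTR (my_filter_set_info);
  vfilter_class->transform_frame =
      GST_DEBUG_FUNCPTR (my_filter_transform_frame);
}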
static gboolean
@ -269,7 +265,7 @@ mask_failed:
}
static void
gst_smpte_alpha_init (GstSMPTEAlpha * smpte, GstSMPTEAlphaClass * klass)
gst_smpte_alpha_init (GstSMPTEAlpha * smpte)
{
smpte->type = DEFAULT_PROP_TYPE;
smpte->border = DEFAULT_PROP_BORDER;
@ -280,14 +276,17 @@ gst_smpte_alpha_init (GstSMPTEAlpha * smpte, GstSMPTEAlphaClass * klass)
#define CREATE_ARGB_FUNC(name, A, R, G, B) \
static void \
gst_smpte_alpha_process_##name##_##name (GstSMPTEAlpha * smpte, const guint8 * in, \
guint8 * out, GstMask * mask, gint width, gint height, gint border, \
gint pos) \
gst_smpte_alpha_process_##name##_##name (GstSMPTEAlpha * smpte, \
const GstVideoFrame * in_frame, GstVideoFrame * out_frame, GstMask * mask, \
gint border, gint pos) \
{ \
gint i, j; \
const guint32 *maskp; \
gint value; \
gint min, max; \
gint width, height; \
guint8 *in, *out; \
gint src_wrap, dest_wrap; \
\
if (border == 0) \
border++; \
@ -299,6 +298,14 @@ gst_smpte_alpha_process_##name##_##name (GstSMPTEAlpha * smpte, const guint8 * i
\
maskp = mask->data; \
\
width = GST_VIDEO_FRAME_WIDTH (out_frame); \
height = GST_VIDEO_FRAME_HEIGHT (out_frame); \
\
in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0); \
out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0); \
src_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0) - (width << 2); \
dest_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0) - (width << 2); \
\
/* we basically copy the source to dest but we scale the alpha channel with \
* the mask */ \
for (i = 0; i < height; i++) { \
@ -311,6 +318,8 @@ gst_smpte_alpha_process_##name##_##name (GstSMPTEAlpha * smpte, const guint8 * i
out += 4; \
in += 4; \
} \
in += src_wrap; \
out += dest_wrap; \
} \
}
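The new src_wrap/dest_wrap variables account for plane padding: the loops advance by the mapped frame's plane stride instead of assuming a row is exactly width * 4 bytes. The arithmetic in isolation, as a sketch for a packed 4-bytes-per-pixel format (AYUV/ARGB/...):

#include <gst/video/video.h>

static void
walk_packed_plane (GstVideoFrame * frame)
{
  guint8 *data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  gint width = GST_VIDEO_FRAME_WIDTH (frame);
  gint height = GST_VIDEO_FRAME_HEIGHT (frame);
  /* bytes to skip at the end of each row (0 if the plane is tightly packed) */
  gint wrap = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) - (width << 2);
  gint i, j;

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      data[0] = 0xff;           /* e.g. force the first byte of each pixel */
      data += 4;
    }
    data += wrap;               /* skip row padding, if any */
  }
}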
@ -320,14 +329,17 @@ CREATE_ARGB_FUNC (abgr, 0, 3, 2, 1);
CREATE_ARGB_FUNC (rgba, 3, 0, 1, 2);
static void
gst_smpte_alpha_process_ayuv_ayuv (GstSMPTEAlpha * smpte, const guint8 * in,
guint8 * out, GstMask * mask, gint width, gint height, gint border,
gint pos)
gst_smpte_alpha_process_ayuv_ayuv (GstSMPTEAlpha * smpte,
const GstVideoFrame * in_frame, GstVideoFrame * out_frame, GstMask * mask,
gint border, gint pos)
{
gint i, j;
const guint32 *maskp;
gint value;
gint min, max;
gint width, height;
guint8 *in, *out;
gint src_wrap, dest_wrap;
if (border == 0)
border++;
@ -339,6 +351,14 @@ gst_smpte_alpha_process_ayuv_ayuv (GstSMPTEAlpha * smpte, const guint8 * in,
maskp = mask->data;
width = GST_VIDEO_FRAME_WIDTH (out_frame);
height = GST_VIDEO_FRAME_HEIGHT (out_frame);
in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
src_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0) - (width << 2);
dest_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0) - (width << 2);
/* we basically copy the source to dest but we scale the alpha channel with
* the mask */
for (i = 0; i < height; i++) {
@ -349,24 +369,28 @@ gst_smpte_alpha_process_ayuv_ayuv (GstSMPTEAlpha * smpte, const guint8 * in,
*out++ = *in++;
*out++ = *in++;
}
in += src_wrap;
out += dest_wrap;
}
}
static void
gst_smpte_alpha_process_i420_ayuv (GstSMPTEAlpha * smpte, const guint8 * in,
guint8 * out, GstMask * mask, gint width, gint height, gint border,
gint pos)
gst_smpte_alpha_process_i420_ayuv (GstSMPTEAlpha * smpte,
const GstVideoFrame * in_frame, GstVideoFrame * out_frame, GstMask * mask,
gint border, gint pos)
{
const guint8 *srcY;
const guint8 *srcU;
const guint8 *srcV;
guint8 *out;
gint i, j;
gint src_wrap, src_uv_wrap;
gint y_stride, uv_stride;
gint src_wrap, src_u_wrap, src_v_wrap, dest_wrap;
gint y_stride, u_stride, v_stride;
gboolean odd_width;
const guint32 *maskp;
gint value;
gint min, max;
gint width, height;
if (border == 0)
border++;
@ -378,17 +402,23 @@ gst_smpte_alpha_process_i420_ayuv (GstSMPTEAlpha * smpte, const guint8 * in,
maskp = mask->data;
y_stride = gst_video_format_get_row_stride (smpte->in_format, 0, width);
uv_stride = gst_video_format_get_row_stride (smpte->in_format, 1, width);
width = GST_VIDEO_FRAME_WIDTH (out_frame);
height = GST_VIDEO_FRAME_HEIGHT (out_frame);
y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
u_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
v_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 2);
src_wrap = y_stride - width;
src_uv_wrap = uv_stride - (width / 2);
src_u_wrap = u_stride - (width / 2);
src_v_wrap = v_stride - (width / 2);
srcY = in;
srcU = in + gst_video_format_get_component_offset (smpte->in_format,
1, width, height);
srcV = in + gst_video_format_get_component_offset (smpte->in_format,
2, width, height);
srcY = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
srcU = GST_VIDEO_FRAME_COMP_DATA (in_frame, 1);
srcV = GST_VIDEO_FRAME_COMP_DATA (in_frame, 2);
out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
dest_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0) - (width << 2);
odd_width = (width % 2 != 0);
@ -417,10 +447,11 @@ gst_smpte_alpha_process_i420_ayuv (GstSMPTEAlpha * smpte, const guint8 * in,
srcU -= width / 2;
srcV -= width / 2;
} else {
srcU += src_uv_wrap;
srcV += src_uv_wrap;
srcU += src_u_wrap;
srcV += src_v_wrap;
}
srcY += src_wrap;
out += dest_wrap;
}
}
@ -443,27 +474,24 @@ gst_smpte_alpha_before_transform (GstBaseTransform * trans, GstBuffer * buf)
}
static GstFlowReturn
gst_smpte_alpha_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
gst_smpte_alpha_transform_frame (GstVideoFilter * vfilter,
GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
GstSMPTEAlpha *smpte = GST_SMPTE_ALPHA (trans);
GstSMPTEAlpha *smpte = GST_SMPTE_ALPHA (vfilter);
gdouble position;
gint border;
if (G_UNLIKELY (!smpte->process))
goto not_negotiated;
/* these are the properties we update with only the object lock, others are
* only updated with the TRANSFORM_LOCK. */
GST_OBJECT_LOCK (smpte);
position = smpte->position;
border = smpte->border;
GST_OBJECT_UNLOCK (smpte);
/* run the type specific filter code */
smpte->process (smpte, GST_BUFFER_DATA (in), GST_BUFFER_DATA (out),
smpte->mask, smpte->width, smpte->height, border,
((1 << smpte->depth) + border) * position);
smpte->process (smpte, in_frame, out_frame,
smpte->mask, border, ((1 << smpte->depth) + border) * position);
GST_OBJECT_UNLOCK (smpte);
return GST_FLOW_OK;
@ -478,56 +506,126 @@ not_negotiated:
static GstCaps *
gst_smpte_alpha_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * from)
GstPadDirection direction, GstCaps * from, GstCaps * filter)
{
GstCaps *to = gst_caps_copy (from);
GstStructure *s;
GstCaps *result, *tmp_caps, *tmpl_caps = NULL;
gint i, j;
gst_caps_truncate (to);
s = gst_caps_get_structure (to, 0);
tmp_caps = gst_caps_new_empty ();
if (gst_structure_has_name (s, "video/x-raw-yuv")) {
for (i = 0; i < gst_caps_get_size (from); i++) {
GstStructure *structure;
const GValue *val, *lval;
GValue list = { 0, };
GValue val = { 0, };
GValue aval = { 0, };
const gchar *str;
gst_structure_remove_field (s, "format");
structure = gst_structure_copy (gst_caps_get_structure (from, i));
/* we can transform I420 to AYUV,
* so we need to locate and substitute AYUV for both of them */
val = gst_structure_get_value (structure, "format");
if (val && GST_VALUE_HOLDS_LIST (val)) {
gboolean seen_ayuv = FALSE, seen_i420 = FALSE;
g_value_init (&list, GST_TYPE_LIST);
g_value_init (&val, GST_TYPE_FOURCC);
gst_value_set_fourcc (&val, GST_STR_FOURCC ("AYUV"));
gst_value_list_append_value (&list, &val);
g_value_reset (&val);
gst_value_set_fourcc (&val, GST_STR_FOURCC ("I420"));
gst_value_list_append_value (&list, &val);
g_value_reset (&val);
gst_value_set_fourcc (&val, GST_STR_FOURCC ("YV12"));
gst_value_list_append_value (&list, &val);
g_value_unset (&val);
gst_structure_set_value (s, "format", &list);
g_value_unset (&list);
} else if (!gst_structure_has_name (s, "video/x-raw-rgb")) {
gst_caps_unref (to);
to = gst_caps_new_empty ();
g_value_init (&list, GST_TYPE_LIST);
for (j = 0; j < gst_value_list_get_size (val); j++) {
lval = gst_value_list_get_value (val, j);
if ((str = g_value_get_string (lval))) {
if (strcmp (str, "AYUV") == 0) {
seen_ayuv = TRUE;
} else if (strcmp (str, "I420") == 0) {
seen_i420 = TRUE;
}
}
}
if (seen_ayuv && !seen_i420) {
str = "I420";
} else if (seen_i420 && !seen_ayuv) {
str = "AYUV";
} else
str = NULL;
if (str) {
g_value_copy (val, &list);
g_value_init (&aval, G_TYPE_STRING);
g_value_set_string (&aval, str);
gst_value_list_append_value (&list, &aval);
g_value_reset (&aval);
gst_structure_set_value (structure, "format", &list);
g_value_unset (&list);
}
} else if (val && G_VALUE_HOLDS_STRING (val)) {
if ((str = g_value_get_string (val)) &&
((strcmp (str, "AYUV") == 0) || (strcmp (str, "I420") == 0))) {
g_value_init (&list, GST_TYPE_LIST);
g_value_init (&aval, G_TYPE_STRING);
g_value_set_string (&aval, "AYUV");
gst_value_list_append_value (&list, &aval);
g_value_reset (&aval);
g_value_set_string (&aval, "I420");
gst_value_list_append_value (&list, &aval);
g_value_reset (&aval);
gst_structure_set_value (structure, "format", &list);
g_value_unset (&list);
}
} else {
gst_structure_remove_field (structure, "format");
}
gst_structure_remove_field (structure, "color-matrix");
gst_structure_remove_field (structure, "chroma-site");
gst_caps_append_structure (tmp_caps, structure);
}
return to;
/* Get the appropriate template */
if (direction == GST_PAD_SINK) {
tmpl_caps =
gst_static_pad_template_get_caps (&gst_smpte_alpha_src_template);
} else if (direction == GST_PAD_SRC) {
tmpl_caps =
gst_static_pad_template_get_caps (&gst_smpte_alpha_sink_template);
} else {
g_assert_not_reached ();
}
/* Intersect with our template caps */
result = gst_caps_intersect (tmp_caps, tmpl_caps);
gst_caps_unref (tmpl_caps);
gst_caps_unref (tmp_caps);
result = gst_caps_simplify (result);
GST_LOG_OBJECT (trans, "transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT,
from, result);
if (filter) {
GstCaps *intersection;
GST_DEBUG_OBJECT (trans, "Using filter caps %" GST_PTR_FORMAT, filter);
intersection =
gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (result);
result = intersection;
GST_DEBUG_OBJECT (trans, "Intersection %" GST_PTR_FORMAT, result);
}
return result;
}
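transform_caps gained a filter argument in 0.11; when it is non-NULL the vmethod is expected to intersect its result with it, exactly as the new tail of the function above does. That tail pattern in isolation, as a sketch:

#include <gst/gst.h>

static GstCaps *
apply_filter (GstCaps * result, GstCaps * filter)
{
  if (filter) {
    GstCaps *tmp;

    tmp = gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (result);
    result = tmp;
  }
  return result;
}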
static gboolean
gst_smpte_alpha_setcaps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
gst_smpte_alpha_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
GstSMPTEAlpha *smpte = GST_SMPTE_ALPHA (btrans);
GstSMPTEAlpha *smpte = GST_SMPTE_ALPHA (vfilter);
gboolean ret;
gint width, height;
smpte->process = NULL;
if (!gst_video_format_parse_caps (incaps, &smpte->in_format, &width, &height))
goto invalid_caps;
if (!gst_video_format_parse_caps (outcaps, &smpte->out_format, &width,
&height))
goto invalid_caps;
smpte->in_format = GST_VIDEO_INFO_FORMAT (in_info);
smpte->out_format = GST_VIDEO_INFO_FORMAT (out_info);
smpte->width = width = GST_VIDEO_INFO_WIDTH (out_info);
smpte->height = height = GST_VIDEO_INFO_HEIGHT (out_info);
/* try to update the mask now, this will also adjust the width/height on
* success */
@ -595,11 +693,6 @@ gst_smpte_alpha_setcaps (GstBaseTransform * btrans, GstCaps * incaps,
return ret;
/* ERRORS */
invalid_caps:
{
GST_ERROR_OBJECT (smpte, "Invalid caps: %" GST_PTR_FORMAT, incaps);
return FALSE;
}
mask_failed:
{
GST_ERROR_OBJECT (smpte, "failed creating the mask");
@ -607,21 +700,6 @@ mask_failed:
}
}
static gboolean
gst_smpte_alpha_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
gint width, height;
GstVideoFormat format;
if (!gst_video_format_parse_caps (caps, &format, &width, &height))
return FALSE;
*size = gst_video_format_get_size (format, width, height);
return TRUE;
}
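get_unit_size can simply be dropped because GstVideoFilter derives buffer sizes from the negotiated GstVideoInfo; where a size is still needed, it can be read back from the info. A small sketch:

#include <gst/video/video.h>

static gsize
frame_size_from_caps (GstCaps * caps)
{
  GstVideoInfo info;

  if (!gst_video_info_from_caps (&info, caps))
    return 0;
  /* replaces gst_video_format_get_size (format, width, height) */
  return GST_VIDEO_INFO_SIZE (&info);
}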
static void
gst_smpte_alpha_finalize (GstSMPTEAlpha * smpte)
{
@ -644,12 +722,10 @@ gst_smpte_alpha_set_property (GObject * object, guint prop_id,
type = g_value_get_enum (value);
GST_BASE_TRANSFORM_LOCK (smpte);
GST_OBJECT_LOCK (smpte);
gst_smpte_alpha_update_mask (smpte, type, smpte->invert,
smpte->depth, smpte->width, smpte->height);
GST_OBJECT_UNLOCK (smpte);
GST_BASE_TRANSFORM_UNLOCK (smpte);
break;
}
case PROP_BORDER:
@ -662,14 +738,10 @@ gst_smpte_alpha_set_property (GObject * object, guint prop_id,
depth = g_value_get_int (value);
GST_BASE_TRANSFORM_LOCK (smpte);
/* also lock with the object lock so that reading the property doesn't
* have to wait for the transform lock */
GST_OBJECT_LOCK (smpte);
gst_smpte_alpha_update_mask (smpte, smpte->type, smpte->invert,
depth, smpte->width, smpte->height);
GST_OBJECT_UNLOCK (smpte);
GST_BASE_TRANSFORM_UNLOCK (smpte);
break;
}
case PROP_POSITION:
@ -681,14 +753,10 @@ gst_smpte_alpha_set_property (GObject * object, guint prop_id,
gboolean invert;
invert = g_value_get_boolean (value);
GST_BASE_TRANSFORM_LOCK (smpte);
/* also lock with the object lock so that reading the property doesn't
* have to wait for the transform lock */
GST_OBJECT_LOCK (smpte);
gst_smpte_alpha_update_mask (smpte, smpte->type, invert,
smpte->depth, smpte->width, smpte->height);
GST_OBJECT_UNLOCK (smpte);
GST_BASE_TRANSFORM_UNLOCK (smpte);
break;
}
default:


@ -63,8 +63,8 @@ struct _GstSMPTEAlpha {
GstMask *mask;
/* processing function */
void (*process) (GstSMPTEAlpha * smpte, const guint8 * in, guint8 * out,
GstMask * mask, gint width, gint height, gint border, gint pos);
void (*process) (GstSMPTEAlpha * smpte, const GstVideoFrame * in, GstVideoFrame * out,
GstMask * mask, gint border, gint pos);
};
struct _GstSMPTEAlphaClass {