zebrastripe: Add new GstVideoFilter2 base class

An experiment.  Not completely happy with it.
David Schleef 2011-03-21 13:31:15 -07:00
parent b69450af92
commit a02c4d6c4c
5 changed files with 503 additions and 166 deletions
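For orientation (this sketch is not part of the commit), here is roughly how a filter is expected to plug into the new base class, assuming only the API declared in gstvideofilter2.h below. The MyFilter type and function names are hypothetical, and MyFilter is assumed to be declared elsewhere as a GstVideoFilter2 subclass; the zebrastripe changes further down show the same pattern in full.

/* Hypothetical subclass sketch: register one in-place filter function per
 * supported format and let GstVideoFilter2 negotiate caps and dispatch
 * each buffer to the entry whose format matches.
 * MyFilter/MyFilterClass are assumed to be set up elsewhere, e.g. via
 * GST_BOILERPLATE (MyFilter, my_filter, GstVideoFilter2,
 *     GST_TYPE_VIDEO_FILTER2). */
#include "gstvideofilter2.h"

static GstFlowReturn
my_filter_ip_i420 (GstVideoFilter2 * vf2, GstBuffer * buf, int start, int end)
{
  /* process rows [start, end) of the Y plane in place */
  return GST_FLOW_OK;
}

static const GstVideoFilter2Functions my_filter_functions[] = {
  {GST_VIDEO_FORMAT_I420, NULL, my_filter_ip_i420},
  {GST_VIDEO_FORMAT_UNKNOWN}
};

static void
my_filter_class_init (MyFilterClass * klass)
{
  GstVideoFilter2Class *vf2_class = GST_VIDEO_FILTER2_CLASS (klass);

  /* hand the format table to the base class; its transform_ip walks the
   * table once per buffer and calls the matching filter_ip function */
  gst_video_filter2_class_add_functions (vf2_class, my_filter_functions);
}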

Makefile.am

@@ -4,6 +4,8 @@ plugin_LTLIBRARIES = libgstvideofiltersbad.la
#include $(top_srcdir)/common/orc.mak
libgstvideofiltersbad_la_SOURCES = \
gstvideofilter2.c \
gstvideofilter2.h \
gstzebrastripe.c \
gstvideofiltersbad.c
#nodist_libgstvideofiltersbad_la_SOURCES = $(ORC_NODIST_SOURCES)

gstvideofilter2.c

@@ -0,0 +1,290 @@
/* GStreamer
* Copyright (C) 2011 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
* Boston, MA 02110-1335, USA.
*/
/**
* SECTION:element-gstvideofilter2
*
* GstVideoFilter2 is a base class for simple in-place video filters.
* A subclass registers a table of per-format filter functions with
* gst_video_filter2_class_add_functions(); the base class negotiates
* caps, records the format, width and height, and dispatches each
* buffer to the function matching the negotiated format. There is no
* videofilter2 element to launch directly.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
#include <gst/video/video.h>
#include "gstvideofilter2.h"
GST_DEBUG_CATEGORY_STATIC (gst_video_filter2_debug_category);
#define GST_CAT_DEFAULT gst_video_filter2_debug_category
/* prototypes */
static void gst_video_filter2_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_video_filter2_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_video_filter2_dispose (GObject * object);
static void gst_video_filter2_finalize (GObject * object);
static GstCaps *gst_video_filter2_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps);
static gboolean
gst_video_filter2_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
guint * size);
static gboolean
gst_video_filter2_set_caps (GstBaseTransform * trans, GstCaps * incaps,
GstCaps * outcaps);
static gboolean gst_video_filter2_start (GstBaseTransform * trans);
static gboolean gst_video_filter2_stop (GstBaseTransform * trans);
static GstFlowReturn gst_video_filter2_transform (GstBaseTransform * trans,
GstBuffer * inbuf, GstBuffer * outbuf);
static GstFlowReturn gst_video_filter2_transform_ip (GstBaseTransform * trans,
GstBuffer * buf);
enum
{
PROP_0
};
/* class initialization */
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_video_filter2_debug_category, "videofilter2", 0, \
"debug category for videofilter2 element");
GST_BOILERPLATE_FULL (GstVideoFilter2, gst_video_filter2, GstBaseTransform,
GST_TYPE_BASE_TRANSFORM, DEBUG_INIT);
static void
gst_video_filter2_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
int i;
GstCaps *caps = NULL;
caps = gst_caps_new_empty ();
for (i = GST_VIDEO_FORMAT_I420; i <= GST_VIDEO_FORMAT_AYUV; i++) {
gst_caps_append (caps, gst_video_format_new_template_caps (i));
}
gst_element_class_add_pad_template (element_class,
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
gst_caps_ref (caps)));
gst_element_class_add_pad_template (element_class,
gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps));
}
static void
gst_video_filter2_class_init (GstVideoFilter2Class * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstBaseTransformClass *base_transform_class =
GST_BASE_TRANSFORM_CLASS (klass);
gobject_class->set_property = gst_video_filter2_set_property;
gobject_class->get_property = gst_video_filter2_get_property;
gobject_class->dispose = gst_video_filter2_dispose;
gobject_class->finalize = gst_video_filter2_finalize;
base_transform_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_video_filter2_transform_caps);
base_transform_class->get_unit_size =
GST_DEBUG_FUNCPTR (gst_video_filter2_get_unit_size);
base_transform_class->set_caps =
GST_DEBUG_FUNCPTR (gst_video_filter2_set_caps);
base_transform_class->start = GST_DEBUG_FUNCPTR (gst_video_filter2_start);
base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_video_filter2_stop);
base_transform_class->transform =
GST_DEBUG_FUNCPTR (gst_video_filter2_transform);
base_transform_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_video_filter2_transform_ip);
}
static void
gst_video_filter2_init (GstVideoFilter2 * videofilter2,
GstVideoFilter2Class * videofilter2_class)
{
gst_base_transform_set_qos_enabled (GST_BASE_TRANSFORM (videofilter2), TRUE);
}
void
gst_video_filter2_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
GstVideoFilter2 *videofilter2;
g_return_if_fail (GST_IS_VIDEO_FILTER2 (object));
videofilter2 = GST_VIDEO_FILTER2 (object);
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_video_filter2_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
GstVideoFilter2 *videofilter2;
g_return_if_fail (GST_IS_VIDEO_FILTER2 (object));
videofilter2 = GST_VIDEO_FILTER2 (object);
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_video_filter2_dispose (GObject * object)
{
GstVideoFilter2 *videofilter2;
g_return_if_fail (GST_IS_VIDEO_FILTER2 (object));
videofilter2 = GST_VIDEO_FILTER2 (object);
/* clean up as much as possible; may be called multiple times */
G_OBJECT_CLASS (parent_class)->dispose (object);
}
void
gst_video_filter2_finalize (GObject * object)
{
GstVideoFilter2 *videofilter2;
g_return_if_fail (GST_IS_VIDEO_FILTER2 (object));
videofilter2 = GST_VIDEO_FILTER2 (object);
/* clean up object here */
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static GstCaps *
gst_video_filter2_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps)
{
return gst_caps_ref (caps);
}
static gboolean
gst_video_filter2_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
guint * size)
{
GstVideoFormat format;
gint width, height;
gboolean ret;
ret = gst_video_format_parse_caps (caps, &format, &width, &height);
*size = gst_video_format_get_size (format, width, height);
return ret;
}
static gboolean
gst_video_filter2_set_caps (GstBaseTransform * trans, GstCaps * incaps,
GstCaps * outcaps)
{
GstVideoFilter2 *videofilter2;
gboolean ret;
int width;
int height;
GstVideoFormat format;
g_return_val_if_fail (GST_IS_VIDEO_FILTER2 (trans), FALSE);
videofilter2 = GST_VIDEO_FILTER2 (trans);
ret = gst_video_format_parse_caps (incaps, &format, &width, &height);
if (ret) {
videofilter2->format = format;
videofilter2->width = width;
videofilter2->height = height;
}
return ret;
}
static gboolean
gst_video_filter2_start (GstBaseTransform * trans)
{
/* nothing to allocate; returning TRUE lets subclasses start */
return TRUE;
}
static gboolean
gst_video_filter2_stop (GstBaseTransform * trans)
{
return TRUE;
}
static GstFlowReturn
gst_video_filter2_transform (GstBaseTransform * trans, GstBuffer * inbuf,
GstBuffer * outbuf)
{
return GST_FLOW_ERROR;
}
static GstFlowReturn
gst_video_filter2_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
GstVideoFilter2 *video_filter2 = GST_VIDEO_FILTER2 (trans);
GstVideoFilter2Class *klass =
GST_VIDEO_FILTER2_CLASS (G_OBJECT_GET_CLASS (trans));
int i;
GstFlowReturn ret;
/* give the subclass a chance to update per-frame state before the
 * per-format function is dispatched */
if (klass->prefilter) {
ret = klass->prefilter (video_filter2, buf);
if (ret != GST_FLOW_OK)
return ret;
}
for (i = 0; klass->functions[i].format != GST_VIDEO_FORMAT_UNKNOWN; i++) {
if (klass->functions[i].format == video_filter2->format) {
ret = klass->functions[i].filter_ip (video_filter2, buf, 0,
video_filter2->height);
return ret;
}
}
return GST_FLOW_ERROR;
}
/* API */
void
gst_video_filter2_class_add_functions (GstVideoFilter2Class * klass,
const GstVideoFilter2Functions * functions)
{
klass->functions = functions;
}

gstvideofilter2.h

@@ -0,0 +1,81 @@
/* GStreamer
* Copyright (C) 2011 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_VIDEO_FILTER2_H_
#define _GST_VIDEO_FILTER2_H_
#include <gst/base/gstbasetransform.h>
#include <gst/video/video.h>
G_BEGIN_DECLS
#define GST_TYPE_VIDEO_FILTER2 (gst_video_filter2_get_type())
#define GST_VIDEO_FILTER2(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_FILTER2,GstVideoFilter2))
#define GST_VIDEO_FILTER2_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_FILTER2,GstVideoFilter2Class))
#define GST_IS_VIDEO_FILTER2(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_FILTER2))
#define GST_IS_VIDEO_FILTER2_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_FILTER2))
typedef struct _GstVideoFilter2 GstVideoFilter2;
typedef struct _GstVideoFilter2Class GstVideoFilter2Class;
typedef struct _GstVideoFilter2Functions GstVideoFilter2Functions;
struct _GstVideoFilter2
{
GstBaseTransform base_videofilter2;
GstVideoFormat format;
int width;
int height;
gpointer _gst_reserved[GST_PADDING_LARGE];
};
struct _GstVideoFilter2Class
{
GstBaseTransformClass base_videofilter2_class;
const GstVideoFilter2Functions *functions;
GstFlowReturn (*prefilter) (GstVideoFilter2 *filter, GstBuffer *inbuf);
gpointer _gst_reserved[GST_PADDING_LARGE];
};
struct _GstVideoFilter2Functions
{
GstVideoFormat format;
GstFlowReturn (*filter) (GstVideoFilter2 *filter, GstBuffer *inbuf,
GstBuffer *outbuf, int start, int end);
GstFlowReturn (*filter_ip) (GstVideoFilter2 *filter, GstBuffer *buffer,
int start, int end);
gpointer _gst_reserved[GST_PADDING_LARGE];
};
#define GST_VIDEO_FILTER2_FORMAT(vf) (((GstVideoFilter2 *)vf)->format)
#define GST_VIDEO_FILTER2_WIDTH(vf) (((GstVideoFilter2 *)vf)->width)
#define GST_VIDEO_FILTER2_HEIGHT(vf) (((GstVideoFilter2 *)vf)->height)
GType gst_video_filter2_get_type (void);
void gst_video_filter2_class_add_functions (GstVideoFilter2Class *klass,
const GstVideoFilter2Functions *functions);
G_END_DECLS
#endif

gstzebrastripe.c

@@ -62,19 +62,15 @@ static void gst_zebra_stripe_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_zebra_stripe_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_zebra_stripe_dispose (GObject * object);
static void gst_zebra_stripe_finalize (GObject * object);
static gboolean
gst_zebra_stripe_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
guint * size);
static gboolean
gst_zebra_stripe_set_caps (GstBaseTransform * trans, GstCaps * incaps,
GstCaps * outcaps);
static gboolean gst_zebra_stripe_start (GstBaseTransform * trans);
static gboolean gst_zebra_stripe_stop (GstBaseTransform * trans);
static GstFlowReturn gst_zebra_stripe_transform_ip (GstBaseTransform * trans,
GstBuffer * buf);
static GstFlowReturn
gst_zebra_stripe_prefilter (GstVideoFilter2 * videofilter2, GstBuffer * buf);
static GstVideoFilter2Functions gst_zebra_stripe_filter_functions[];
enum
{
@@ -84,44 +80,20 @@ enum
#define DEFAULT_THRESHOLD 90
/* pad templates */
static GstStaticPadTemplate gst_zebra_stripe_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV
("{I420,YV12,Y41B,Y42B,NV12,NV21,YUV9,YVU9,Y444,UYVY,YVYU,YUY2,AYUV}"))
);
static GstStaticPadTemplate gst_zebra_stripe_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV
("{I420,YV12,Y41B,Y42B,NV12,NV21,YUV9,YVU9,Y444,UYVY,YVYU,YUY2,AYUV}"))
);
/* class initialization */
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_zebra_stripe_debug_category, "zebrastripe", 0, \
"debug category for zebrastripe element");
GST_BOILERPLATE_FULL (GstZebraStripe, gst_zebra_stripe, GstBaseTransform,
GST_TYPE_BASE_TRANSFORM, DEBUG_INIT);
GST_BOILERPLATE_FULL (GstZebraStripe, gst_zebra_stripe, GstVideoFilter2,
GST_TYPE_VIDEO_FILTER2, DEBUG_INIT);
static void
gst_zebra_stripe_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_zebra_stripe_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_zebra_stripe_src_template));
gst_element_class_set_details_simple (element_class, "Zebra stripe overlay",
"Filter/Analysis",
"Overlays zebra striping on overexposed areas of video",
@@ -132,21 +104,18 @@ static void
gst_zebra_stripe_class_init (GstZebraStripeClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstVideoFilter2Class *video_filter2_class = GST_VIDEO_FILTER2_CLASS (klass);
GstBaseTransformClass *base_transform_class =
GST_BASE_TRANSFORM_CLASS (klass);
gobject_class->set_property = gst_zebra_stripe_set_property;
gobject_class->get_property = gst_zebra_stripe_get_property;
gobject_class->dispose = gst_zebra_stripe_dispose;
gobject_class->finalize = gst_zebra_stripe_finalize;
base_transform_class->get_unit_size =
GST_DEBUG_FUNCPTR (gst_zebra_stripe_get_unit_size);
base_transform_class->set_caps =
GST_DEBUG_FUNCPTR (gst_zebra_stripe_set_caps);
base_transform_class->start = GST_DEBUG_FUNCPTR (gst_zebra_stripe_start);
base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_zebra_stripe_stop);
base_transform_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_zebra_stripe_transform_ip);
video_filter2_class->prefilter =
GST_DEBUG_FUNCPTR (gst_zebra_stripe_prefilter);
g_object_class_install_property (gobject_class, PROP_THRESHOLD,
g_param_spec_int ("threshold", "Threshold",
@@ -154,6 +123,8 @@ gst_zebra_stripe_class_init (GstZebraStripeClass * klass)
DEFAULT_THRESHOLD,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
gst_video_filter2_class_add_functions (video_filter2_class,
gst_zebra_stripe_filter_functions);
}
static void
@@ -161,12 +132,6 @@ gst_zebra_stripe_init (GstZebraStripe * zebrastripe,
GstZebraStripeClass * zebrastripe_class)
{
zebrastripe->sinkpad =
gst_pad_new_from_static_template (&gst_zebra_stripe_sink_template,
"sink");
zebrastripe->srcpad =
gst_pad_new_from_static_template (&gst_zebra_stripe_src_template, "src");
}
void
@@ -181,6 +146,8 @@ gst_zebra_stripe_set_property (GObject * object, guint property_id,
switch (property_id) {
case PROP_THRESHOLD:
zebrastripe->threshold = g_value_get_int (value);
zebrastripe->y_threshold =
16 + floor (0.5 + 2.19 * zebrastripe->threshold);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
@@ -207,19 +174,6 @@ gst_zebra_stripe_get_property (GObject * object, guint property_id,
}
}
void
gst_zebra_stripe_dispose (GObject * object)
{
GstZebraStripe *zebrastripe;
g_return_if_fail (GST_IS_ZEBRA_STRIPE (object));
zebrastripe = GST_ZEBRA_STRIPE (object);
/* clean up as possible. may be called multiple times */
G_OBJECT_CLASS (parent_class)->dispose (object);
}
void
gst_zebra_stripe_finalize (GObject * object)
{
@@ -234,39 +188,6 @@ gst_zebra_stripe_finalize (GObject * object)
}
static gboolean
gst_zebra_stripe_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
guint * size)
{
int width, height;
GstVideoFormat format;
gboolean ret;
ret = gst_video_format_parse_caps (caps, &format, &width, &height);
*size = gst_video_format_get_size (format, width, height);
return ret;
}
static gboolean
gst_zebra_stripe_set_caps (GstBaseTransform * trans, GstCaps * incaps,
GstCaps * outcaps)
{
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (trans);
int width, height;
GstVideoFormat format;
gboolean ret;
ret = gst_video_format_parse_caps (incaps, &format, &width, &height);
if (ret) {
zebrastripe->format = format;
zebrastripe->width = width;
zebrastripe->height = height;
}
return ret;
}
static gboolean
gst_zebra_stripe_start (GstBaseTransform * trans)
{
@@ -282,72 +203,120 @@ gst_zebra_stripe_stop (GstBaseTransform * trans)
}
static GstFlowReturn
gst_zebra_stripe_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
gst_zebra_stripe_prefilter (GstVideoFilter2 * videofilter2, GstBuffer * buf)
{
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (trans);
int i, j;
int threshold;
int t;
guint8 *ydata;
int ystride;
threshold = 16 + floor (0.5 + 2.19 * zebrastripe->threshold);
t = zebrastripe->t;
ydata = GST_BUFFER_DATA (buf);
ystride = gst_video_format_get_row_stride (zebrastripe->format,
0, zebrastripe->width);
switch (zebrastripe->format) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
case GST_VIDEO_FORMAT_Y41B:
case GST_VIDEO_FORMAT_Y42B:
case GST_VIDEO_FORMAT_NV12:
case GST_VIDEO_FORMAT_NV21:
case GST_VIDEO_FORMAT_YUV9:
case GST_VIDEO_FORMAT_YVU9:
case GST_VIDEO_FORMAT_Y444:
for (j = 0; j < zebrastripe->height; j++) {
guint8 *data = ydata + ystride * j;
for (i = 0; i < zebrastripe->width; i++) {
if (data[i] >= threshold) {
if ((i + j + t) & 0x4)
data[i] = 16;
}
}
}
break;
case GST_VIDEO_FORMAT_UYVY:
ydata++;
case GST_VIDEO_FORMAT_YUY2:
case GST_VIDEO_FORMAT_YVYU:
for (j = 0; j < zebrastripe->height; j++) {
guint8 *data = ydata + ystride * j;
for (i = 0; i < zebrastripe->width; i++) {
if (data[2 * i] >= threshold) {
if ((i + j + t) & 0x4)
data[2 * i] = 16;
}
}
}
break;
case GST_VIDEO_FORMAT_AYUV:
ydata++;
for (j = 0; j < zebrastripe->height; j++) {
guint8 *data = ydata + ystride * j;
for (i = 0; i < zebrastripe->width; i++) {
if (data[4 * i] >= threshold) {
if ((i + j + t) & 0x4)
data[4 * i] = 16;
}
}
}
break;
default:
g_assert_not_reached ();
}
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (videofilter2);
zebrastripe->t++;
return GST_FLOW_OK;
}
static GstFlowReturn
gst_zebra_stripe_filter_ip_planarY (GstVideoFilter2 * videofilter2,
GstBuffer * buf, int start, int end)
{
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (videofilter2);
int width = GST_VIDEO_FILTER2_WIDTH (zebrastripe);
int i, j;
int threshold = zebrastripe->y_threshold;
int t = zebrastripe->t;
guint8 *ydata;
int ystride;
ydata = GST_BUFFER_DATA (buf);
ystride =
gst_video_format_get_row_stride (GST_VIDEO_FILTER2_FORMAT (videofilter2),
0, width);
for (j = start; j < end; j++) {
guint8 *data = ydata + ystride * j;
for (i = 0; i < width; i++) {
if (data[i] >= threshold) {
if ((i + j + t) & 0x4)
data[i] = 16;
}
}
}
return GST_FLOW_OK;
}
static GstFlowReturn
gst_zebra_stripe_filter_ip_YxYy (GstVideoFilter2 * videofilter2,
GstBuffer * buf, int start, int end)
{
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (videofilter2);
GstVideoFormat format = GST_VIDEO_FILTER2_FORMAT (zebrastripe);
int width = GST_VIDEO_FILTER2_WIDTH (zebrastripe);
int i, j;
int threshold = zebrastripe->y_threshold;
int t = zebrastripe->t;
guint8 *ydata;
int ystride;
ydata = GST_BUFFER_DATA (buf);
ystride = gst_video_format_get_row_stride (format, 0, width);
if (format == GST_VIDEO_FORMAT_UYVY) {
ydata++;
}
for (j = start; j < end; j++) {
guint8 *data = ydata + ystride * j;
for (i = 0; i < width; i++) {
if (data[2 * i] >= threshold) {
if ((i + j + t) & 0x4)
data[2 * i] = 16;
}
}
}
return GST_FLOW_OK;
}
static GstFlowReturn
gst_zebra_stripe_filter_ip_AYUV (GstVideoFilter2 * videofilter2,
GstBuffer * buf, int start, int end)
{
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (videofilter2);
int width = GST_VIDEO_FILTER2_WIDTH (zebrastripe);
int i, j;
int threshold = zebrastripe->y_threshold;
int t = zebrastripe->t;
guint8 *ydata;
int ystride;
ydata = GST_BUFFER_DATA (buf);
ystride =
gst_video_format_get_row_stride (GST_VIDEO_FILTER2_FORMAT (videofilter2),
0, width);
ydata++;
for (j = start; j < end; j++) {
guint8 *data = ydata + ystride * j;
for (i = 0; i < width; i++) {
if (data[4 * i] >= threshold) {
if ((i + j + t) & 0x4)
data[4 * i] = 16;
}
}
}
return GST_FLOW_OK;
}
static GstVideoFilter2Functions gst_zebra_stripe_filter_functions[] = {
{GST_VIDEO_FORMAT_I420, NULL, gst_zebra_stripe_filter_ip_planarY},
{GST_VIDEO_FORMAT_YV12, NULL, gst_zebra_stripe_filter_ip_planarY},
{GST_VIDEO_FORMAT_Y41B, NULL, gst_zebra_stripe_filter_ip_planarY},
{GST_VIDEO_FORMAT_Y42B, NULL, gst_zebra_stripe_filter_ip_planarY},
{GST_VIDEO_FORMAT_NV12, NULL, gst_zebra_stripe_filter_ip_planarY},
{GST_VIDEO_FORMAT_NV21, NULL, gst_zebra_stripe_filter_ip_planarY},
{GST_VIDEO_FORMAT_YUV9, NULL, gst_zebra_stripe_filter_ip_planarY},
{GST_VIDEO_FORMAT_YVU9, NULL, gst_zebra_stripe_filter_ip_planarY},
{GST_VIDEO_FORMAT_Y444, NULL, gst_zebra_stripe_filter_ip_planarY},
{GST_VIDEO_FORMAT_UYVY, NULL, gst_zebra_stripe_filter_ip_YxYy},
{GST_VIDEO_FORMAT_YUY2, NULL, gst_zebra_stripe_filter_ip_YxYy},
{GST_VIDEO_FORMAT_YVYU, NULL, gst_zebra_stripe_filter_ip_YxYy},
{GST_VIDEO_FORMAT_AYUV, NULL, gst_zebra_stripe_filter_ip_AYUV},
{GST_VIDEO_FORMAT_UNKNOWN}
};

gstzebrastripe.h

@@ -20,7 +20,7 @@
#ifndef _GST_ZEBRA_STRIPE_H_
#define _GST_ZEBRA_STRIPE_H_
#include <gst/base/gstbasetransform.h>
#include "gstvideofilter2.h"
#include <gst/video/video.h>
G_BEGIN_DECLS
@@ -36,25 +36,20 @@ typedef struct _GstZebraStripeClass GstZebraStripeClass;
struct _GstZebraStripe
{
GstBaseTransform base_zebrastripe;
GstPad *sinkpad;
GstPad *srcpad;
GstVideoFilter2 video_filter2;
/* properties */
int threshold;
/* state */
GstVideoFormat format;
int width;
int height;
int t;
int y_threshold;
};
struct _GstZebraStripeClass
{
GstBaseTransformClass base_zebrastripe_class;
GstVideoFilter2Class video_filter2_class;
};
GType gst_zebra_stripe_get_type (void);