VideoFilter inherits from BaseTransform

Original commit message from CVS:
2005-11-23  Julien MOUTTE  <julien@moutte.net>

* ext/cairo/gsttimeoverlay.c:
(gst_timeoverlay_update_font_height),
(gst_timeoverlay_set_caps), (gst_timeoverlay_get_unit_size),
(gst_timeoverlay_transform), (gst_timeoverlay_base_init),
(gst_timeoverlay_class_init), (gst_timeoverlay_init),
(gst_timeoverlay_get_type):
* ext/cairo/gsttimeoverlay.h:
* gst/debug/Makefile.am:
* gst/debug/gstnavigationtest.c:
(gst_navigationtest_handle_src_event),
(gst_navigationtest_get_unit_size),
(gst_navigationtest_set_caps),
(gst_navigationtest_transform),
(gst_navigationtest_change_state),
(gst_navigationtest_base_init), (gst_navigationtest_class_init),
(gst_navigationtest_init), (gst_navigationtest_get_type),
(plugin_init):
* gst/debug/gstnavigationtest.h:
* gst/effectv/Makefile.am:
* gst/effectv/gstaging.c: (gst_agingtv_set_caps),
(gst_agingtv_get_unit_size), (gst_agingtv_transform),
(gst_agingtv_base_init), (gst_agingtv_class_init),
(gst_agingtv_init), (gst_agingtv_get_type):
* gst/effectv/gstdice.c: (gst_dicetv_set_caps),
(gst_dicetv_get_unit_size), (gst_dicetv_transform),
(gst_dicetv_base_init), (gst_dicetv_class_init),
(gst_dicetv_init),
(gst_dicetv_get_type):
* gst/effectv/gstedge.c: (gst_edgetv_set_caps),
(gst_edgetv_get_unit_size), (gst_edgetv_transform),
(gst_edgetv_base_init), (gst_edgetv_class_init),
(gst_edgetv_init),
(gst_edgetv_get_type):
* gst/effectv/gsteffectv.c:
* gst/effectv/gsteffectv.h:
* gst/effectv/gstquark.c: (gst_quarktv_set_caps),
(gst_quarktv_get_unit_size), (fastrand),
(gst_quarktv_transform),
(gst_quarktv_change_state), (gst_quarktv_base_init),
(gst_quarktv_class_init), (gst_quarktv_init),
(gst_quarktv_get_type):
* gst/effectv/gstrev.c: (gst_revtv_set_caps),
(gst_revtv_get_unit_size), (gst_revtv_transform),
(gst_revtv_base_init), (gst_revtv_class_init), (gst_revtv_init),
(gst_revtv_get_type):
* gst/effectv/gstshagadelic.c: (gst_shagadelictv_set_caps),
(gst_shagadelictv_get_unit_size), (gst_shagadelictv_transform),
(gst_shagadelictv_base_init), (gst_shagadelictv_class_init),
(gst_shagadelictv_init), (gst_shagadelictv_get_type):
* gst/effectv/gstvertigo.c: (gst_vertigotv_set_caps),
(gst_vertigotv_get_unit_size), (gst_vertigotv_transform),
(gst_vertigotv_base_init), (gst_vertigotv_class_init),
(gst_vertigotv_init), (gst_vertigotv_get_type):
* gst/effectv/gstwarp.c: (gst_warptv_set_caps),
(gst_warptv_get_unit_size), (gst_warptv_transform),
(gst_warptv_base_init), (gst_warptv_class_init),
(gst_warptv_init),
(gst_warptv_get_type):
* gst/videofilter/Makefile.am:
* gst/videofilter/gstvideobalance.c:
* gst/videofilter/gstvideobalance.h:
* gst/videofilter/gstvideofilter.c: (gst_videofilter_get_type),
(gst_videofilter_class_init), (gst_videofilter_init):
* gst/videofilter/gstvideofilter.h:
* gst/videofilter/gstvideoflip.c: (gst_videoflip_set_caps),
(gst_videoflip_transform_caps), (gst_videoflip_get_unit_size),
(gst_videoflip_flip), (gst_videoflip_transform),
(gst_videoflip_handle_src_event), (gst_videoflip_set_property),
(gst_videoflip_base_init), (gst_videoflip_class_init),
(gst_videoflip_init), (plugin_init), (gst_videoflip_get_type):
* gst/videofilter/gstvideoflip.h: VideoFilter inherits from
BaseTransform, it's just a place holder for now and every video
effect plugin has been ported to use BaseTransform features
directly. QuarkTV was fixed too (was broken), navigationtest works
and best for the end, videoflip converts navigation events depending
on flip method ! Fixes #320953
This commit is contained in:
Julien Moutte 2005-11-23 15:50:51 +00:00
parent 48520a455d
commit 2ea4f5b3c9
24 changed files with 1949 additions and 2356 deletions

View file

@ -1,3 +1,75 @@
2005-11-23 Julien MOUTTE <julien@moutte.net>
* ext/cairo/gsttimeoverlay.c: (gst_timeoverlay_update_font_height),
(gst_timeoverlay_set_caps), (gst_timeoverlay_get_unit_size),
(gst_timeoverlay_transform), (gst_timeoverlay_base_init),
(gst_timeoverlay_class_init), (gst_timeoverlay_init),
(gst_timeoverlay_get_type):
* ext/cairo/gsttimeoverlay.h:
* gst/debug/Makefile.am:
* gst/debug/gstnavigationtest.c:
(gst_navigationtest_handle_src_event),
(gst_navigationtest_get_unit_size), (gst_navigationtest_set_caps),
(gst_navigationtest_transform), (gst_navigationtest_change_state),
(gst_navigationtest_base_init), (gst_navigationtest_class_init),
(gst_navigationtest_init), (gst_navigationtest_get_type),
(plugin_init):
* gst/debug/gstnavigationtest.h:
* gst/effectv/Makefile.am:
* gst/effectv/gstaging.c: (gst_agingtv_set_caps),
(gst_agingtv_get_unit_size), (gst_agingtv_transform),
(gst_agingtv_base_init), (gst_agingtv_class_init),
(gst_agingtv_init), (gst_agingtv_get_type):
* gst/effectv/gstdice.c: (gst_dicetv_set_caps),
(gst_dicetv_get_unit_size), (gst_dicetv_transform),
(gst_dicetv_base_init), (gst_dicetv_class_init), (gst_dicetv_init),
(gst_dicetv_get_type):
* gst/effectv/gstedge.c: (gst_edgetv_set_caps),
(gst_edgetv_get_unit_size), (gst_edgetv_transform),
(gst_edgetv_base_init), (gst_edgetv_class_init), (gst_edgetv_init),
(gst_edgetv_get_type):
* gst/effectv/gsteffectv.c:
* gst/effectv/gsteffectv.h:
* gst/effectv/gstquark.c: (gst_quarktv_set_caps),
(gst_quarktv_get_unit_size), (fastrand), (gst_quarktv_transform),
(gst_quarktv_change_state), (gst_quarktv_base_init),
(gst_quarktv_class_init), (gst_quarktv_init),
(gst_quarktv_get_type):
* gst/effectv/gstrev.c: (gst_revtv_set_caps),
(gst_revtv_get_unit_size), (gst_revtv_transform),
(gst_revtv_base_init), (gst_revtv_class_init), (gst_revtv_init),
(gst_revtv_get_type):
* gst/effectv/gstshagadelic.c: (gst_shagadelictv_set_caps),
(gst_shagadelictv_get_unit_size), (gst_shagadelictv_transform),
(gst_shagadelictv_base_init), (gst_shagadelictv_class_init),
(gst_shagadelictv_init), (gst_shagadelictv_get_type):
* gst/effectv/gstvertigo.c: (gst_vertigotv_set_caps),
(gst_vertigotv_get_unit_size), (gst_vertigotv_transform),
(gst_vertigotv_base_init), (gst_vertigotv_class_init),
(gst_vertigotv_init), (gst_vertigotv_get_type):
* gst/effectv/gstwarp.c: (gst_warptv_set_caps),
(gst_warptv_get_unit_size), (gst_warptv_transform),
(gst_warptv_base_init), (gst_warptv_class_init), (gst_warptv_init),
(gst_warptv_get_type):
* gst/videofilter/Makefile.am:
* gst/videofilter/gstvideobalance.c:
* gst/videofilter/gstvideobalance.h:
* gst/videofilter/gstvideofilter.c: (gst_videofilter_get_type),
(gst_videofilter_class_init), (gst_videofilter_init):
* gst/videofilter/gstvideofilter.h:
* gst/videofilter/gstvideoflip.c: (gst_videoflip_set_caps),
(gst_videoflip_transform_caps), (gst_videoflip_get_unit_size),
(gst_videoflip_flip), (gst_videoflip_transform),
(gst_videoflip_handle_src_event), (gst_videoflip_set_property),
(gst_videoflip_base_init), (gst_videoflip_class_init),
(gst_videoflip_init), (plugin_init), (gst_videoflip_get_type):
* gst/videofilter/gstvideoflip.h: VideoFilter inherits from
BaseTransform, it's just a place holder for now and every video
effect plugin has been ported to use BaseTransform features
directly. QuarkTV was fixed too (was broken), navigationtest works
and best for the end, videoflip converts navigation events depending
on flip method ! Fixes #320953
2005-11-23 Jan Schmidt <thaytan@mad.scientist.com>
* ext/aalib/gstaasink.c: (gst_aasink_fixate):

View file

@ -27,179 +27,47 @@
#include "config.h"
#endif
/*#define DEBUG_ENABLED */
#include <gsttimeoverlay.h>
#include <string.h>
#include <math.h>
#include <cairo.h>
#include <gst/video/video.h>
/* GstTimeoverlay signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
static GstElementDetails timeoverlay_details =
GST_ELEMENT_DETAILS ("Time Overlay",
"Filter/Editor/Video",
"Overlays the time on a video stream",
"David Schleef <ds@schleef.org>");
enum
{
ARG_0
/* FILL ME */
};
static GstStaticPadTemplate gst_timeoverlay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
);
static void gst_timeoverlay_base_init (gpointer g_class);
static void gst_timeoverlay_class_init (gpointer g_class, gpointer class_data);
static void gst_timeoverlay_init (GTypeInstance * instance, gpointer g_class);
static void gst_timeoverlay_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_timeoverlay_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest,
void *src);
static void gst_timeoverlay_setup (GstVideofilter * videofilter);
GType
gst_timeoverlay_get_type (void)
{
static GType timeoverlay_type = 0;
if (!timeoverlay_type) {
static const GTypeInfo timeoverlay_info = {
sizeof (GstTimeoverlayClass),
gst_timeoverlay_base_init,
NULL,
gst_timeoverlay_class_init,
NULL,
NULL,
sizeof (GstTimeoverlay),
0,
gst_timeoverlay_init,
};
timeoverlay_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstTimeoverlay", &timeoverlay_info, 0);
}
return timeoverlay_type;
}
static GstVideofilterFormat gst_timeoverlay_formats[] = {
{"I420", 12, gst_timeoverlay_planar411,},
};
static GstStaticPadTemplate gst_timeoverlay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
);
static GstVideofilterClass *parent_class = NULL;
static void
gst_timeoverlay_base_init (gpointer g_class)
gst_timeoverlay_update_font_height (GstTimeoverlay * timeoverlay)
{
static GstElementDetails timeoverlay_details =
GST_ELEMENT_DETAILS ("Time Overlay",
"Filter/Editor/Video",
"Overlays the time on a video stream",
"David Schleef <ds@schleef.org>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &timeoverlay_details);
for (i = 0; i < G_N_ELEMENTS (gst_timeoverlay_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_timeoverlay_formats + i);
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
}
static void
gst_timeoverlay_class_init (gpointer g_class, gpointer class_data)
{
GObjectClass *gobject_class;
GstVideofilterClass *videofilter_class;
gobject_class = G_OBJECT_CLASS (g_class);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
#if 0
g_object_class_install_property (gobject_class, ARG_METHOD,
g_param_spec_enum ("method", "method", "method",
GST_TYPE_TIMEOVERLAY_METHOD, GST_TIMEOVERLAY_METHOD_1,
G_PARAM_READWRITE));
#endif
gobject_class->set_property = gst_timeoverlay_set_property;
gobject_class->get_property = gst_timeoverlay_get_property;
videofilter_class->setup = gst_timeoverlay_setup;
}
static void
gst_timeoverlay_init (GTypeInstance * instance, gpointer g_class)
{
GstTimeoverlay *timeoverlay = GST_TIMEOVERLAY (instance);
GstVideofilter *videofilter;
GST_DEBUG ("gst_timeoverlay_init");
videofilter = GST_VIDEOFILTER (timeoverlay);
/* do stuff */
}
static void
gst_timeoverlay_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstTimeoverlay *src;
g_return_if_fail (GST_IS_TIMEOVERLAY (object));
src = GST_TIMEOVERLAY (object);
GST_DEBUG ("gst_timeoverlay_set_property");
switch (prop_id) {
#if 0
case ARG_METHOD:
src->method = g_value_get_enum (value);
break;
#endif
default:
break;
}
}
static void
gst_timeoverlay_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstTimeoverlay *src;
g_return_if_fail (GST_IS_TIMEOVERLAY (object));
src = GST_TIMEOVERLAY (object);
switch (prop_id) {
#if 0
case ARG_METHOD:
g_value_set_enum (value, src->method);
break;
#endif
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_timeoverlay_update_font_height (GstVideofilter * videofilter)
{
GstTimeoverlay *timeoverlay = GST_TIMEOVERLAY (videofilter);
gint width, height;
cairo_surface_t *font_surface;
cairo_t *font_cairo;
cairo_font_extents_t font_extents;
width = gst_videofilter_get_input_width (videofilter);
height = gst_videofilter_get_input_height (videofilter);
width = timeoverlay->width;
height = timeoverlay->height;
font_surface =
cairo_image_surface_create (CAIRO_FORMAT_ARGB32, width, height);
@ -216,15 +84,58 @@ gst_timeoverlay_update_font_height (GstVideofilter * videofilter)
font_cairo = NULL;
}
static void
gst_timeoverlay_setup (GstVideofilter * videofilter)
static gboolean
gst_timeoverlay_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
GstTimeoverlay *timeoverlay;
GstTimeoverlay *filter = GST_TIMEOVERLAY (btrans);
GstStructure *structure;
gboolean ret = FALSE;
g_return_if_fail (GST_IS_TIMEOVERLAY (videofilter));
timeoverlay = GST_TIMEOVERLAY (videofilter);
structure = gst_caps_get_structure (incaps, 0);
gst_timeoverlay_update_font_height (videofilter);
if (gst_structure_get_int (structure, "width", &filter->width) &&
gst_structure_get_int (structure, "height", &filter->height)) {
gst_timeoverlay_update_font_height (filter);
ret = TRUE;
}
return ret;
}
/* Useful macros */
#define GST_VIDEO_I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
#define GST_VIDEO_I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
#define GST_VIDEO_I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(GST_VIDEO_I420_Y_ROWSTRIDE(width)))/2)
#define GST_VIDEO_I420_Y_OFFSET(w,h) (0)
#define GST_VIDEO_I420_U_OFFSET(w,h) (GST_VIDEO_I420_Y_OFFSET(w,h)+(GST_VIDEO_I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
#define GST_VIDEO_I420_V_OFFSET(w,h) (GST_VIDEO_I420_U_OFFSET(w,h)+(GST_VIDEO_I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
#define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
static gboolean
gst_timeoverlay_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GstTimeoverlay *filter;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
filter = GST_TIMEOVERLAY (btrans);
structure = gst_caps_get_structure (caps, 0);
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = GST_VIDEO_I420_SIZE (width, height);
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
return ret;
}
static char *
@ -250,8 +161,9 @@ gst_timeoverlay_print_smpte_time (guint64 time)
}
static void
gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src)
static GstFlowReturn
gst_timeoverlay_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
{
GstTimeoverlay *timeoverlay;
int width;
@ -261,15 +173,20 @@ gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src)
int i, j;
unsigned char *image;
cairo_text_extents_t extents;
gpointer dest, src;
cairo_surface_t *font_surface;
cairo_t *text_cairo;
GstFlowReturn ret = GST_FLOW_OK;
g_return_if_fail (GST_IS_TIMEOVERLAY (videofilter));
timeoverlay = GST_TIMEOVERLAY (videofilter);
timeoverlay = GST_TIMEOVERLAY (trans);
width = gst_videofilter_get_input_width (videofilter);
height = gst_videofilter_get_input_height (videofilter);
gst_buffer_stamp (out, in);
src = GST_BUFFER_DATA (in);
dest = GST_BUFFER_DATA (out);
width = timeoverlay->width;
height = timeoverlay->height;
/* create surface for font rendering */
/* FIXME: preparation of the surface could also be done once when settings
@ -292,9 +209,7 @@ gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src)
cairo_fill (text_cairo);
cairo_restore (text_cairo);
string =
gst_timeoverlay_print_smpte_time (GST_BUFFER_TIMESTAMP (videofilter->
in_buf));
string = gst_timeoverlay_print_smpte_time (GST_BUFFER_TIMESTAMP (in));
cairo_save (text_cairo);
cairo_select_font_face (text_cairo, "monospace", 0, 0);
cairo_set_font_size (text_cairo, 20);
@ -303,12 +218,6 @@ gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src)
cairo_move_to (text_cairo, 0, timeoverlay->text_height - 2);
cairo_show_text (text_cairo, string);
g_free (string);
#if 0
cairo_text_path (timeoverlay->cr, string);
cairo_set_rgb_color (timeoverlay->cr, 1, 1, 1);
cairo_set_line_width (timeoverlay->cr, 1.0);
cairo_stroke (timeoverlay->cr);
#endif
cairo_restore (text_cairo);
@ -317,7 +226,7 @@ gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src)
if (b_width > width)
b_width = width;
memcpy (dest, src, videofilter->from_buf_size);
memcpy (dest, src, GST_BUFFER_SIZE (in));
for (i = 0; i < timeoverlay->text_height; i++) {
for (j = 0; j < b_width; j++) {
((unsigned char *) dest)[i * width + j] = image[(i * width + j) * 4 + 0];
@ -332,4 +241,67 @@ gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src)
cairo_destroy (text_cairo);
text_cairo = NULL;
g_free (image);
return ret;
}
static void
gst_timeoverlay_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &timeoverlay_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_timeoverlay_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_timeoverlay_src_template));
}
static void
gst_timeoverlay_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_timeoverlay_set_caps);
trans_class->get_unit_size =
GST_DEBUG_FUNCPTR (gst_timeoverlay_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_timeoverlay_transform);
}
static void
gst_timeoverlay_init (GTypeInstance * instance, gpointer g_class)
{
}
GType
gst_timeoverlay_get_type (void)
{
static GType timeoverlay_type = 0;
if (!timeoverlay_type) {
static const GTypeInfo timeoverlay_info = {
sizeof (GstTimeoverlayClass),
gst_timeoverlay_base_init,
NULL,
gst_timeoverlay_class_init,
NULL,
NULL,
sizeof (GstTimeoverlay),
0,
gst_timeoverlay_init,
};
timeoverlay_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstTimeoverlay", &timeoverlay_info, 0);
}
return timeoverlay_type;
}

View file

@ -21,13 +21,11 @@
#ifndef __GST_TIMEOVERLAY_H__
#define __GST_TIMEOVERLAY_H__
#include <gst/gst.h>
#include <cairo.h>
#include "gstvideofilter.h"
G_BEGIN_DECLS
#define GST_TYPE_TIMEOVERLAY \
@ -47,6 +45,8 @@ typedef struct _GstTimeoverlayClass GstTimeoverlayClass;
struct _GstTimeoverlay {
GstVideofilter videofilter;
gint width, height;
cairo_surface_t *surface;
cairo_t *cr;
int text_height;
@ -62,4 +62,3 @@ GType gst_timeoverlay_get_type(void);
G_END_DECLS
#endif /* __GST_TIMEOVERLAY_H__ */

View file

@ -14,8 +14,12 @@ libgstefence_la_LIBADD = $(GST_LIBS)
libgstefence_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstnavigationtest_la_SOURCES = gstnavigationtest.c
libgstnavigationtest_la_CFLAGS = $(GST_CFLAGS) -I$(top_srcdir)/gst/videofilter
libgstnavigationtest_la_LIBADD = $(GST_LIBS) $(top_builddir)/gst/videofilter/libgstvideofilter-@GST_MAJORMINOR@.la
libgstnavigationtest_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) \
-I$(top_srcdir)/gst/videofilter
libgstnavigationtest_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) \
$(top_builddir)/gst/videofilter/libgstvideofilter-@GST_MAJORMINOR@.la
libgstnavigationtest_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstdebug_la_SOURCES = \

View file

@ -18,122 +18,40 @@
* Boston, MA 02111-1307, USA.
*/
/*
* This file was (probably) generated from gstnavigationtest.c,
* gstnavigationtest.c,v 1.7 2003/11/08 02:48:59 dschleef Exp
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gstnavigationtest.h>
#include "gstnavigationtest.h"
#include <string.h>
#include <math.h>
typedef struct
{
double x;
double y;
gint images_left;
guint8 cy, cu, cv;
} ButtonClick;
#include <gst/video/video.h>
static void gst_navigationtest_base_init (gpointer g_class);
static void gst_navigationtest_class_init (gpointer g_class,
gpointer class_data);
static void gst_navigationtest_init (GTypeInstance * instance,
gpointer g_class);
GST_DEBUG_CATEGORY (navigationtest_debug);
#define GST_CAT_DEFAULT navigationtest_debug
static gboolean gst_navigationtest_handle_src_event (GstPad * pad,
GstEvent * event);
static GstElementDetails navigationtest_details =
GST_ELEMENT_DETAILS ("Video Navigation test",
"Filter/Effect/Video",
"Handle navigation events showing a black square following mouse pointer",
"David Schleef <ds@schleef.org>");
static GstStateChangeReturn
gst_navigationtest_change_state (GstElement * element,
GstStateChange transition);
static GstStaticPadTemplate gst_navigationtest_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
);
static void gst_navigationtest_planar411 (GstVideofilter * videofilter,
void *dest, void *src);
static void gst_navigationtest_setup (GstVideofilter * videofilter);
static GstStaticPadTemplate gst_navigationtest_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
);
static GstVideofilterClass *parent_class; /* NULL */
GType
gst_navigationtest_get_type (void)
{
static GType navigationtest_type = 0;
if (!navigationtest_type) {
static const GTypeInfo navigationtest_info = {
sizeof (GstNavigationtestClass),
gst_navigationtest_base_init,
NULL,
gst_navigationtest_class_init,
NULL,
NULL,
sizeof (GstNavigationtest),
0,
gst_navigationtest_init,
};
navigationtest_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstNavigationtest", &navigationtest_info, 0);
}
return navigationtest_type;
}
static GstVideofilterFormat gst_navigationtest_formats[] = {
{"I420", 12, gst_navigationtest_planar411,},
};
static void
gst_navigationtest_base_init (gpointer g_class)
{
static GstElementDetails navigationtest_details =
GST_ELEMENT_DETAILS ("Video Filter Template",
"Filter/Video",
"Template for a video filter",
"David Schleef <ds@schleef.org>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &navigationtest_details);
for (i = 0; i < G_N_ELEMENTS (gst_navigationtest_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_navigationtest_formats + i);
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
}
static void
gst_navigationtest_class_init (gpointer g_class, gpointer class_data)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
parent_class = g_type_class_peek_parent (g_class);
element_class->change_state = gst_navigationtest_change_state;
videofilter_class->setup = gst_navigationtest_setup;
}
static void
gst_navigationtest_init (GTypeInstance * instance, gpointer g_class)
{
GstNavigationtest *navtest = GST_NAVIGATIONTEST (instance);
GstVideofilter *videofilter = GST_VIDEOFILTER (navtest);
gst_pad_set_event_function (videofilter->srcpad,
GST_DEBUG_FUNCPTR (gst_navigationtest_handle_src_event));
navtest->x = -1;
navtest->y = -1;
}
static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_navigationtest_handle_src_event (GstPad * pad, GstEvent * event)
@ -149,10 +67,8 @@ gst_navigationtest_handle_src_event (GstPad * pad, GstEvent * event)
const GstStructure *s = gst_event_get_structure (event);
gint fps_n, fps_d;
fps_n = gst_value_get_fraction_numerator (
(&GST_VIDEOFILTER (navtest)->framerate));
fps_d = gst_value_get_fraction_denominator (
(&GST_VIDEOFILTER (navtest)->framerate));
fps_n = gst_value_get_fraction_numerator ((&navtest->framerate));
fps_d = gst_value_get_fraction_denominator ((&navtest->framerate));
type = gst_structure_get_string (s, "event");
if (g_str_equal (type, "mouse-move")) {
@ -189,16 +105,63 @@ gst_navigationtest_handle_src_event (GstPad * pad, GstEvent * event)
return gst_pad_event_default (pad, event);
}
static void
gst_navigationtest_setup (GstVideofilter * videofilter)
/* Useful macros */
#define GST_VIDEO_I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
#define GST_VIDEO_I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
#define GST_VIDEO_I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(GST_VIDEO_I420_Y_ROWSTRIDE(width)))/2)
#define GST_VIDEO_I420_Y_OFFSET(w,h) (0)
#define GST_VIDEO_I420_U_OFFSET(w,h) (GST_VIDEO_I420_Y_OFFSET(w,h)+(GST_VIDEO_I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
#define GST_VIDEO_I420_V_OFFSET(w,h) (GST_VIDEO_I420_U_OFFSET(w,h)+(GST_VIDEO_I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
#define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
static gboolean
gst_navigationtest_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GstNavigationtest *navigationtest;
GstNavigationtest *navtest;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
g_return_if_fail (GST_IS_NAVIGATIONTEST (videofilter));
navigationtest = GST_NAVIGATIONTEST (videofilter);
navtest = GST_NAVIGATIONTEST (btrans);
/* if any setup needs to be done, do it here */
structure = gst_caps_get_structure (caps, 0);
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = GST_VIDEO_I420_SIZE (width, height);
ret = TRUE;
GST_DEBUG_OBJECT (navtest, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
return ret;
}
static gboolean
gst_navigationtest_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
GstNavigationtest *navtest = GST_NAVIGATIONTEST (btrans);
gboolean ret = FALSE;
GstStructure *structure;
structure = gst_caps_get_structure (incaps, 0);
if (gst_structure_get_int (structure, "width", &navtest->width) &&
gst_structure_get_int (structure, "height", &navtest->height)) {
const GValue *framerate;
framerate = gst_structure_get_value (structure, "framerate");
if (framerate && GST_VALUE_HOLDS_FRACTION (framerate)) {
g_value_copy (framerate, &navtest->framerate);
ret = TRUE;
}
}
return ret;
}
static void
@ -242,37 +205,37 @@ draw_box_planar411 (guint8 * dest, int width, int height, int x, int y,
}
}
static void
gst_navigationtest_planar411 (GstVideofilter * videofilter,
void *dest, void *src)
static GstFlowReturn
gst_navigationtest_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
{
GstNavigationtest *navtest = (GstNavigationtest *) videofilter;
gint width, height;
GstNavigationtest *navtest = GST_NAVIGATIONTEST (trans);
GSList *walk;
g_return_if_fail (GST_IS_NAVIGATIONTEST (videofilter));
width = gst_videofilter_get_input_width (videofilter);
height = gst_videofilter_get_input_height (videofilter);
GstFlowReturn ret = GST_FLOW_OK;
/* do something interesting here. This simply copies the source
* to the destination. */
memcpy (dest, src, width * height + (width / 2) * (height / 2) * 2);
gst_buffer_stamp (out, in);
memcpy (GST_BUFFER_DATA (out), GST_BUFFER_DATA (in),
MIN (GST_BUFFER_SIZE (in), GST_BUFFER_SIZE (out)));
walk = navtest->clicks;
while (walk) {
ButtonClick *click = walk->data;
walk = g_slist_next (walk);
draw_box_planar411 (dest, width, height, rint (click->x),
rint (click->y), click->cy, click->cu, click->cv);
draw_box_planar411 (GST_BUFFER_DATA (out), navtest->width, navtest->height,
rint (click->x), rint (click->y), click->cy, click->cu, click->cv);
if (--click->images_left < 1) {
navtest->clicks = g_slist_remove (navtest->clicks, click);
g_free (click);
}
}
draw_box_planar411 (dest, width, height, rint (navtest->x),
rint (navtest->y), 0, 128, 128);
draw_box_planar411 (GST_BUFFER_DATA (out), navtest->width, navtest->height,
rint (navtest->x), rint (navtest->y), 0, 128, 128);
return ret;
}
static GstStateChangeReturn
@ -282,12 +245,6 @@ gst_navigationtest_change_state (GstElement * element,
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
GstNavigationtest *navtest = GST_NAVIGATIONTEST (element);
/* upwards state changes */
switch (transition) {
default:
break;
}
if (GST_ELEMENT_CLASS (parent_class)->change_state)
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
@ -307,9 +264,84 @@ gst_navigationtest_change_state (GstElement * element,
return ret;
}
static void
gst_navigationtest_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &navigationtest_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_navigationtest_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_navigationtest_src_template));
}
static void
gst_navigationtest_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
element_class->change_state =
GST_DEBUG_FUNCPTR (gst_navigationtest_change_state);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_navigationtest_set_caps);
trans_class->get_unit_size =
GST_DEBUG_FUNCPTR (gst_navigationtest_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_navigationtest_transform);
}
static void
gst_navigationtest_init (GTypeInstance * instance, gpointer g_class)
{
GstNavigationtest *navtest = GST_NAVIGATIONTEST (instance);
GstBaseTransform *btrans = GST_BASE_TRANSFORM (instance);
gst_pad_set_event_function (btrans->srcpad,
GST_DEBUG_FUNCPTR (gst_navigationtest_handle_src_event));
navtest->x = -1;
navtest->y = -1;
}
GType
gst_navigationtest_get_type (void)
{
static GType navigationtest_type = 0;
if (!navigationtest_type) {
static const GTypeInfo navigationtest_info = {
sizeof (GstNavigationtestClass),
gst_navigationtest_base_init,
NULL,
gst_navigationtest_class_init,
NULL,
NULL,
sizeof (GstNavigationtest),
0,
gst_navigationtest_init,
};
navigationtest_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstNavigationtest", &navigationtest_info, 0);
}
return navigationtest_type;
}
static gboolean
plugin_init (GstPlugin * plugin)
{
GST_DEBUG_CATEGORY_INIT (navigationtest_debug, "navigationtest", 0,
"navigationtest");
return gst_element_register (plugin, "navigationtest", GST_RANK_NONE,
GST_TYPE_NAVIGATIONTEST);
}

View file

@ -21,12 +21,8 @@
#ifndef __GST_NAVIGATIONTEST_H__
#define __GST_NAVIGATIONTEST_H__
#include <gst/gst.h>
#include "gstvideofilter.h"
G_BEGIN_DECLS
#define GST_TYPE_NAVIGATIONTEST \
@ -43,11 +39,21 @@ G_BEGIN_DECLS
typedef struct _GstNavigationtest GstNavigationtest;
typedef struct _GstNavigationtestClass GstNavigationtestClass;
typedef struct
{
gdouble x;
gdouble y;
gint images_left;
guint8 cy, cu, cv;
} ButtonClick;
struct _GstNavigationtest {
GstVideofilter videofilter;
double x;
double y;
gint width, height;
GValue framerate;
gdouble x, y;
GSList *clicks;
};
@ -61,4 +67,3 @@ GType gst_navigationtest_get_type(void);
G_END_DECLS
#endif /* __GST_NAVIGATIONTEST_H__ */

View file

@ -5,10 +5,12 @@ libgsteffectv_la_SOURCES = \
gstshagadelic.c gstvertigo.c gstrev.c gstquark.c
libgsteffectv_la_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) \
$(GST_CFLAGS) \
-I$(top_srcdir)/gst/videofilter
libgsteffectv_la_LIBADD = \
$(GST_PLUGINS_BASE_LIBS) \
$(GST_BASE_LIBS) \
$(GST_LIBS) \
$(top_builddir)/gst/videofilter/libgstvideofilter-@GST_MAJORMINOR@.la
libgsteffectv_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)

View file

@ -37,11 +37,13 @@
#include "config.h"
#endif
#include <gst/gst.h>
#include <gstvideofilter.h>
#include <string.h>
#include <math.h>
#include <gst/video/video.h>
#define GST_TYPE_AGINGTV \
(gst_agingtv_get_type())
#define GST_AGINGTV(obj) \
@ -89,128 +91,69 @@ struct _GstAgingTVClass
GstVideofilterClass parent_class;
};
/* GstAgingTV signals and args */
enum
GType gst_agingtv_get_type (void);
static GstElementDetails agingtv_details = GST_ELEMENT_DETAILS ("AgingTV",
"Filter/Effect/Video",
"AgingTV adds age to video input using scratches and dust",
"Sam Lantinga <slouken@devolution.com>");
static GstStaticPadTemplate gst_agingtv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstStaticPadTemplate gst_agingtv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_agingtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
/* FILL ME */
LAST_SIGNAL
};
GstAgingTV *filter = GST_AGINGTV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
enum
{
ARG_0
/* FILL ME */
};
structure = gst_caps_get_structure (incaps, 0);
static void gst_agingtv_base_init (gpointer g_class);
static void gst_agingtv_class_init (gpointer g_class, gpointer class_data);
static void gst_agingtv_init (GTypeInstance * instance, gpointer g_class);
static void gst_agingtv_setup (GstVideofilter * videofilter);
static void gst_agingtv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_agingtv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_agingtv_rgb32 (GstVideofilter * videofilter, void *d, void *s);
GType
gst_agingtv_get_type (void)
{
static GType agingtv_type = 0;
if (!agingtv_type) {
static const GTypeInfo agingtv_info = {
sizeof (GstAgingTVClass),
gst_agingtv_base_init,
NULL,
gst_agingtv_class_init,
NULL,
NULL,
sizeof (GstAgingTV),
0,
gst_agingtv_init,
};
agingtv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstAgingTV", &agingtv_info, 0);
}
return agingtv_type;
}
static GstVideofilterFormat gst_agingtv_formats[] = {
{"RGB ", 32, gst_agingtv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000}
};
static void
gst_agingtv_base_init (gpointer g_class)
{
static GstElementDetails agingtv_details = GST_ELEMENT_DETAILS ("AgingTV",
"Filter/Effect/Video",
"AgingTV adds age to video input using scratches and dust",
"Sam Lantinga <slouken@devolution.com>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &agingtv_details);
for (i = 0; i < G_N_ELEMENTS (gst_agingtv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_agingtv_formats + i);
if (gst_structure_get_int (structure, "width", &filter->width) &&
gst_structure_get_int (structure, "height", &filter->height)) {
ret = TRUE;
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
return ret;
}
static void
gst_agingtv_class_init (gpointer g_class, gpointer class_data)
static gboolean
gst_agingtv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GObjectClass *gobject_class;
GstVideofilterClass *videofilter_class;
GstAgingTV *filter;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
gobject_class = G_OBJECT_CLASS (g_class);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
filter = GST_AGINGTV (btrans);
gobject_class->set_property = gst_agingtv_set_property;
gobject_class->get_property = gst_agingtv_get_property;
structure = gst_caps_get_structure (caps, 0);
#if 0
g_object_class_install_property (gobject_class, ARG_METHOD,
g_param_spec_enum ("method", "method", "method",
GST_TYPE_AGINGTV_METHOD, GST_AGINGTV_METHOD_1, G_PARAM_READWRITE));
#endif
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
videofilter_class->setup = gst_agingtv_setup;
}
static void
gst_agingtv_init (GTypeInstance * instance, gpointer g_class)
{
GstAgingTV *agingtv = GST_AGINGTV (instance);
GstVideofilter *videofilter;
GST_DEBUG ("gst_agingtv_init");
videofilter = GST_VIDEOFILTER (agingtv);
/* do stuff */
}
static void
gst_agingtv_setup (GstVideofilter * videofilter)
{
GstAgingTV *agingtv;
int width = gst_videofilter_get_input_width (videofilter);
int height = gst_videofilter_get_input_height (videofilter);
g_return_if_fail (GST_IS_AGINGTV (videofilter));
agingtv = GST_AGINGTV (videofilter);
/* if any setup needs to be done, do it here */
agingtv->width = width;
agingtv->height = height;
return ret;
}
static unsigned int
@ -359,69 +302,89 @@ pits (guint32 * dest, gint width, gint height, gint area_scale,
}
}
static void
gst_agingtv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
static GstFlowReturn
gst_agingtv_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
{
GstAgingTV *agingtv;
int width = gst_videofilter_get_input_width (videofilter);
int height = gst_videofilter_get_input_height (videofilter);
GstAgingTV *agingtv = GST_AGINGTV (trans);
gint width = agingtv->width;
gint height = agingtv->height;
int video_size = width * height;
guint32 *src = s;
guint32 *dest = d;
guint32 *src = (guint32 *) GST_BUFFER_DATA (in);
guint32 *dest = (guint32 *) GST_BUFFER_DATA (out);
gint area_scale = width * height / 64 / 480;
GstFlowReturn ret = GST_FLOW_OK;
gst_buffer_stamp (out, in);
if (area_scale <= 0)
area_scale = 1;
g_return_if_fail (GST_IS_AGINGTV (videofilter));
agingtv = GST_AGINGTV (videofilter);
coloraging (src, dest, video_size);
scratching (agingtv->scratches, agingtv->scratch_lines, dest, width, height);
pits (dest, width, height, area_scale, agingtv->pits_interval);
if (area_scale > 1)
dusts (dest, width, height, agingtv->dust_interval, area_scale);
return ret;
}
static void
gst_agingtv_set_property (GObject * object, guint prop_id, const GValue * value,
GParamSpec * pspec)
gst_agingtv_base_init (gpointer g_class)
{
GstAgingTV *src;
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
g_return_if_fail (GST_IS_AGINGTV (object));
src = GST_AGINGTV (object);
gst_element_class_set_details (element_class, &agingtv_details);
GST_DEBUG ("gst_agingtv_set_property");
switch (prop_id) {
#if 0
case ARG_METHOD:
src->method = g_value_get_enum (value);
break;
#endif
default:
break;
}
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_agingtv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_agingtv_src_template));
}
static void
gst_agingtv_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
gst_agingtv_class_init (gpointer klass, gpointer class_data)
{
GstAgingTV *src;
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
g_return_if_fail (GST_IS_AGINGTV (object));
src = GST_AGINGTV (object);
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
switch (prop_id) {
#if 0
case ARG_METHOD:
g_value_set_enum (value, src->method);
break;
#endif
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
parent_class = g_type_class_peek_parent (klass);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_agingtv_set_caps);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_agingtv_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_agingtv_transform);
}
static void
gst_agingtv_init (GTypeInstance * instance, gpointer g_class)
{
}
GType
gst_agingtv_get_type (void)
{
static GType agingtv_type = 0;
if (!agingtv_type) {
static const GTypeInfo agingtv_info = {
sizeof (GstAgingTVClass),
gst_agingtv_base_init,
NULL,
gst_agingtv_class_init,
NULL,
NULL,
sizeof (GstAgingTV),
0,
gst_agingtv_init,
};
agingtv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstAgingTV", &agingtv_info, 0);
}
return agingtv_type;
}

View file

@ -13,9 +13,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gstvideofilter.h>
#include <string.h>
#include <gst/gst.h>
#include <gstvideofilter.h>
#include <gst/video/video.h>
#define GST_TYPE_DICETV \
(gst_dicetv_get_type())
@ -60,17 +64,34 @@ struct _GstDiceTV
struct _GstDiceTVClass
{
GstVideofilterClass parent_class;
void (*reset) (GstElement * element);
};
/* Filter signals and args */
enum
{
/* FILL ME */
RESET_SIGNAL,
LAST_SIGNAL
};
GType gst_dicetv_get_type (void);
static void gst_dicetv_create_map (GstDiceTV * filter);
static GstElementDetails gst_dicetv_details = GST_ELEMENT_DETAILS ("DiceTV",
"Filter/Effect/Video",
"'Dices' the screen up into many small squares",
"Wim Taymans <wim.taymans@chello.be>");
static GstStaticPadTemplate gst_dicetv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
);
static GstStaticPadTemplate gst_dicetv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
);
static GstVideofilterClass *parent_class = NULL;
enum
{
@ -78,146 +99,50 @@ enum
ARG_CUBE_BITS
};
static void gst_dicetv_base_init (gpointer g_class);
static void gst_dicetv_class_init (gpointer g_class, gpointer class_data);
static void gst_dicetv_init (GTypeInstance * instance, gpointer g_class);
static void gst_dicetv_reset_handler (GstElement * elem);
static void gst_dicetv_create_map (GstDiceTV * filter);
static void gst_dicetv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_dicetv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_dicetv_setup (GstVideofilter * videofilter);
static void gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s);
static guint gst_dicetv_signals[LAST_SIGNAL] = { 0 };
GType
gst_dicetv_get_type (void)
static gboolean
gst_dicetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
static GType dicetv_type = 0;
GstDiceTV *filter = GST_DICETV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
if (!dicetv_type) {
static const GTypeInfo dicetv_info = {
sizeof (GstDiceTVClass),
gst_dicetv_base_init,
NULL,
(GClassInitFunc) gst_dicetv_class_init,
NULL,
NULL,
sizeof (GstDiceTV),
0,
(GInstanceInitFunc) gst_dicetv_init,
};
structure = gst_caps_get_structure (incaps, 0);
dicetv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstDiceTV", &dicetv_info,
0);
}
return dicetv_type;
}
static GstVideofilterFormat gst_dicetv_formats[] = {
{"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x00ff0000, 0x0000ff00,
0x000000ff},
{"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0xff000000, 0x00ff0000,
0x0000ff00},
{"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x000000ff, 0x0000ff00,
0x00ff0000},
{"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000},
};
static void
gst_dicetv_base_init (gpointer g_class)
{
/* elementfactory information */
static GstElementDetails gst_dicetv_details = GST_ELEMENT_DETAILS ("DiceTV",
"Filter/Effect/Video",
"'Dices' the screen up into many small squares",
"Wim Taymans <wim.taymans@chello.be>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &gst_dicetv_details);
for (i = 0; i < G_N_ELEMENTS (gst_dicetv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_dicetv_formats + i);
if (gst_structure_get_int (structure, "width", &filter->width) &&
gst_structure_get_int (structure, "height", &filter->height)) {
g_free (filter->dicemap);
filter->dicemap =
(gchar *) g_malloc (filter->height * filter->width * sizeof (char));
gst_dicetv_create_map (filter);
ret = TRUE;
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
return ret;
}
static void
gst_dicetv_class_init (gpointer g_class, gpointer class_data)
static gboolean
gst_dicetv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GObjectClass *gobject_class;
GstVideofilterClass *videofilter_class;
GstDiceTVClass *dicetv_class;
GstDiceTV *filter;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
gobject_class = G_OBJECT_CLASS (g_class);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
dicetv_class = GST_DICETV_CLASS (g_class);
filter = GST_DICETV (btrans);
gst_dicetv_signals[RESET_SIGNAL] =
g_signal_new ("reset",
G_TYPE_FROM_CLASS (g_class),
G_SIGNAL_RUN_LAST,
G_STRUCT_OFFSET (GstDiceTVClass, reset),
NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
structure = gst_caps_get_structure (caps, 0);
dicetv_class->reset = gst_dicetv_reset_handler;
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
gobject_class->set_property = gst_dicetv_set_property;
gobject_class->get_property = gst_dicetv_get_property;
g_object_class_install_property (gobject_class, ARG_CUBE_BITS,
g_param_spec_int ("square_bits", "Square Bits", "The size of the Squares",
MIN_CUBE_BITS, MAX_CUBE_BITS, DEFAULT_CUBE_BITS, G_PARAM_READWRITE));
videofilter_class->setup = gst_dicetv_setup;
}
static void
gst_dicetv_setup (GstVideofilter * videofilter)
{
GstDiceTV *dicetv;
g_return_if_fail (GST_IS_DICETV (videofilter));
dicetv = GST_DICETV (videofilter);
dicetv->width = gst_videofilter_get_input_width (videofilter);
dicetv->height = gst_videofilter_get_input_height (videofilter);
g_free (dicetv->dicemap);
dicetv->dicemap =
(gchar *) g_malloc (dicetv->height * dicetv->width * sizeof (char));
gst_dicetv_create_map (dicetv);
}
static void
gst_dicetv_init (GTypeInstance * instance, gpointer g_class)
{
GstDiceTV *filter = GST_DICETV (instance);
filter->dicemap = NULL;
filter->g_cube_bits = DEFAULT_CUBE_BITS;
filter->g_cube_size = 0;
filter->g_map_height = 0;
filter->g_map_width = 0;
}
static void
gst_dicetv_reset_handler (GstElement * element)
{
GstDiceTV *filter = GST_DICETV (element);
gst_dicetv_create_map (filter);
return ret;
}
static unsigned int
@ -228,23 +153,20 @@ fastrand (void)
return (fastrand_val = fastrand_val * 1103515245 + 12345);
}
static void
gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s)
static GstFlowReturn
gst_dicetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
GstDiceTV *filter;
guint32 *src;
guint32 *dest;
gint i;
gint map_x, map_y, map_i;
gint base;
gint dx, dy, di;
gint video_width;
gint g_cube_bits;
gint g_cube_size;
guint32 *src, *dest;
gint i, map_x, map_y, map_i, base, dx, dy, di;
gint video_width, g_cube_bits, g_cube_size;
GstFlowReturn ret = GST_FLOW_OK;
filter = GST_DICETV (videofilter);
src = (guint32 *) s;
dest = (guint32 *) d;
filter = GST_DICETV (trans);
src = (guint32 *) GST_BUFFER_DATA (in);
dest = (guint32 *) GST_BUFFER_DATA (out);
gst_buffer_stamp (out, in);
video_width = filter->width;
g_cube_bits = filter->g_cube_bits;
@ -304,6 +226,8 @@ gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s)
map_i++;
}
}
return ret;
}
static void
@ -364,3 +288,78 @@ gst_dicetv_get_property (GObject * object, guint prop_id, GValue * value,
break;
}
}
static void
gst_dicetv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &gst_dicetv_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_dicetv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_dicetv_src_template));
}
static void
gst_dicetv_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_dicetv_set_property;
gobject_class->get_property = gst_dicetv_get_property;
g_object_class_install_property (gobject_class, ARG_CUBE_BITS,
g_param_spec_int ("square_bits", "Square Bits", "The size of the Squares",
MIN_CUBE_BITS, MAX_CUBE_BITS, DEFAULT_CUBE_BITS, G_PARAM_READWRITE));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_dicetv_set_caps);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_dicetv_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_dicetv_transform);
}
static void
gst_dicetv_init (GTypeInstance * instance, gpointer g_class)
{
GstDiceTV *filter = GST_DICETV (instance);
filter->dicemap = NULL;
filter->g_cube_bits = DEFAULT_CUBE_BITS;
filter->g_cube_size = 0;
filter->g_map_height = 0;
filter->g_map_width = 0;
}
GType
gst_dicetv_get_type (void)
{
static GType dicetv_type = 0;
if (!dicetv_type) {
static const GTypeInfo dicetv_info = {
sizeof (GstDiceTVClass),
gst_dicetv_base_init,
NULL,
(GClassInitFunc) gst_dicetv_class_init,
NULL,
NULL,
sizeof (GstDiceTV),
0,
(GInstanceInitFunc) gst_dicetv_init,
};
dicetv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstDiceTV", &dicetv_info,
0);
}
return dicetv_type;
}

View file

@ -24,10 +24,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <string.h>
#include <gst/gst.h>
#include <gstvideofilter.h>
#include <string.h>
#include <gst/video/video.h>
#define GST_TYPE_EDGETV \
(gst_edgetv_get_type())
#define GST_EDGETV(obj) \
@ -57,146 +60,97 @@ struct _GstEdgeTVClass
GstVideofilterClass parent_class;
};
/* Filter signals and args */
enum
GType gst_edgetv_get_type (void);
static GstElementDetails gst_edgetv_details = GST_ELEMENT_DETAILS ("EdgeTV",
"Filter/Effect/Video",
"Apply edge detect on video",
"Wim Taymans <wim.taymans@chello.be>");
static GstStaticPadTemplate gst_edgetv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstStaticPadTemplate gst_edgetv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_edgetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
/* FILL ME */
LAST_SIGNAL
};
GstEdgeTV *edgetv = GST_EDGETV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
enum
{
ARG_0
};
structure = gst_caps_get_structure (incaps, 0);
static void gst_edgetv_base_init (gpointer g_class);
static void gst_edgetv_class_init (gpointer g_class, gpointer class_data);
static void gst_edgetv_init (GTypeInstance * instance, gpointer g_class);
if (gst_structure_get_int (structure, "width", &edgetv->width) &&
gst_structure_get_int (structure, "height", &edgetv->height)) {
edgetv->map_width = edgetv->width / 4;
edgetv->map_height = edgetv->height / 4;
edgetv->video_width_margin = edgetv->width % 4;
static void gst_edgetv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_edgetv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_edgetv_setup (GstVideofilter * videofilter);
static void gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s);
/*static guint gst_edgetv_signals[LAST_SIGNAL] = { 0 }; */
GType
gst_edgetv_get_type (void)
{
static GType edgetv_type = 0;
if (!edgetv_type) {
static const GTypeInfo edgetv_info = {
sizeof (GstEdgeTVClass),
gst_edgetv_base_init,
NULL,
(GClassInitFunc) gst_edgetv_class_init,
NULL,
NULL,
sizeof (GstEdgeTV),
0,
(GInstanceInitFunc) gst_edgetv_init,
};
edgetv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstEdgeTV", &edgetv_info,
0);
}
return edgetv_type;
}
static GstVideofilterFormat gst_edgetv_formats[] = {
{"RGB ", 32, gst_edgetv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000}
};
static void
gst_edgetv_base_init (gpointer g_class)
{
/* elementfactory information */
static GstElementDetails gst_edgetv_details = GST_ELEMENT_DETAILS ("EdgeTV",
"Filter/Effect/Video",
"Apply edge detect on video",
"Wim Taymans <wim.taymans@chello.be>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &gst_edgetv_details);
for (i = 0; i < G_N_ELEMENTS (gst_edgetv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_edgetv_formats + i);
g_free (edgetv->map);
edgetv->map =
(guint32 *) g_malloc (edgetv->map_width * edgetv->map_height *
sizeof (guint32) * 2);
memset (edgetv->map, 0,
edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2);
ret = TRUE;
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
return ret;
}
static void
gst_edgetv_class_init (gpointer g_class, gpointer class_data)
{
GObjectClass *gobject_class;
GstVideofilterClass *videofilter_class;
gobject_class = G_OBJECT_CLASS (g_class);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
gobject_class->set_property = gst_edgetv_set_property;
gobject_class->get_property = gst_edgetv_get_property;
videofilter_class->setup = gst_edgetv_setup;
}
static void
gst_edgetv_init (GTypeInstance * instance, gpointer g_class)
{
GstEdgeTV *edgetv = GST_EDGETV (instance);
edgetv->map = NULL;
}
static void
gst_edgetv_setup (GstVideofilter * videofilter)
{
GstEdgeTV *edgetv;
int width = gst_videofilter_get_input_width (videofilter);
int height = gst_videofilter_get_input_height (videofilter);
g_return_if_fail (GST_IS_EDGETV (videofilter));
edgetv = GST_EDGETV (videofilter);
edgetv->width = width;
edgetv->height = height;
edgetv->map_width = width / 4;
edgetv->map_height = height / 4;
edgetv->video_width_margin = width % 4;
g_free (edgetv->map);
edgetv->map =
(guint32 *) g_malloc (edgetv->map_width * edgetv->map_height *
sizeof (guint32) * 2);
memset (edgetv->map, 0,
edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2);
}
static void
gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
static gboolean
gst_edgetv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GstEdgeTV *filter;
int x, y;
int r, g, b;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
filter = GST_EDGETV (btrans);
structure = gst_caps_get_structure (caps, 0);
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
return ret;
}
static GstFlowReturn
gst_edgetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
GstEdgeTV *filter;
gint x, y, r, g, b;
guint32 *src, *dest;
guint32 p, q;
guint32 v0, v1, v2, v3;
GstFlowReturn ret = GST_FLOW_OK;
filter = GST_EDGETV (videofilter);
filter = GST_EDGETV (trans);
src = (guint32 *) s;
dest = (guint32 *) d;
gst_buffer_stamp (out, in);
src = (guint32 *) GST_BUFFER_DATA (in);
dest = (guint32 *) GST_BUFFER_DATA (out);
src += filter->width * 4 + 4;
dest += filter->width * 4 + 4;
@ -207,7 +161,7 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
p = *src;
q = *(src - 4);
/* difference between the current pixel and right neighbor. */
/* difference between the current pixel and right neighbor. */
r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
g = ((p & 0xff00) - (q & 0xff00)) >> 8;
b = (p & 0xff) - (q & 0xff);
@ -225,7 +179,7 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
b = 255;
v2 = (r << 17) | (g << 9) | b;
/* difference between the current pixel and upper neighbor. */
/* difference between the current pixel and upper neighbor. */
q = *(src - filter->width * 4);
r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
g = ((p & 0xff00) - (q & 0xff00)) >> 8;
@ -275,37 +229,70 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
src += filter->width * 3 + 8 + filter->video_width_margin;
dest += filter->width * 3 + 8 + filter->video_width_margin;
}
return ret;
}
static void
gst_edgetv_set_property (GObject * object, guint prop_id, const GValue * value,
GParamSpec * pspec)
gst_edgetv_base_init (gpointer g_class)
{
GstEdgeTV *filter;
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
g_return_if_fail (GST_IS_EDGETV (object));
gst_element_class_set_details (element_class, &gst_edgetv_details);
filter = GST_EDGETV (object);
switch (prop_id) {
default:
break;
}
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_edgetv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_edgetv_src_template));
}
static void
gst_edgetv_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
gst_edgetv_class_init (gpointer klass, gpointer class_data)
{
GstEdgeTV *filter;
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
g_return_if_fail (GST_IS_EDGETV (object));
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
filter = GST_EDGETV (object);
parent_class = g_type_class_peek_parent (klass);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_edgetv_set_caps);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_edgetv_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_edgetv_transform);
}
static void
gst_edgetv_init (GTypeInstance * instance, gpointer g_class)
{
GstEdgeTV *edgetv = GST_EDGETV (instance);
edgetv->map = NULL;
}
GType
gst_edgetv_get_type (void)
{
static GType edgetv_type = 0;
if (!edgetv_type) {
static const GTypeInfo edgetv_info = {
sizeof (GstEdgeTVClass),
gst_edgetv_base_init,
NULL,
(GClassInitFunc) gst_edgetv_class_init,
NULL,
NULL,
sizeof (GstEdgeTV),
0,
(GInstanceInitFunc) gst_edgetv_init,
};
edgetv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstEdgeTV", &edgetv_info,
0);
}
return edgetv_type;
}

View file

@ -25,12 +25,8 @@
#include "config.h"
#endif
#include <string.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include "gsteffectv.h"
struct _elements_entry
{
gchar *name;
@ -38,31 +34,17 @@ struct _elements_entry
};
static struct _elements_entry _elements[] = {
{"edgeTV", gst_edgetv_get_type},
{"agingTV", gst_agingtv_get_type},
{"diceTV", gst_dicetv_get_type},
{"warpTV", gst_warptv_get_type},
{"shagadelicTV", gst_shagadelictv_get_type},
{"vertigoTV", gst_vertigotv_get_type},
{"revTV", gst_revtv_get_type},
{"quarkTV", gst_quarktv_get_type},
{"edgetv", gst_edgetv_get_type},
{"agingtv", gst_agingtv_get_type},
{"dicetv", gst_dicetv_get_type},
{"warptv", gst_warptv_get_type},
{"shagadelictv", gst_shagadelictv_get_type},
{"vertigotv", gst_vertigotv_get_type},
{"revtv", gst_revtv_get_type},
{"quarktv", gst_quarktv_get_type},
{NULL, 0},
};
GstStaticPadTemplate gst_effectv_src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
);
GstStaticPadTemplate gst_effectv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
);
static gboolean
plugin_init (GstPlugin * plugin)
{

View file

@ -31,6 +31,3 @@ GType gst_shagadelictv_get_type (void);
GType gst_vertigotv_get_type (void);
GType gst_revtv_get_type (void);
GType gst_quarktv_get_type (void);
extern GstStaticPadTemplate gst_effectv_sink_template;
extern GstStaticPadTemplate gst_effectv_src_template;

View file

@ -24,10 +24,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gstvideofilter.h>
#include <math.h>
#include <string.h>
#include <gst/gst.h>
#include "gsteffectv.h"
#include <gst/video/video.h>
#define GST_TYPE_QUARKTV \
(gst_quarktv_get_type())
@ -40,7 +43,7 @@
#define GST_IS_QUARKTV_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_QUARKTV))
/* number of frames of time-buffer. It should be as a configurable paramter */
/* number of frames of time-buffer. It should be as a configurable paramater */
/* This number also must be 2^n just for the speed. */
#define PLANES 16
@ -49,9 +52,7 @@ typedef struct _GstQuarkTVClass GstQuarkTVClass;
struct _GstQuarkTV
{
GstElement element;
GstPad *sinkpad, *srcpad;
GstVideofilter element;
gint width, height;
gint area;
@ -62,20 +63,7 @@ struct _GstQuarkTV
struct _GstQuarkTVClass
{
GstElementClass parent_class;
};
/* elementfactory information */
static GstElementDetails gst_quarktv_details = GST_ELEMENT_DETAILS ("QuarkTV",
"Filter/Effect/Video",
"Motion dissolver",
"FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>");
/* Filter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
GstVideofilterClass parent_class;
};
enum
@ -84,23 +72,71 @@ enum
ARG_PLANES
};
static void gst_quarktv_base_init (gpointer g_class);
static void gst_quarktv_class_init (GstQuarkTVClass * klass);
static void gst_quarktv_init (GstQuarkTV * filter);
GType gst_quarktv_get_type (void);
static GstStateChangeReturn gst_quarktv_change_state (GstElement * element,
GstStateChange transition);
static GstElementDetails quarktv_details = GST_ELEMENT_DETAILS ("QuarkTV",
"Filter/Effect/Video",
"Motion dissolver",
"FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>");
static void gst_quarktv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_quarktv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstStaticPadTemplate gst_quarktv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
);
static GstFlowReturn gst_quarktv_chain (GstPad * pad, GstBuffer * buffer);
static GstStaticPadTemplate gst_quarktv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
);
static GstElementClass *parent_class = NULL;
static GstVideofilterClass *parent_class = NULL;
/* static guint gst_quarktv_signals[LAST_SIGNAL] = { 0 }; */
static gboolean
gst_quarktv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
GstQuarkTV *filter = GST_QUARKTV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
structure = gst_caps_get_structure (incaps, 0);
if (gst_structure_get_int (structure, "width", &filter->width) &&
gst_structure_get_int (structure, "height", &filter->height)) {
filter->area = filter->width * filter->height;
ret = TRUE;
}
return ret;
}
static gboolean
gst_quarktv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GstQuarkTV *filter;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
filter = GST_QUARKTV (btrans);
structure = gst_caps_get_structure (caps, 0);
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
return ret;
}
static inline guint32
fastrand (void)
@ -110,153 +146,29 @@ fastrand (void)
return (fastrand_val = fastrand_val * 1103515245 + 12345);
}
GType
gst_quarktv_get_type (void)
{
static GType quarktv_type = 0;
if (!quarktv_type) {
static const GTypeInfo quarktv_info = {
sizeof (GstQuarkTVClass),
gst_quarktv_base_init,
NULL,
(GClassInitFunc) gst_quarktv_class_init,
NULL,
NULL,
sizeof (GstQuarkTV),
0,
(GInstanceInitFunc) gst_quarktv_init,
};
quarktv_type =
g_type_register_static (GST_TYPE_ELEMENT, "GstQuarkTV", &quarktv_info,
0);
}
return quarktv_type;
}
static void
gst_quarktv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_effectv_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_effectv_sink_template));
gst_element_class_set_details (element_class, &gst_quarktv_details);
}
static void
gst_quarktv_class_init (GstQuarkTVClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
gobject_class->set_property = gst_quarktv_set_property;
gobject_class->get_property = gst_quarktv_get_property;
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_PLANES,
g_param_spec_int ("planes", "Planes", "Number of frames in the buffer",
1, 32, PLANES, G_PARAM_READWRITE));
gstelement_class->change_state = gst_quarktv_change_state;
}
static GstPadLinkReturn
gst_quarktv_link (GstPad * pad, GstPad * peer)
{
GstQuarkTV *filter;
GstPad *otherpad;
//gint i;
//GstStructure *structure;
//GstPadLinkReturn res;
filter = GST_QUARKTV (gst_pad_get_parent (pad));
g_return_val_if_fail (GST_IS_QUARKTV (filter), GST_PAD_LINK_REFUSED);
otherpad = (pad == filter->srcpad ? filter->sinkpad : filter->srcpad);
#if 0
res = gst_pad_try_set_caps (otherpad, caps);
if (GST_PAD_LINK_FAILED (res))
return res;
structure = gst_caps_get_structure (caps, 0);
if (!gst_structure_get_int (structure, "width", &filter->width) ||
!gst_structure_get_int (structure, "height", &filter->height))
return GST_PAD_LINK_REFUSED;
filter->area = filter->width * filter->height;
for (i = 0; i < filter->planes; i++) {
if (filter->planetable[i])
gst_buffer_unref (filter->planetable[i]);
filter->planetable[i] = NULL;
}
#endif
return GST_PAD_LINK_OK;
}
static void
gst_quarktv_init (GstQuarkTV * filter)
{
filter->sinkpad =
gst_pad_new_from_template (gst_static_pad_template_get
(&gst_effectv_sink_template), "sink");
//gst_pad_set_getcaps_function (filter->sinkpad, gst_pad_proxy_getcaps);
gst_pad_set_chain_function (filter->sinkpad, gst_quarktv_chain);
gst_pad_set_link_function (filter->sinkpad, gst_quarktv_link);
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
filter->srcpad =
gst_pad_new_from_template (gst_static_pad_template_get
(&gst_effectv_src_template), "src");
//gst_pad_set_getcaps_function (filter->srcpad, gst_pad_proxy_getcaps);
gst_pad_set_link_function (filter->srcpad, gst_quarktv_link);
gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
filter->planes = PLANES;
filter->current_plane = filter->planes - 1;
}
static GstFlowReturn
gst_quarktv_chain (GstPad * pad, GstBuffer * buf)
gst_quarktv_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
{
GstQuarkTV *filter;
guint32 *src, *dest;
GstBuffer *outbuf;
gint area;
GstFlowReturn ret;
guint32 *src, *dest;
GstFlowReturn ret = GST_FLOW_OK;
filter = GST_QUARKTV (gst_pad_get_parent (pad));
filter = GST_QUARKTV (trans);
src = (guint32 *) GST_BUFFER_DATA (buf);
gst_buffer_stamp (out, in);
area = filter->area;
ret =
gst_pad_alloc_buffer (filter->srcpad, 0, area, GST_PAD_CAPS (pad),
&outbuf);
if (ret != GST_FLOW_OK)
goto no_buffer;
dest = (guint32 *) GST_BUFFER_DATA (outbuf);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
src = (guint32 *) GST_BUFFER_DATA (in);
dest = (guint32 *) GST_BUFFER_DATA (out);
if (filter->planetable[filter->current_plane])
gst_buffer_unref (filter->planetable[filter->current_plane]);
filter->planetable[filter->current_plane] = buf;
filter->planetable[filter->current_plane] = gst_buffer_ref (in);
/* For each pixel */
while (--area) {
GstBuffer *rand;
@ -265,27 +177,37 @@ gst_quarktv_chain (GstPad * pad, GstBuffer * buf)
filter->planetable[(filter->current_plane +
(fastrand () >> 24)) & (filter->planes - 1)];
/* Copy the pixel from the random buffer to dest */
dest[area] = (rand ? ((guint32 *) GST_BUFFER_DATA (rand))[area] : 0);
}
ret = gst_pad_push (filter->srcpad, outbuf);
filter->current_plane--;
if (filter->current_plane < 0)
filter->current_plane = filter->planes - 1;
return ret;
no_buffer:
{
return ret;
}
}
static GstStateChangeReturn
gst_quarktv_change_state (GstElement * element, GstStateChange transition)
{
GstQuarkTV *filter = GST_QUARKTV (element);
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
switch (transition) {
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
filter->planetable =
(GstBuffer **) g_malloc (filter->planes * sizeof (GstBuffer *));
memset (filter->planetable, 0, filter->planes * sizeof (GstBuffer *));
break;
}
default:
break;
}
if (GST_ELEMENT_CLASS (parent_class)->change_state)
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
@ -301,18 +223,11 @@ gst_quarktv_change_state (GstElement * element, GstStateChange transition)
filter->planetable = NULL;
break;
}
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
filter->planetable =
(GstBuffer **) g_malloc (filter->planes * sizeof (GstBuffer *));
memset (filter->planetable, 0, filter->planes * sizeof (GstBuffer *));
break;
}
default:
break;
}
return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
return ret;
}
@ -377,3 +292,72 @@ gst_quarktv_get_property (GObject * object, guint prop_id, GValue * value,
break;
}
}
static void
gst_quarktv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &quarktv_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_quarktv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_quarktv_src_template));
}
static void
gst_quarktv_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = GST_DEBUG_FUNCPTR (gst_quarktv_set_property);
gobject_class->get_property = GST_DEBUG_FUNCPTR (gst_quarktv_get_property);
element_class->change_state = GST_DEBUG_FUNCPTR (gst_quarktv_change_state);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_quarktv_set_caps);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_quarktv_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_quarktv_transform);
}
static void
gst_quarktv_init (GTypeInstance * instance, gpointer g_class)
{
GstQuarkTV *filter = GST_QUARKTV (instance);
filter->planes = PLANES;
filter->current_plane = filter->planes - 1;
}
GType
gst_quarktv_get_type (void)
{
static GType quarktv_type = 0;
if (!quarktv_type) {
static const GTypeInfo quarktv_info = {
sizeof (GstQuarkTVClass),
gst_quarktv_base_init,
NULL,
gst_quarktv_class_init,
NULL,
NULL,
sizeof (GstQuarkTV),
0,
gst_quarktv_init,
};
quarktv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstQuarkTV", &quarktv_info, 0);
}
return quarktv_type;
}

View file

@ -43,10 +43,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gstvideofilter.h>
#include <math.h>
#include <string.h>
#include <gst/gst.h>
#include <gstvideofilter.h>
#include <gst/video/video.h>
#define GST_TYPE_REVTV \
(gst_revtv_get_type())
@ -78,15 +81,6 @@ struct _GstRevTV
struct _GstRevTVClass
{
GstVideofilterClass parent_class;
void (*reset) (GstElement * element);
};
/* Filter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
@ -97,132 +91,87 @@ enum
ARG_GAIN
};
static void gst_revtv_base_init (gpointer g_class);
static void gst_revtv_class_init (gpointer g_class, gpointer class_data);
static void gst_revtv_init (GTypeInstance * instance, gpointer g_class);
GType gst_revtv_get_type (void);
static void gst_revtv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_revtv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_revtv_setup (GstVideofilter * videofilter);
static void gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s);
static GstElementDetails gst_revtv_details = GST_ELEMENT_DETAILS ("RevTV",
"Filter/Effect/Video",
"A video waveform monitor for each line of video processed",
"Wim Taymans <wim.taymans@chello.be>");
/* static guint gst_revtv_signals[LAST_SIGNAL] = { 0 }; */
static GstStaticPadTemplate gst_revtv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
GType
gst_revtv_get_type (void)
static GstStaticPadTemplate gst_revtv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_revtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
static GType revtv_type = 0;
GstRevTV *filter = GST_REVTV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
if (!revtv_type) {
static const GTypeInfo revtv_info = {
sizeof (GstRevTVClass),
gst_revtv_base_init,
NULL,
(GClassInitFunc) gst_revtv_class_init,
NULL,
NULL,
sizeof (GstRevTV),
0,
(GInstanceInitFunc) gst_revtv_init,
};
structure = gst_caps_get_structure (incaps, 0);
revtv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstRevTV", &revtv_info,
0);
}
return revtv_type;
}
static GstVideofilterFormat gst_revtv_formats[] = {
{"RGB ", 32, gst_revtv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000}
};
static void
gst_revtv_base_init (gpointer g_class)
{
/* elementfactory information */
static GstElementDetails gst_revtv_details = GST_ELEMENT_DETAILS ("RevTV",
"Filter/Effect/Video",
"A video waveform monitor for each line of video processed",
"Wim Taymans <wim.taymans@chello.be>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &gst_revtv_details);
for (i = 0; i < G_N_ELEMENTS (gst_revtv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class, gst_revtv_formats + i);
if (gst_structure_get_int (structure, "width", &filter->width) &&
gst_structure_get_int (structure, "height", &filter->height)) {
ret = TRUE;
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
return ret;
}
static void
gst_revtv_class_init (gpointer klass, gpointer class_data)
static gboolean
gst_revtv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GObjectClass *gobject_class;
GstVideofilterClass *videofilter_class;
GstRevTV *filter;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
gobject_class = G_OBJECT_CLASS (klass);
videofilter_class = GST_VIDEOFILTER_CLASS (klass);
filter = GST_REVTV (btrans);
gobject_class->set_property = gst_revtv_set_property;
gobject_class->get_property = gst_revtv_get_property;
structure = gst_caps_get_structure (caps, 0);
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DELAY,
g_param_spec_int ("delay", "Delay", "Delay in frames between updates",
1, 100, 1, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_LINESPACE,
g_param_spec_int ("linespace", "Linespace", "Control line spacing",
1, 100, 6, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_GAIN,
g_param_spec_int ("gain", "Gain", "Control gain",
1, 200, 50, G_PARAM_READWRITE));
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
videofilter_class->setup = gst_revtv_setup;
return ret;
}
static void
gst_revtv_init (GTypeInstance * instance, gpointer g_class)
{
GstRevTV *restv = GST_REVTV (instance);
restv->vgrabtime = 1;
restv->vgrab = 0;
restv->linespace = 6;
restv->vscale = 50;
}
static void
gst_revtv_setup (GstVideofilter * videofilter)
{
GstRevTV *revtv;
g_return_if_fail (GST_IS_REVTV (videofilter));
revtv = GST_REVTV (videofilter);
revtv->width = gst_videofilter_get_input_width (videofilter);
revtv->height = gst_videofilter_get_input_height (videofilter);
}
static void
gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
static GstFlowReturn
gst_revtv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
GstRevTV *filter;
guint32 *src, *dest;
gint width, height;
guint32 *nsrc;
gint y, x, R, G, B, yval;
GstFlowReturn ret = GST_FLOW_OK;
filter = GST_REVTV (videofilter);
filter = GST_REVTV (trans);
src = (guint32 *) s;
dest = (guint32 *) d;
gst_buffer_stamp (out, in);
src = (guint32 *) GST_BUFFER_DATA (in);
dest = (guint32 *) GST_BUFFER_DATA (out);
width = filter->width;
height = filter->height;
@ -247,6 +196,8 @@ gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
}
}
}
return ret;
}
static void
@ -299,3 +250,83 @@ gst_revtv_get_property (GObject * object, guint prop_id, GValue * value,
break;
}
}
static void
gst_revtv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &gst_revtv_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_revtv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_revtv_src_template));
}
static void
gst_revtv_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_revtv_set_property;
gobject_class->get_property = gst_revtv_get_property;
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DELAY,
g_param_spec_int ("delay", "Delay", "Delay in frames between updates",
1, 100, 1, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_LINESPACE,
g_param_spec_int ("linespace", "Linespace", "Control line spacing",
1, 100, 6, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_GAIN,
g_param_spec_int ("gain", "Gain", "Control gain",
1, 200, 50, G_PARAM_READWRITE));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_revtv_set_caps);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_revtv_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_revtv_transform);
}
static void
gst_revtv_init (GTypeInstance * instance, gpointer g_class)
{
GstRevTV *restv = GST_REVTV (instance);
restv->vgrabtime = 1;
restv->vgrab = 0;
restv->linespace = 6;
restv->vscale = 50;
}
GType
gst_revtv_get_type (void)
{
static GType revtv_type = 0;
if (!revtv_type) {
static const GTypeInfo revtv_info = {
sizeof (GstRevTVClass),
gst_revtv_base_init,
NULL,
(GClassInitFunc) gst_revtv_class_init,
NULL,
NULL,
sizeof (GstRevTV),
0,
(GInstanceInitFunc) gst_revtv_init,
};
revtv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstRevTV", &revtv_info,
0);
}
return revtv_type;
}

View file

@ -25,10 +25,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gstvideofilter.h>
#include <math.h>
#include <string.h>
#include <gst/gst.h>
#include <gstvideofilter.h>
#include <gst/video/video.h>
#define GST_TYPE_SHAGADELICTV \
(gst_shagadelictv_get_type())
@ -64,135 +67,81 @@ struct _GstShagadelicTVClass
GstVideofilterClass parent_class;
};
/* Filter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
ARG_0
};
static void gst_shagadelictv_base_init (gpointer g_class);
static void gst_shagadelictv_class_init (gpointer g_class, gpointer class_data);
static void gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class);
GType gst_shagadelictv_get_type (void);
static void gst_shagadelic_initialize (GstShagadelicTV * filter);
static void gst_shagadelictv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_shagadelictv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_shagadelictv_setup (GstVideofilter * videofilter);
static void gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d,
void *s);
static GstElementDetails shagadelictv_details =
GST_ELEMENT_DETAILS ("ShagadelicTV",
"Filter/Effect/Video",
"Oh behave, ShagedelicTV makes images shagadelic!",
"Wim Taymans <wim.taymans@chello.be>");
/*static guint gst_shagadelictv_signals[LAST_SIGNAL] = { 0 }; */
static GstStaticPadTemplate gst_shagadelictv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
GType
gst_shagadelictv_get_type (void)
static GstStaticPadTemplate gst_shagadelictv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_shagadelictv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
static GType shagadelictv_type = 0;
GstShagadelicTV *filter = GST_SHAGADELICTV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
if (!shagadelictv_type) {
static const GTypeInfo shagadelictv_info = {
sizeof (GstShagadelicTVClass),
gst_shagadelictv_base_init,
NULL,
(GClassInitFunc) gst_shagadelictv_class_init,
NULL,
NULL,
sizeof (GstShagadelicTV),
0,
(GInstanceInitFunc) gst_shagadelictv_init,
};
structure = gst_caps_get_structure (incaps, 0);
shagadelictv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstShagadelicTV",
&shagadelictv_info, 0);
}
return shagadelictv_type;
}
if (gst_structure_get_int (structure, "width", &filter->width) &&
gst_structure_get_int (structure, "height", &filter->height)) {
gint area = filter->width * filter->height;
static GstVideofilterFormat gst_shagadelictv_formats[] = {
{"RGB ", 32, gst_shagadelictv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000}
};
g_free (filter->ripple);
g_free (filter->spiral);
static void
gst_shagadelictv_base_init (gpointer g_class)
{
/* elementfactory information */
static GstElementDetails gst_shagadelictv_details =
GST_ELEMENT_DETAILS ("ShagadelicTV",
"Filter/Effect/Video",
"Oh behave, ShagedelicTV makes images shagadelic!",
"Wim Taymans <wim.taymans@chello.be>");
filter->ripple = (gchar *) g_malloc (area * 4);
filter->spiral = (gchar *) g_malloc (area);
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &gst_shagadelictv_details);
for (i = 0; i < G_N_ELEMENTS (gst_shagadelictv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_shagadelictv_formats + i);
gst_shagadelic_initialize (filter);
ret = TRUE;
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
return ret;
}
static void
gst_shagadelictv_class_init (gpointer g_class, gpointer class_data)
{
GObjectClass *gobject_class;
GstVideofilterClass *videofilter_class;
gobject_class = G_OBJECT_CLASS (g_class);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
gobject_class->set_property = gst_shagadelictv_set_property;
gobject_class->get_property = gst_shagadelictv_get_property;
videofilter_class->setup = gst_shagadelictv_setup;
}
static void
gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class)
{
GstShagadelicTV *filter = GST_SHAGADELICTV (instance);
filter->ripple = NULL;
filter->spiral = NULL;
}
static void
gst_shagadelictv_setup (GstVideofilter * videofilter)
static gboolean
gst_shagadelictv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GstShagadelicTV *filter;
int width = gst_videofilter_get_input_width (videofilter);
int height = gst_videofilter_get_input_height (videofilter);
int area;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
g_return_if_fail (GST_IS_SHAGADELICTV (videofilter));
filter = GST_SHAGADELICTV (videofilter);
filter = GST_SHAGADELICTV (btrans);
filter->width = width;
filter->height = height;
structure = gst_caps_get_structure (caps, 0);
area = filter->width * filter->height;
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
g_free (filter->ripple);
g_free (filter->spiral);
filter->ripple = (gchar *) g_malloc (area * 4);
filter->spiral = (gchar *) g_malloc (area);
gst_shagadelic_initialize (filter);
return ret;
}
static unsigned int
@ -261,8 +210,9 @@ gst_shagadelic_initialize (GstShagadelicTV * filter)
filter->phase = 0;
}
static void
gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
static GstFlowReturn
gst_shagadelictv_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
{
GstShagadelicTV *filter;
guint32 *src, *dest;
@ -270,11 +220,14 @@ gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
guint32 v;
guchar r, g, b;
gint width, height;
GstFlowReturn ret = GST_FLOW_OK;
filter = GST_SHAGADELICTV (videofilter);
filter = GST_SHAGADELICTV (trans);
src = (guint32 *) s;
dest = (guint32 *) d;
gst_buffer_stamp (out, in);
src = (guint32 *) GST_BUFFER_DATA (in);
dest = (guint32 *) GST_BUFFER_DATA (out);
width = filter->width;
height = filter->height;
@ -310,37 +263,72 @@ gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
filter->ry += filter->rvy;
filter->bx += filter->bvx;
filter->by += filter->bvy;
return ret;
}
static void
gst_shagadelictv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
gst_shagadelictv_base_init (gpointer g_class)
{
GstShagadelicTV *filter;
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
g_return_if_fail (GST_IS_SHAGADELICTV (object));
gst_element_class_set_details (element_class, &shagadelictv_details);
filter = GST_SHAGADELICTV (object);
switch (prop_id) {
default:
break;
}
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_shagadelictv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_shagadelictv_src_template));
}
static void
gst_shagadelictv_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
gst_shagadelictv_class_init (gpointer klass, gpointer class_data)
{
GstShagadelicTV *filter;
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
g_return_if_fail (GST_IS_SHAGADELICTV (object));
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
filter = GST_SHAGADELICTV (object);
parent_class = g_type_class_peek_parent (klass);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_shagadelictv_set_caps);
trans_class->get_unit_size =
GST_DEBUG_FUNCPTR (gst_shagadelictv_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_shagadelictv_transform);
}
static void
gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class)
{
GstShagadelicTV *filter = GST_SHAGADELICTV (instance);
filter->ripple = NULL;
filter->spiral = NULL;
}
GType
gst_shagadelictv_get_type (void)
{
static GType shagadelictv_type = 0;
if (!shagadelictv_type) {
static const GTypeInfo shagadelictv_info = {
sizeof (GstShagadelicTVClass),
gst_shagadelictv_base_init,
NULL,
(GClassInitFunc) gst_shagadelictv_class_init,
NULL,
NULL,
sizeof (GstShagadelicTV),
0,
(GInstanceInitFunc) gst_shagadelictv_init,
};
shagadelictv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstShagadelicTV",
&shagadelictv_info, 0);
}
return shagadelictv_type;
}

View file

@ -25,10 +25,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gstvideofilter.h>
#include <math.h>
#include <string.h>
#include <gst/gst.h>
#include <gstvideofilter.h>
#include <gst/video/video.h>
#define GST_TYPE_VERTIGOTV \
(gst_vertigotv_get_type())
@ -61,18 +64,11 @@ struct _GstVertigoTV
struct _GstVertigoTVClass
{
GstVideofilterClass parent_class;
void (*reset) (GstElement * element);
};
GType gst_vertigotv_get_type (void);
/* Filter signals and args */
enum
{
/* FILL ME */
RESET_SIGNAL,
LAST_SIGNAL
};
enum
{
ARG_0,
@ -80,152 +76,77 @@ enum
ARG_ZOOM_SPEED
};
static void gst_vertigotv_base_init (gpointer g_class);
static void gst_vertigotv_class_init (GstVertigoTVClass * klass,
gpointer class_data);
static void gst_vertigotv_init (GTypeInstance * instance, gpointer g_class);
static void gst_vertigotv_setup (GstVideofilter * videofilter);
static GstElementDetails vertigotv_details = GST_ELEMENT_DETAILS ("VertigoTV",
"Filter/Effect/Video",
"A loopback alpha blending effector with rotating and scaling",
"Wim Taymans <wim.taymans@chello.be>");
static void gst_vertigotv_reset_handler (GstElement * element);
static GstStaticPadTemplate gst_vertigotv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static void gst_vertigotv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_vertigotv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d,
void *s);
static GstStaticPadTemplate gst_vertigotv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static guint gst_vertigotv_signals[LAST_SIGNAL] = { 0 };
static GstVideofilterClass *parent_class = NULL;
GType
gst_vertigotv_get_type (void)
static gboolean
gst_vertigotv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
static GType vertigotv_type = 0;
GstVertigoTV *filter = GST_VERTIGOTV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
if (!vertigotv_type) {
static const GTypeInfo vertigotv_info = {
sizeof (GstVertigoTVClass),
gst_vertigotv_base_init,
NULL,
(GClassInitFunc) gst_vertigotv_class_init,
NULL,
NULL,
sizeof (GstVertigoTV),
0,
(GInstanceInitFunc) gst_vertigotv_init,
};
structure = gst_caps_get_structure (incaps, 0);
vertigotv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstVertigoTV",
&vertigotv_info, 0);
}
return vertigotv_type;
}
if (gst_structure_get_int (structure, "width", &filter->width) &&
gst_structure_get_int (structure, "height", &filter->height)) {
gint area = filter->width * filter->height;
static GstVideofilterFormat gst_vertigotv_formats[] = {
{"RGB ", 32, gst_vertigotv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000}
};
g_free (filter->buffer);
filter->buffer = (guint32 *) g_malloc (area * 2 * sizeof (guint32));
static void
gst_vertigotv_base_init (gpointer g_class)
{
/* elementfactory information */
static GstElementDetails vertigotv_details = GST_ELEMENT_DETAILS ("VertigoTV",
"Filter/Effect/Video",
"A loopback alpha blending effector with rotating and scaling",
"Wim Taymans <wim.taymans@chello.be>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
memset (filter->buffer, 0, area * 2 * sizeof (guint32));
filter->current_buffer = filter->buffer;
filter->alt_buffer = filter->buffer + area;
filter->phase = 0;
gst_element_class_set_details (element_class, &vertigotv_details);
for (i = 0; i < G_N_ELEMENTS (gst_vertigotv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_vertigotv_formats + i);
ret = TRUE;
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
return ret;
}
static void
gst_vertigotv_class_init (GstVertigoTVClass * klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstVideofilterClass *videofilter_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
videofilter_class = GST_VIDEOFILTER_CLASS (klass);
gst_vertigotv_signals[RESET_SIGNAL] =
g_signal_new ("reset-parms",
G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
G_STRUCT_OFFSET (GstVertigoTVClass, reset),
NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
klass->reset = gst_vertigotv_reset_handler;
gobject_class->set_property = gst_vertigotv_set_property;
gobject_class->get_property = gst_vertigotv_get_property;
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SPEED,
g_param_spec_float ("speed", "Speed", "Control the speed of movement",
0.01, 100.0, 0.02, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ZOOM_SPEED,
g_param_spec_float ("zoom_speed", "Zoom Speed",
"Control the rate of zooming", 1.01, 1.1, 1.01, G_PARAM_READWRITE));
videofilter_class->setup = gst_vertigotv_setup;
}
static void
gst_vertigotv_reset_handler (GstElement * element)
{
GstVertigoTV *filter = GST_VERTIGOTV (element);
filter->phase = 0.0;
filter->phase_increment = 0.02;
filter->zoomrate = 1.01;
}
static void
gst_vertigotv_setup (GstVideofilter * videofilter)
static gboolean
gst_vertigotv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GstVertigoTV *filter;
gint area;
int width = gst_videofilter_get_input_width (videofilter);
int height = gst_videofilter_get_input_height (videofilter);
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
g_return_if_fail (GST_IS_VERTIGOTV (videofilter));
filter = GST_VERTIGOTV (videofilter);
filter = GST_VERTIGOTV (btrans);
filter->width = width;
filter->height = height;
structure = gst_caps_get_structure (caps, 0);
area = width * height;
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
g_free (filter->buffer);
filter->buffer = (guint32 *) g_malloc (area * 2 * sizeof (guint32));
memset (filter->buffer, 0, area * 2 * sizeof (guint32));
filter->current_buffer = filter->buffer;
filter->alt_buffer = filter->buffer + area;
filter->phase = 0;
}
static void
gst_vertigotv_init (GTypeInstance * instance, gpointer g_class)
{
GstVertigoTV *filter = GST_VERTIGOTV (instance);
filter->buffer = NULL;
filter->phase = 0.0;
filter->phase_increment = 0.02;
filter->zoomrate = 1.01;
return ret;
}
static void
@ -276,22 +197,22 @@ gst_vertigotv_set_parms (GstVertigoTV * filter)
filter->phase = 0;
}
static void
gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
static GstFlowReturn
gst_vertigotv_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
{
GstVertigoTV *filter;
guint32 *src, *dest;
guint32 *p;
guint32 *src, *dest, *p;
guint32 v;
gint x, y;
gint ox, oy;
gint i;
gint width, height, area;
gint x, y, ox, oy, i, width, height, area;
GstFlowReturn ret = GST_FLOW_OK;
filter = GST_VERTIGOTV (videofilter);
filter = GST_VERTIGOTV (trans);
src = (guint32 *) s;
dest = (guint32 *) d;
gst_buffer_stamp (out, in);
src = (guint32 *) GST_BUFFER_DATA (in);
dest = (guint32 *) GST_BUFFER_DATA (out);
width = filter->width;
height = filter->height;
@ -327,6 +248,8 @@ gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
p = filter->current_buffer;
filter->current_buffer = filter->alt_buffer;
filter->alt_buffer = p;
return ret;
}
static void
@ -373,3 +296,80 @@ gst_vertigotv_get_property (GObject * object, guint prop_id, GValue * value,
break;
}
}
static void
gst_vertigotv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &vertigotv_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_vertigotv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_vertigotv_src_template));
}
static void
gst_vertigotv_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_vertigotv_set_property;
gobject_class->get_property = gst_vertigotv_get_property;
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SPEED,
g_param_spec_float ("speed", "Speed", "Control the speed of movement",
0.01, 100.0, 0.02, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ZOOM_SPEED,
g_param_spec_float ("zoom_speed", "Zoom Speed",
"Control the rate of zooming", 1.01, 1.1, 1.01, G_PARAM_READWRITE));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_vertigotv_set_caps);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_vertigotv_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_vertigotv_transform);
}
static void
gst_vertigotv_init (GTypeInstance * instance, gpointer g_class)
{
GstVertigoTV *filter = GST_VERTIGOTV (instance);
filter->buffer = NULL;
filter->phase = 0.0;
filter->phase_increment = 0.02;
filter->zoomrate = 1.01;
}
GType
gst_vertigotv_get_type (void)
{
static GType vertigotv_type = 0;
if (!vertigotv_type) {
static const GTypeInfo vertigotv_info = {
sizeof (GstVertigoTVClass),
gst_vertigotv_base_init,
NULL,
(GClassInitFunc) gst_vertigotv_class_init,
NULL,
NULL,
sizeof (GstVertigoTV),
0,
(GInstanceInitFunc) gst_vertigotv_init,
};
vertigotv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstVertigoTV",
&vertigotv_info, 0);
}
return vertigotv_type;
}

View file

@ -37,17 +37,17 @@
#include "config.h"
#endif
#include <gst/gst.h>
#include <gstvideofilter.h>
#include <string.h>
#include <math.h>
#include "gsteffectv.h"
#include <gst/video/video.h>
#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif
#define GST_TYPE_WARPTV \
(gst_warptv_get_type())
#define GST_WARPTV(obj) \
@ -79,196 +79,85 @@ struct _GstWarpTVClass
GstVideofilterClass parent_class;
};
GType gst_warptv_get_type (void);
/* GstWarpTV signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
ARG_0
/* FILL ME */
};
static void gst_warptv_base_init (gpointer g_class);
static void gst_warptv_class_init (gpointer g_class, gpointer class_data);
static void gst_warptv_init (GTypeInstance * instance, gpointer g_class);
static void gst_warptv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_warptv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_warptv_setup (GstVideofilter * videofilter);
static void initSinTable (GstWarpTV * filter);
static void initOffsTable (GstWarpTV * filter);
static void initDistTable (GstWarpTV * filter);
static void gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s);
GType
gst_warptv_get_type (void)
static GstElementDetails warptv_details = GST_ELEMENT_DETAILS ("WarpTV",
"Filter/Effect/Video",
"WarpTV does realtime goo'ing of the video input",
"Sam Lantinga <slouken@devolution.com>");
static GstStaticPadTemplate gst_warptv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
);
static GstStaticPadTemplate gst_warptv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
);
static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_warptv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
static GType warptv_type = 0;
GstWarpTV *filter = GST_WARPTV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
if (!warptv_type) {
static const GTypeInfo warptv_info = {
sizeof (GstWarpTVClass),
gst_warptv_base_init,
NULL,
gst_warptv_class_init,
NULL,
NULL,
sizeof (GstWarpTV),
0,
gst_warptv_init,
};
structure = gst_caps_get_structure (incaps, 0);
warptv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstWarpTV", &warptv_info, 0);
}
return warptv_type;
}
if (gst_structure_get_int (structure, "width", &filter->width) &&
gst_structure_get_int (structure, "height", &filter->height)) {
g_free (filter->disttable);
g_free (filter->offstable);
static GstVideofilterFormat gst_warptv_formats[] = {
{"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x00ff0000, 0x0000ff00,
0x000000ff},
{"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0xff000000, 0x00ff0000,
0x0000ff00},
{"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x000000ff, 0x0000ff00,
0x00ff0000},
{"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000},
};
filter->offstable = g_malloc (filter->height * sizeof (guint32));
filter->disttable =
g_malloc (filter->width * filter->height * sizeof (guint32));
static void
gst_warptv_base_init (gpointer g_class)
{
static GstElementDetails warptv_details = GST_ELEMENT_DETAILS ("WarpTV",
"Filter/Effect/Video",
"WarpTV does realtime goo'ing of the video input",
"Sam Lantinga <slouken@devolution.com>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &warptv_details);
for (i = 0; i < G_N_ELEMENTS (gst_warptv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_warptv_formats + i);
initSinTable (filter);
initOffsTable (filter);
initDistTable (filter);
ret = TRUE;
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
return ret;
}
static void
gst_warptv_class_init (gpointer g_class, gpointer class_data)
static gboolean
gst_warptv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GObjectClass *gobject_class;
GstVideofilterClass *videofilter_class;
GstWarpTV *filter;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
gobject_class = G_OBJECT_CLASS (g_class);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
filter = GST_WARPTV (btrans);
gobject_class->set_property = gst_warptv_set_property;
gobject_class->get_property = gst_warptv_get_property;
structure = gst_caps_get_structure (caps, 0);
#if 0
g_object_class_install_property (gobject_class, ARG_METHOD,
g_param_spec_enum ("method", "method", "method",
GST_TYPE_WARPTV_METHOD, GST_WARPTV_METHOD_1, G_PARAM_READWRITE));
#endif
videofilter_class->setup = gst_warptv_setup;
}
static void
gst_warptv_init (GTypeInstance * instance, gpointer g_class)
{
GstWarpTV *warptv = GST_WARPTV (instance);
GstVideofilter *videofilter;
GST_DEBUG ("gst_warptv_init");
videofilter = GST_VIDEOFILTER (warptv);
/* do stuff */
}
static void
gst_warptv_set_property (GObject * object, guint prop_id, const GValue * value,
GParamSpec * pspec)
{
GstWarpTV *src;
g_return_if_fail (GST_IS_WARPTV (object));
src = GST_WARPTV (object);
GST_DEBUG ("gst_warptv_set_property");
switch (prop_id) {
#if 0
case ARG_METHOD:
src->method = g_value_get_enum (value);
break;
#endif
default:
break;
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
}
static void
gst_warptv_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstWarpTV *src;
g_return_if_fail (GST_IS_WARPTV (object));
src = GST_WARPTV (object);
switch (prop_id) {
#if 0
case ARG_METHOD:
g_value_set_enum (value, src->method);
break;
#endif
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_warptv_setup (GstVideofilter * videofilter)
{
GstWarpTV *warptv;
int width = gst_videofilter_get_input_width (videofilter);
int height = gst_videofilter_get_input_height (videofilter);
g_return_if_fail (GST_IS_WARPTV (videofilter));
warptv = GST_WARPTV (videofilter);
/* if any setup needs to be done, do it here */
warptv->width = width;
warptv->height = height;
#if 0
/* FIXME this should be reset in PAUSE->READY, not here */
warptv->tval = 0;
#endif
g_free (warptv->disttable);
g_free (warptv->offstable);
warptv->offstable = g_malloc (height * sizeof (guint32));
warptv->disttable = g_malloc (width * height * sizeof (guint32));
initSinTable (warptv);
initOffsTable (warptv);
initDistTable (warptv);
return ret;
}
static void
@ -323,21 +212,21 @@ initDistTable (GstWarpTV * filter)
#endif
}
static void
gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
static GstFlowReturn
gst_warptv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
GstWarpTV *warptv;
int width = gst_videofilter_get_input_width (videofilter);
int height = gst_videofilter_get_input_height (videofilter);
guint32 *src = s;
guint32 *dest = d;
GstWarpTV *warptv = GST_WARPTV (trans);
int width = warptv->width;
int height = warptv->height;
guint32 *src = (guint32 *) GST_BUFFER_DATA (in);
guint32 *dest = (guint32 *) GST_BUFFER_DATA (out);
gint xw, yw, cw;
gint32 c, i, x, y, dx, dy, maxx, maxy;
gint32 skip, *ctptr, *distptr;
gint32 *sintable, *ctable;
GstFlowReturn ret = GST_FLOW_OK;
g_return_if_fail (GST_IS_WARPTV (videofilter));
warptv = GST_WARPTV (videofilter);
gst_buffer_stamp (out, in);
xw = (gint) (sin ((warptv->tval + 100) * M_PI / 128) * 30);
yw = (gint) (sin ((warptv->tval) * M_PI / 256) * -35);
@ -383,4 +272,66 @@ gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
}
warptv->tval = (warptv->tval + 1) & 511;
return ret;
}
static void
gst_warptv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &warptv_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_warptv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_warptv_src_template));
}
static void
gst_warptv_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_warptv_set_caps);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_warptv_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_warptv_transform);
}
static void
gst_warptv_init (GTypeInstance * instance, gpointer g_class)
{
}
GType
gst_warptv_get_type (void)
{
static GType warptv_type = 0;
if (!warptv_type) {
static const GTypeInfo warptv_info = {
sizeof (GstWarpTVClass),
gst_warptv_base_init,
NULL,
gst_warptv_class_init,
NULL,
NULL,
sizeof (GstWarpTV),
0,
gst_warptv_init,
};
warptv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstWarpTV", &warptv_info, 0);
}
return warptv_type;
}

View file

@ -1,5 +1,5 @@
lib_LTLIBRARIES = libgstvideofilter-@GST_MAJORMINOR@.la
noinst_LTLIBRARIES = libgstvideoexample.la
# noinst_LTLIBRARIES = libgstvideoexample.la
plugin_LTLIBRARIES = libgstvideoflip.la
noinst_HEADERS = gstvideofilter.h gstvideoflip.h
@ -11,14 +11,16 @@ libgstvideofilter_@GST_MAJORMINOR@_la_SOURCES = gstvideofilter.c gstvideofilter.
libgstvideofilter_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS)
libgstvideofilter_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIBS)
libgstvideoexample_la_SOURCES = gstvideoexample.c
libgstvideoexample_la_CFLAGS = $(GST_CFLAGS)
libgstvideoexample_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS)
libgstvideoexample_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
# libgstvideoexample_la_SOURCES = gstvideoexample.c
# libgstvideoexample_la_CFLAGS = $(GST_CFLAGS)
# libgstvideoexample_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS)
# libgstvideoexample_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvideoflip_la_SOURCES = gstvideoflip.c
libgstvideoflip_la_CFLAGS = $(GST_CFLAGS)
libgstvideoflip_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS)
libgstvideoflip_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS)
libgstvideoflip_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS) \
$(GST_BASE_LIBS) $(GST_PLUGINS_BASE_LIBS)
libgstvideoflip_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
# libgstvideobalance_la_SOURCES = gstvideobalance.c

View file

@ -27,7 +27,6 @@
#include "config.h"
#endif
/*#define DEBUG_ENABLED */
#include "gstvideobalance.h"
#ifdef HAVE_LIBOIL
#include <liboil/liboil.h>
@ -38,12 +37,6 @@
#include <gst/colorbalance/colorbalance.h>
/* GstVideobalance signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
ARG_0,
@ -117,11 +110,6 @@ gst_videobalance_get_type (void)
return videobalance_type;
}
static GstVideofilterFormat gst_videobalance_formats[] = {
{"I420", 12, gst_videobalance_planar411,},
};
static void
gst_videobalance_base_init (gpointer g_class)
{

View file

@ -21,12 +21,8 @@
#ifndef __GST_VIDEOBALANCE_H__
#define __GST_VIDEOBALANCE_H__
#include <gst/gst.h>
#include "gstvideofilter.h"
G_BEGIN_DECLS
#define GST_TYPE_VIDEOBALANCE \

View file

@ -22,40 +22,15 @@
#include "config.h"
#endif
#include <string.h>
/*#define DEBUG_ENABLED */
#include "gstvideofilter.h"
GST_DEBUG_CATEGORY_STATIC (gst_videofilter_debug);
#define GST_CAT_DEFAULT gst_videofilter_debug
/* GstVideofilter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
ARG_0,
ARG_METHOD
/* FILL ME */
};
static void gst_videofilter_base_init (gpointer g_class);
static void gst_videofilter_class_init (gpointer g_class, gpointer class_data);
static void gst_videofilter_init (GTypeInstance * instance, gpointer g_class);
static void gst_videofilter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_videofilter_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstFlowReturn gst_videofilter_chain (GstPad * pad, GstBuffer * buffer);
GstCaps *gst_videofilter_class_get_capslist (GstVideofilterClass * klass);
static GstElementClass *parent_class = NULL;
static GstBaseTransformClass *parent_class = NULL;
GType
gst_videofilter_get_type (void)
@ -65,7 +40,7 @@ gst_videofilter_get_type (void)
if (!videofilter_type) {
static const GTypeInfo videofilter_info = {
sizeof (GstVideofilterClass),
gst_videofilter_base_init,
NULL,
NULL,
gst_videofilter_class_init,
NULL,
@ -75,484 +50,37 @@ gst_videofilter_get_type (void)
gst_videofilter_init,
};
videofilter_type = g_type_register_static (GST_TYPE_ELEMENT,
videofilter_type = g_type_register_static (GST_TYPE_BASE_TRANSFORM,
"GstVideofilter", &videofilter_info, G_TYPE_FLAG_ABSTRACT);
}
return videofilter_type;
}
static void
gst_videofilter_base_init (gpointer g_class)
{
static GstElementDetails videofilter_details = {
"Video scaler",
"Filter/Effect/Video",
"Resizes video",
"David Schleef <ds@schleef.org>"
};
GstVideofilterClass *klass = (GstVideofilterClass *) g_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
klass->formats = g_ptr_array_new ();
gst_element_class_set_details (element_class, &videofilter_details);
}
static void
gst_videofilter_class_init (gpointer g_class, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseTransformClass *trans_class;
GstVideofilterClass *klass;
klass = (GstVideofilterClass *) g_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
gobject_class->set_property = gst_videofilter_set_property;
gobject_class->get_property = gst_videofilter_get_property;
parent_class = g_type_class_peek_parent (klass);
GST_DEBUG_CATEGORY_INIT (gst_videofilter_debug, "videofilter", 0,
"videofilter");
}
static GstStructure *
gst_videofilter_format_get_structure (GstVideofilterFormat * format)
{
unsigned int fourcc;
GstStructure *structure;
if (format->filter_func == NULL)
return NULL;
fourcc =
GST_MAKE_FOURCC (format->fourcc[0], format->fourcc[1], format->fourcc[2],
format->fourcc[3]);
if (format->depth) {
structure = gst_structure_new ("video/x-raw-rgb",
"depth", G_TYPE_INT, format->depth,
"bpp", G_TYPE_INT, format->bpp,
"endianness", G_TYPE_INT, format->endianness,
"red_mask", G_TYPE_INT, format->red_mask,
"green_mask", G_TYPE_INT, format->green_mask,
"blue_mask", G_TYPE_INT, format->blue_mask, NULL);
} else {
structure = gst_structure_new ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, fourcc, NULL);
}
gst_structure_set (structure,
"height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
"width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
"framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
return structure;
}
GstCaps *
gst_videofilter_class_get_capslist (GstVideofilterClass * klass)
{
GstCaps *caps;
GstStructure *structure;
int i;
caps = gst_caps_new_empty ();
for (i = 0; i < klass->formats->len; i++) {
structure =
gst_videofilter_format_get_structure (g_ptr_array_index (klass->formats,
i));
gst_caps_append_structure (caps, structure);
}
return caps;
}
static GstCaps *
gst_videofilter_getcaps (GstPad * pad)
{
GstVideofilter *videofilter;
GstVideofilterClass *klass;
GstCaps *caps;
GstPad *peer;
int i;
videofilter = GST_VIDEOFILTER (GST_PAD_PARENT (pad));
GST_DEBUG_OBJECT (videofilter, "gst_videofilter_getcaps");
klass = GST_VIDEOFILTER_CLASS (G_OBJECT_GET_CLASS (videofilter));
/* we can handle anything that was registered */
caps = gst_caps_new_empty ();
for (i = 0; i < klass->formats->len; i++) {
GstCaps *fromcaps;
fromcaps =
gst_caps_new_full (gst_videofilter_format_get_structure
(g_ptr_array_index (klass->formats, i)), NULL);
gst_caps_append (caps, fromcaps);
}
peer = gst_pad_get_peer (pad);
if (peer) {
GstCaps *peercaps;
peercaps = gst_pad_get_caps (peer);
if (peercaps) {
GstCaps *icaps;
icaps = gst_caps_intersect (peercaps, caps);
gst_caps_unref (peercaps);
gst_caps_unref (caps);
caps = icaps;
}
//gst_object_unref (peer);
}
return caps;
}
static gboolean
gst_videofilter_setcaps (GstPad * pad, GstCaps * caps)
{
GstVideofilter *videofilter;
GstStructure *structure;
int width, height;
const GValue *framerate;
int ret;
videofilter = GST_VIDEOFILTER (GST_PAD_PARENT (pad));
structure = gst_caps_get_structure (caps, 0);
videofilter->format =
gst_videofilter_find_format_by_structure (videofilter, structure);
g_return_val_if_fail (videofilter->format, GST_PAD_LINK_REFUSED);
ret = gst_structure_get_int (structure, "width", &width);
ret &= gst_structure_get_int (structure, "height", &height);
framerate = gst_structure_get_value (structure, "framerate");
ret &= (framerate != NULL && GST_VALUE_HOLDS_FRACTION (framerate));
if (!ret)
return FALSE;
gst_pad_set_caps (videofilter->srcpad, caps);
GST_DEBUG_OBJECT (videofilter, "width %d height %d", width, height);
#if 0
if (pad == videofilter->srcpad) {
videofilter->to_width = width;
videofilter->to_height = height;
} else {
videofilter->from_width = width;
videofilter->from_height = height;
}
#endif
videofilter->to_width = width;
videofilter->to_height = height;
videofilter->from_width = width;
videofilter->from_height = height;
g_value_copy (framerate, &videofilter->framerate);
gst_videofilter_setup (videofilter);
return TRUE;
}
static void
gst_videofilter_init (GTypeInstance * instance, gpointer g_class)
{
GstVideofilter *videofilter = GST_VIDEOFILTER (instance);
GstPadTemplate *pad_template;
GST_DEBUG_OBJECT (videofilter, "gst_videofilter_init");
pad_template =
gst_element_class_get_pad_template (GST_ELEMENT_CLASS (g_class), "sink");
g_return_if_fail (pad_template != NULL);
videofilter->sinkpad = gst_pad_new_from_template (pad_template, "sink");
gst_element_add_pad (GST_ELEMENT (videofilter), videofilter->sinkpad);
gst_pad_set_chain_function (videofilter->sinkpad, gst_videofilter_chain);
gst_pad_set_setcaps_function (videofilter->sinkpad, gst_videofilter_setcaps);
gst_pad_set_getcaps_function (videofilter->sinkpad, gst_videofilter_getcaps);
pad_template =
gst_element_class_get_pad_template (GST_ELEMENT_CLASS (g_class), "src");
g_return_if_fail (pad_template != NULL);
videofilter->srcpad = gst_pad_new_from_template (pad_template, "src");
gst_element_add_pad (GST_ELEMENT (videofilter), videofilter->srcpad);
gst_pad_set_getcaps_function (videofilter->srcpad, gst_videofilter_getcaps);
videofilter->inited = FALSE;
g_value_init (&videofilter->framerate, GST_TYPE_FRACTION);
}
static GstFlowReturn
gst_videofilter_chain (GstPad * pad, GstBuffer * buf)
{
GstVideofilter *videofilter;
guchar *data;
gulong size;
GstBuffer *outbuf;
GstFlowReturn ret;
videofilter = GST_VIDEOFILTER (GST_PAD_PARENT (pad));
GST_DEBUG_OBJECT (videofilter, "gst_videofilter_chain");
if (videofilter->passthru) {
return gst_pad_push (videofilter->srcpad, buf);
}
if (GST_PAD_CAPS (pad) == NULL) {
return GST_FLOW_NOT_NEGOTIATED;
}
data = GST_BUFFER_DATA (buf);
size = GST_BUFFER_SIZE (buf);
GST_LOG_OBJECT (videofilter, "got buffer of %ld bytes in '%s'", size,
GST_OBJECT_NAME (videofilter));
GST_LOG_OBJECT (videofilter,
"size=%ld from=%dx%d to=%dx%d fromsize=%ld (should be %d) tosize=%d",
size, videofilter->from_width, videofilter->from_height,
videofilter->to_width, videofilter->to_height, size,
videofilter->from_buf_size, videofilter->to_buf_size);
if (size > videofilter->from_buf_size) {
GST_INFO_OBJECT (videofilter, "buffer size %ld larger than expected (%d)",
size, videofilter->from_buf_size);
return GST_FLOW_ERROR;
}
ret = gst_pad_alloc_buffer (videofilter->srcpad, GST_BUFFER_OFFSET_NONE,
videofilter->to_buf_size, GST_PAD_CAPS (videofilter->srcpad), &outbuf);
if (ret != GST_FLOW_OK)
goto no_buffer;
g_return_val_if_fail (GST_BUFFER_DATA (outbuf), GST_FLOW_ERROR);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf);
g_return_val_if_fail (videofilter->format, GST_FLOW_ERROR);
GST_DEBUG_OBJECT (videofilter, "format %s", videofilter->format->fourcc);
videofilter->in_buf = buf;
videofilter->out_buf = outbuf;
videofilter->format->filter_func (videofilter, GST_BUFFER_DATA (outbuf),
data);
gst_buffer_unref (buf);
GST_LOG_OBJECT (videofilter, "pushing buffer of %d bytes in '%s'",
GST_BUFFER_SIZE (outbuf), GST_OBJECT_NAME (videofilter));
ret = gst_pad_push (videofilter->srcpad, outbuf);
return ret;
no_buffer:
{
return ret;
}
}
static void
gst_videofilter_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstVideofilter *videofilter;
g_return_if_fail (GST_IS_VIDEOFILTER (object));
videofilter = GST_VIDEOFILTER (object);
GST_DEBUG_OBJECT (videofilter, "gst_videofilter_set_property");
switch (prop_id) {
default:
break;
}
}
static void
gst_videofilter_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstVideofilter *videofilter;
g_return_if_fail (GST_IS_VIDEOFILTER (object));
videofilter = GST_VIDEOFILTER (object);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
int
gst_videofilter_get_input_width (GstVideofilter * videofilter)
{
g_return_val_if_fail (GST_IS_VIDEOFILTER (videofilter), 0);
return videofilter->from_width;
}
int
gst_videofilter_get_input_height (GstVideofilter * videofilter)
{
g_return_val_if_fail (GST_IS_VIDEOFILTER (videofilter), 0);
return videofilter->from_height;
}
void
gst_videofilter_set_output_size (GstVideofilter * videofilter,
int width, int height)
{
GstCaps *srccaps;
GstStructure *structure;
g_return_if_fail (GST_IS_VIDEOFILTER (videofilter));
videofilter->to_width = width;
videofilter->to_height = height;
videofilter->to_buf_size = (videofilter->to_width * videofilter->to_height
* videofilter->format->bpp) / 8;
//srccaps = gst_caps_copy (gst_pad_get_negotiated_caps (videofilter->srcpad));
srccaps = gst_caps_copy (GST_PAD_CAPS (videofilter->srcpad));
structure = gst_caps_get_structure (srccaps, 0);
gst_structure_set (structure, "width", G_TYPE_INT, width,
"height", G_TYPE_INT, height, NULL);
gst_pad_set_caps (videofilter->srcpad, srccaps);
}
void
gst_videofilter_setup (GstVideofilter * videofilter)
{
GstVideofilterClass *klass;
GST_DEBUG_OBJECT (videofilter, "setup");
klass = GST_VIDEOFILTER_CLASS (G_OBJECT_GET_CLASS (videofilter));
if (klass->setup) {
GST_DEBUG_OBJECT (videofilter, "calling class setup method");
klass->setup (videofilter);
}
if (videofilter->to_width == 0) {
videofilter->to_width = videofilter->from_width;
}
if (videofilter->to_height == 0) {
videofilter->to_height = videofilter->from_height;
}
g_return_if_fail (videofilter->format != NULL);
g_return_if_fail (videofilter->from_width > 0);
g_return_if_fail (videofilter->from_height > 0);
g_return_if_fail (videofilter->to_width > 0);
g_return_if_fail (videofilter->to_height > 0);
videofilter->from_buf_size =
(videofilter->from_width * videofilter->from_height *
videofilter->format->bpp) / 8;
videofilter->to_buf_size =
(videofilter->to_width * videofilter->to_height *
videofilter->format->bpp) / 8;
GST_DEBUG_OBJECT (videofilter, "from_buf_size %d to_buf_size %d",
videofilter->from_buf_size, videofilter->to_buf_size);
videofilter->inited = TRUE;
}
GstVideofilterFormat *
gst_videofilter_find_format_by_structure (GstVideofilter * videofilter,
const GstStructure * structure)
{
int i;
GstVideofilterClass *klass;
GstVideofilterFormat *format;
gboolean ret;
klass = GST_VIDEOFILTER_CLASS (G_OBJECT_GET_CLASS (videofilter));
g_return_val_if_fail (structure != NULL, NULL);
if (strcmp (gst_structure_get_name (structure), "video/x-raw-yuv") == 0) {
guint32 fourcc;
ret = gst_structure_get_fourcc (structure, "format", &fourcc);
if (!ret)
return NULL;
for (i = 0; i < klass->formats->len; i++) {
guint32 format_fourcc;
format = g_ptr_array_index (klass->formats, i);
format_fourcc = GST_STR_FOURCC (format->fourcc);
if (format->depth == 0 && format_fourcc == fourcc) {
return format;
}
}
} else if (strcmp (gst_structure_get_name (structure), "video/x-raw-rgb")
== 0) {
int bpp;
int depth;
int endianness;
int red_mask;
int green_mask;
int blue_mask;
ret = gst_structure_get_int (structure, "bpp", &bpp);
ret &= gst_structure_get_int (structure, "depth", &depth);
ret &= gst_structure_get_int (structure, "endianness", &endianness);
ret &= gst_structure_get_int (structure, "red_mask", &red_mask);
ret &= gst_structure_get_int (structure, "green_mask", &green_mask);
ret &= gst_structure_get_int (structure, "blue_mask", &blue_mask);
if (!ret)
return NULL;
for (i = 0; i < klass->formats->len; i++) {
format = g_ptr_array_index (klass->formats, i);
if (format->bpp == bpp && format->depth == depth &&
format->endianness == endianness && format->red_mask == red_mask &&
format->green_mask == green_mask && format->blue_mask == blue_mask) {
return format;
}
}
}
return NULL;
}
void
gst_videofilter_class_add_format (GstVideofilterClass * videofilterclass,
GstVideofilterFormat * format)
{
g_ptr_array_add (videofilterclass->formats, format);
}
void
gst_videofilter_class_add_pad_templates (GstVideofilterClass *
videofilter_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (videofilter_class);
gst_element_class_add_pad_template (element_class,
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
gst_videofilter_class_get_capslist (videofilter_class)));
gst_element_class_add_pad_template (element_class,
gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_videofilter_class_get_capslist (videofilter_class)));
}

View file

@ -21,32 +21,13 @@
#ifndef __GST_VIDEOFILTER_H__
#define __GST_VIDEOFILTER_H__
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
G_BEGIN_DECLS
typedef struct _GstVideofilter GstVideofilter;
typedef struct _GstVideofilterClass GstVideofilterClass;
typedef void (*GstVideofilterFilterFunc)(GstVideofilter *filter,
void *out_data, void *in_data);
typedef void (*GstVideofilterSetupFunc)(GstVideofilter *filter);
typedef struct _GstVideofilterFormat GstVideofilterFormat;
struct _GstVideofilterFormat {
char *fourcc;
int bpp;
GstVideofilterFilterFunc filter_func;
int depth;
unsigned int endianness;
unsigned int red_mask;
unsigned int green_mask;
unsigned int blue_mask;
};
#define GST_TYPE_VIDEOFILTER \
(gst_videofilter_get_type())
#define GST_VIDEOFILTER(obj) \
@ -59,51 +40,17 @@ struct _GstVideofilterFormat {
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEOFILTER))
struct _GstVideofilter {
GstElement element;
GstBaseTransform element;
GstPad *sinkpad,*srcpad;
/* video state */
gboolean inited;
GstVideofilterFormat *format;
gint to_width;
gint to_height;
gint from_width;
gint from_height;
gboolean passthru;
/* private */
gint from_buf_size;
gint to_buf_size;
GValue framerate;
GstBuffer *in_buf;
GstBuffer *out_buf;
};
struct _GstVideofilterClass {
GstElementClass parent_class;
GPtrArray *formats;
GstVideofilterSetupFunc setup;
GstBaseTransformClass parent_class;
};
GType gst_videofilter_get_type(void);
int gst_videofilter_get_input_width(GstVideofilter *videofilter);
int gst_videofilter_get_input_height(GstVideofilter *videofilter);
void gst_videofilter_set_output_size(GstVideofilter *videofilter,
int width, int height);
GstVideofilterFormat *gst_videofilter_find_format_by_structure (GstVideofilter *filter,
const GstStructure *structure);
GstCaps *gst_videofilter_class_get_capslist(GstVideofilterClass *videofilterclass);
void gst_videofilter_setup (GstVideofilter * videofilter);
void gst_videofilter_class_add_format(GstVideofilterClass *videofilterclass,
GstVideofilterFormat *format);
void gst_videofilter_class_add_pad_templates (GstVideofilterClass *videofilterclass);
G_END_DECLS
#endif /* __GST_VIDEOFILTER_H__ */

View file

@ -27,17 +27,11 @@
#include "config.h"
#endif
/*#define DEBUG_ENABLED */
#include "gstvideoflip.h"
#include <string.h>
#include <gst/video/video.h>
/* GstVideoflip signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
ARG_0,
@ -45,21 +39,30 @@ enum
/* FILL ME */
};
GST_DEBUG_CATEGORY_STATIC (gst_videoflip_debug);
#define GST_CAT_DEFAULT gst_videoflip_debug
GST_DEBUG_CATEGORY (videoflip_debug);
#define GST_CAT_DEFAULT videoflip_debug
static void gst_videoflip_base_init (gpointer g_class);
static void gst_videoflip_class_init (gpointer g_class, gpointer class_data);
static void gst_videoflip_init (GTypeInstance * instance, gpointer g_class);
static GstElementDetails videoflip_details =
GST_ELEMENT_DETAILS ("Video Flipper",
"Filter/Effect/Video",
"Flips and rotates video",
"David Schleef <ds@schleef.org>");
static void gst_videoflip_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_videoflip_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstStaticPadTemplate gst_videoflip_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ IYUV, I420, YV12 }"))
);
static void gst_videoflip_planar411 (GstVideofilter * videofilter, void *dest,
void *src);
static void gst_videoflip_setup (GstVideofilter * videofilter);
static GstStaticPadTemplate gst_videoflip_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ IYUV, I420, YV12 }"))
);
static GstVideofilterClass *parent_class = NULL;
#define GST_TYPE_VIDEOFLIP_METHOD (gst_videoflip_method_get_type())
@ -72,7 +75,7 @@ gst_videoflip_method_get_type (void)
{GST_VIDEOFLIP_METHOD_90R, "Rotate clockwise 90 degrees", "clockwise"},
{GST_VIDEOFLIP_METHOD_180, "Rotate 180 degrees", "rotate-180"},
{GST_VIDEOFLIP_METHOD_90L, "Rotate counter-clockwise 90 degrees",
"counterclockwise"},
"counterclockwise"},
{GST_VIDEOFLIP_METHOD_HORIZ, "Flip horizontally", "horizontal-flip"},
{GST_VIDEOFLIP_METHOD_VERT, "Flip vertically", "vertical-flip"},
{GST_VIDEOFLIP_METHOD_TRANS,
@ -89,243 +92,149 @@ gst_videoflip_method_get_type (void)
return videoflip_method_type;
}
GType
gst_videoflip_get_type (void)
static gboolean
gst_videoflip_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
static GType videoflip_type = 0;
GstVideoflip *vf;
GstStructure *in_s, *out_s;
gboolean ret = FALSE;
if (!videoflip_type) {
static const GTypeInfo videoflip_info = {
sizeof (GstVideoflipClass),
gst_videoflip_base_init,
NULL,
gst_videoflip_class_init,
NULL,
NULL,
sizeof (GstVideoflip),
0,
gst_videoflip_init,
};
vf = GST_VIDEOFLIP (btrans);
videoflip_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstVideoflip", &videoflip_info, 0);
}
return videoflip_type;
}
in_s = gst_caps_get_structure (incaps, 0);
out_s = gst_caps_get_structure (outcaps, 0);
static GstVideofilterFormat gst_videoflip_formats[] = {
/* planar */
{"YV12", 12, gst_videoflip_planar411,},
{"I420", 12, gst_videoflip_planar411,},
{"IYUV", 12, gst_videoflip_planar411,},
};
if (gst_structure_get_int (in_s, "width", &vf->from_width) &&
gst_structure_get_int (in_s, "height", &vf->from_height) &&
gst_structure_get_int (out_s, "width", &vf->to_width) &&
gst_structure_get_int (out_s, "height", &vf->to_height)) {
/* Check that they are correct */
switch (vf->method) {
case GST_VIDEOFLIP_METHOD_90R:
case GST_VIDEOFLIP_METHOD_90L:
case GST_VIDEOFLIP_METHOD_TRANS:
case GST_VIDEOFLIP_METHOD_OTHER:
if ((vf->from_width != vf->to_height) ||
(vf->from_height != vf->to_width)) {
GST_DEBUG_OBJECT (vf, "we are inverting width and height but caps "
"are not correct : %dx%d to %dx%d", vf->from_width,
vf->from_height, vf->to_width, vf->to_height);
goto beach;
}
break;
case GST_VIDEOFLIP_METHOD_IDENTITY:
static void
gst_videoflip_base_init (gpointer g_class)
{
static GstElementDetails videoflip_details =
GST_ELEMENT_DETAILS ("Video Flipper",
"Filter/Effect/Video",
"Flips and rotates video",
"David Schleef <ds@schleef.org>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &videoflip_details);
for (i = 0; i < G_N_ELEMENTS (gst_videoflip_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_videoflip_formats + i);
break;
case GST_VIDEOFLIP_METHOD_180:
case GST_VIDEOFLIP_METHOD_HORIZ:
case GST_VIDEOFLIP_METHOD_VERT:
if ((vf->from_width != vf->to_width) ||
(vf->from_height != vf->to_height)) {
GST_DEBUG_OBJECT (vf, "we are keeping width and height but caps "
"are not correct : %dx%d to %dx%d", vf->from_width,
vf->from_height, vf->to_width, vf->to_height);
goto beach;
}
break;
default:
g_assert_not_reached ();
break;
}
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
ret = TRUE;
beach:
return ret;
}
/* Class init for the GstVideofilter-based variant of videoflip: installs
 * the "method" property and hooks the videofilter setup vfunc.
 * NOTE(review): a second gst_videoflip_class_init (base-transform based)
 * exists later in this file -- this looks like leftover pre-refactor code;
 * confirm which one the build actually compiles. */
static void
gst_videoflip_class_init (gpointer g_class, gpointer class_data)
{
  GObjectClass *gobject_class;
  GstVideofilterClass *videofilter_class;

  gobject_class = G_OBJECT_CLASS (g_class);
  videofilter_class = GST_VIDEOFILTER_CLASS (g_class);

  gobject_class->set_property = gst_videoflip_set_property;
  gobject_class->get_property = gst_videoflip_get_property;

  /* "method" selects the flip/rotation; default is clockwise 90 degrees */
  g_object_class_install_property (gobject_class, ARG_METHOD,
      g_param_spec_enum ("method", "method", "method",
          GST_TYPE_VIDEOFLIP_METHOD, GST_VIDEOFLIP_METHOD_90R,
          G_PARAM_READWRITE));

  /* recompute the output geometry whenever caps are (re)configured */
  videofilter_class->setup = gst_videoflip_setup;
}
/* Instance init for the GstVideofilter-based variant: no per-instance
 * state needs to be set up here. */
static void
gst_videoflip_init (GTypeInstance * instance, gpointer g_class)
{
  GstVideoflip *videoflip = GST_VIDEOFLIP (instance);
  GstVideofilter *videofilter = GST_VIDEOFILTER (videoflip);

  GST_DEBUG_OBJECT (videoflip, "gst_videoflip_init");

  /* nothing to initialize yet */
  (void) videofilter;
}
static void
gst_videoflip_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
static GstCaps *
gst_videoflip_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps)
{
GstVideoflip *videoflip;
GstVideofilter *videofilter;
GstCaps *ret;
gint width, height, i;
g_return_if_fail (GST_IS_VIDEOFLIP (object));
videoflip = GST_VIDEOFLIP (object);
videofilter = GST_VIDEOFILTER (object);
videoflip = GST_VIDEOFLIP (trans);
GST_DEBUG_OBJECT (videoflip, "gst_videoflip_set_property");
switch (prop_id) {
case ARG_METHOD:
videoflip->method = g_value_get_enum (value);
if (videofilter->inited) {
GST_DEBUG_OBJECT (videoflip, "setting up videoflip again");
gst_videofilter_setup (videofilter);
ret = gst_caps_copy (caps);
for (i = 0; i < gst_caps_get_size (ret); i++) {
GstStructure *structure = gst_caps_get_structure (ret, i);
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
switch (videoflip->method) {
case GST_VIDEOFLIP_METHOD_90R:
case GST_VIDEOFLIP_METHOD_90L:
case GST_VIDEOFLIP_METHOD_TRANS:
case GST_VIDEOFLIP_METHOD_OTHER:
gst_structure_set (structure, "width", G_TYPE_INT, height,
"height", G_TYPE_INT, width, NULL);
break;
case GST_VIDEOFLIP_METHOD_IDENTITY:
case GST_VIDEOFLIP_METHOD_180:
case GST_VIDEOFLIP_METHOD_HORIZ:
case GST_VIDEOFLIP_METHOD_VERT:
gst_structure_set (structure, "width", G_TYPE_INT, width,
"height", G_TYPE_INT, height, NULL);
break;
default:
g_assert_not_reached ();
break;
}
break;
default:
break;
}
}
GST_DEBUG_OBJECT (videoflip, "transformed %" GST_PTR_FORMAT " to %"
GST_PTR_FORMAT, caps, ret);
return ret;
}
static void
gst_videoflip_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstVideoflip *videoflip;
/* Useful macros */
#define GST_VIDEO_I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
#define GST_VIDEO_I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
#define GST_VIDEO_I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(GST_VIDEO_I420_Y_ROWSTRIDE(width)))/2)
g_return_if_fail (GST_IS_VIDEOFLIP (object));
videoflip = GST_VIDEOFLIP (object);
#define GST_VIDEO_I420_Y_OFFSET(w,h) (0)
#define GST_VIDEO_I420_U_OFFSET(w,h) (GST_VIDEO_I420_Y_OFFSET(w,h)+(GST_VIDEO_I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
#define GST_VIDEO_I420_V_OFFSET(w,h) (GST_VIDEO_I420_U_OFFSET(w,h)+(GST_VIDEO_I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
switch (prop_id) {
case ARG_METHOD:
g_value_set_enum (value, videoflip->method);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
#define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
/* Plugin entry point: set up the debug category and register the
 * "videoflip" element with the core.  Returns FALSE on registration
 * failure, which makes plugin loading fail. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT (gst_videoflip_debug, "videoflip", 0, "videoflip");

  return gst_element_register (plugin, "videoflip", GST_RANK_NONE,
      GST_TYPE_VIDEOFLIP);
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"videoflip",
"Flips and rotates video",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
static void gst_videoflip_flip (GstVideoflip * videoflip,
unsigned char *dest, unsigned char *src, int sw, int sh, int dw, int dh);
/* Videofilter setup vfunc: derive the output size from the input size and
 * the configured method, and enable passthrough for the identity method.
 * Bails out silently when the input size is not yet known (0x0). */
static void
gst_videoflip_setup (GstVideofilter * videofilter)
{
  int from_width, from_height;
  GstVideoflip *videoflip;

  videoflip = GST_VIDEOFLIP (videofilter);
  GST_DEBUG_OBJECT (videoflip, "gst_videoflip_setup");

  from_width = gst_videofilter_get_input_width (videofilter);
  from_height = gst_videofilter_get_input_height (videofilter);

  /* caps not negotiated yet; nothing to compute */
  if (from_width == 0 || from_height == 0) {
    return;
  }

  switch (videoflip->method) {
    case GST_VIDEOFLIP_METHOD_90R:
    case GST_VIDEOFLIP_METHOD_90L:
    case GST_VIDEOFLIP_METHOD_TRANS:
    case GST_VIDEOFLIP_METHOD_OTHER:
      /* 90-degree rotations and transposes swap width and height */
      gst_videofilter_set_output_size (videofilter, from_height, from_width);
      break;
    case GST_VIDEOFLIP_METHOD_IDENTITY:
    case GST_VIDEOFLIP_METHOD_180:
    case GST_VIDEOFLIP_METHOD_HORIZ:
    case GST_VIDEOFLIP_METHOD_VERT:
      /* identity, 180-degree rotation and mirror flips keep the size */
      gst_videofilter_set_output_size (videofilter, from_width, from_height);
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  GST_DEBUG_OBJECT (videoflip, "format=%p \"%s\" from %dx%d to %dx%d",
      videofilter->format, videofilter->format->fourcc,
      from_width, from_height, videofilter->to_width, videofilter->to_height);

  /* identity needs no data shuffling -- let the base class pass buffers
   * through untouched */
  if (videoflip->method == GST_VIDEOFLIP_METHOD_IDENTITY) {
    GST_DEBUG_OBJECT (videoflip, "videoflip: using passthru");
    videofilter->passthru = TRUE;
  } else {
    videofilter->passthru = FALSE;
  }
}
static void
gst_videoflip_planar411 (GstVideofilter * videofilter, void *dest, void *src)
gst_videoflip_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GstVideoflip *videoflip;
int sw;
int sh;
int dw;
int dh;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
g_return_if_fail (GST_IS_VIDEOFLIP (videofilter));
videoflip = GST_VIDEOFLIP (videofilter);
videoflip = GST_VIDEOFLIP (btrans);
sw = videofilter->from_width;
sh = videofilter->from_height;
dw = videofilter->to_width;
dh = videofilter->to_height;
structure = gst_caps_get_structure (caps, 0);
GST_LOG_OBJECT (videoflip, "videoflip: scaling planar 4:1:1 %dx%d to %dx%d",
sw, sh, dw, dh);
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = GST_VIDEO_I420_SIZE (width, height);
ret = TRUE;
GST_DEBUG_OBJECT (videoflip, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
src += sw * sh;
dest += dw * dh;
dh = dh >> 1;
dw = dw >> 1;
sh = sh >> 1;
sw = sw >> 1;
gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
src += sw * sh;
dest += dw * dh;
gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
return ret;
}
static void
static GstFlowReturn
gst_videoflip_flip (GstVideoflip * videoflip, unsigned char *dest,
unsigned char *src, int sw, int sh, int dw, int dh)
{
GstFlowReturn ret = GST_FLOW_OK;
int x, y;
switch (videoflip->method) {
@ -379,7 +288,265 @@ gst_videoflip_flip (GstVideoflip * videoflip, unsigned char *dest,
}
break;
default:
/* FIXME */
ret = GST_FLOW_ERROR;
break;
}
return ret;
}
/* BaseTransform transform vfunc: flip/rotate one planar YUV frame from
 * @in into @out -- first the full-size luma plane, then the two chroma
 * planes, each subsampled by two in both directions.
 *
 * Fix: the plane pointers were declared gpointer (void *); arithmetic on
 * void * is a GCC extension and undefined in ISO C, so use guint8 * for
 * the byte-wise offsets.
 *
 * NOTE(review): plane offsets are computed as plain w*h products while
 * gst_videoflip_get_unit_size uses the rounded-up GST_VIDEO_I420_* macros;
 * for odd dimensions these can disagree -- confirm against negotiated caps.
 */
static GstFlowReturn
gst_videoflip_transform (GstBaseTransform * trans, GstBuffer * in,
    GstBuffer * out)
{
  GstVideoflip *videoflip;
  guint8 *dest, *src;
  int sw, sh, dw, dh;
  GstFlowReturn ret = GST_FLOW_OK;

  videoflip = GST_VIDEOFLIP (trans);

  /* carry timestamp/duration metadata over to the output buffer */
  gst_buffer_stamp (out, in);

  src = GST_BUFFER_DATA (in);
  dest = GST_BUFFER_DATA (out);

  sw = videoflip->from_width;
  sh = videoflip->from_height;
  dw = videoflip->to_width;
  dh = videoflip->to_height;

  GST_LOG_OBJECT (videoflip, "videoflip: scaling planar 4:1:1 %dx%d to %dx%d",
      sw, sh, dw, dh);

  /* luma plane */
  ret = gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
  if (ret != GST_FLOW_OK)
    goto beach;

  src += sw * sh;
  dest += dw * dh;

  /* chroma planes: half width, half height */
  dh = dh >> 1;
  dw = dw >> 1;
  sh = sh >> 1;
  sw = sw >> 1;

  ret = gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
  if (ret != GST_FLOW_OK)
    goto beach;

  src += sw * sh;
  dest += dw * dh;

  ret = gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);

beach:
  return ret;
}
/* Src-pad event handler: remap pointer coordinates of navigation events
 * from output-frame space back to input-frame space so that downstream
 * mouse positions land on the right input pixel, then forward the event
 * upstream via the default handler.
 *
 * Fix: the 90R/OTHER case did "x = y; y = vf->to_width - x;" which reads
 * the already-overwritten x (yielding y = to_width - y), and the
 * 90L/TRANS case had the mirror bug ("y = x" after x was clobbered).
 * Compute both new coordinates from the untouched originals. */
static gboolean
gst_videoflip_handle_src_event (GstPad * pad, GstEvent * event)
{
  GstVideoflip *vf;
  gboolean ret;
  gdouble x, y;
  GstStructure *structure;

  vf = GST_VIDEOFLIP (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (vf, "handling %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NAVIGATION:
      /* make the event writable before editing its structure in place */
      event =
          GST_EVENT (gst_mini_object_make_writable (GST_MINI_OBJECT (event)));

      structure = (GstStructure *) gst_event_get_structure (event);
      if (gst_structure_get_double (structure, "pointer_x", &x) &&
          gst_structure_get_double (structure, "pointer_y", &y)) {
        gdouble new_x = x, new_y = y;

        switch (vf->method) {
          case GST_VIDEOFLIP_METHOD_90R:
          case GST_VIDEOFLIP_METHOD_OTHER:
            new_x = y;
            new_y = vf->to_width - x;
            break;
          case GST_VIDEOFLIP_METHOD_90L:
          case GST_VIDEOFLIP_METHOD_TRANS:
            new_x = vf->to_height - y;
            new_y = x;
            break;
          case GST_VIDEOFLIP_METHOD_180:
            new_x = vf->to_width - x;
            new_y = vf->to_height - y;
            break;
          case GST_VIDEOFLIP_METHOD_HORIZ:
            new_x = vf->to_width - x;
            break;
          case GST_VIDEOFLIP_METHOD_VERT:
            new_y = vf->to_height - y;
            break;
          default:
            /* identity: coordinates pass through unchanged */
            break;
        }

        gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, new_x,
            "pointer_y", G_TYPE_DOUBLE, new_y, NULL);
      }
      break;
    default:
      break;
  }

  ret = gst_pad_event_default (pad, event);

  gst_object_unref (vf);

  return ret;
}
/* GObject set_property: only ARG_METHOD is handled.  Changing the method
 * drops the negotiated caps on both pads (under the transform lock, so it
 * does not race a running transform) to force renegotiation, because
 * several methods swap output width and height. */
static void
gst_videoflip_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoflip *videoflip;
  GstVideofilter *videofilter;   /* unused here; kept for symmetry */

  g_return_if_fail (GST_IS_VIDEOFLIP (object));
  videoflip = GST_VIDEOFLIP (object);
  videofilter = GST_VIDEOFILTER (object);

  switch (prop_id) {
    case ARG_METHOD:
    {
      GstVideoflipMethod method;

      method = g_value_get_enum (value);
      if (method != videoflip->method) {
        GstBaseTransform *btrans = GST_BASE_TRANSFORM (videoflip);

        /* invalidate caps before the method changes so the next buffer
         * renegotiates with the new geometry */
        g_mutex_lock (btrans->transform_lock);
        gst_pad_set_caps (btrans->sinkpad, NULL);
        gst_pad_set_caps (btrans->srcpad, NULL);
        g_mutex_unlock (btrans->transform_lock);
        videoflip->method = method;
      }
    }
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property: report the current flip/rotation method; warn on
 * any other property id. */
static void
gst_videoflip_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstVideoflip *flip;

  g_return_if_fail (GST_IS_VIDEOFLIP (object));
  flip = GST_VIDEOFLIP (object);

  if (prop_id == ARG_METHOD)
    g_value_set_enum (value, flip->method);
  else
    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
}
/* GType base_init: attach the element details and both static pad
 * templates (sink first, then src) to the element class. */
static void
gst_videoflip_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_set_details (element_class, &videoflip_details);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_videoflip_sink_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_videoflip_src_template));
}
/* Class init for the GstBaseTransform-based videoflip: installs the
 * "method" property and wires up the base-transform vfuncs
 * (transform_caps / set_caps / get_unit_size / transform). */
static void
gst_videoflip_class_init (gpointer klass, gpointer class_data)
{
  GObjectClass *gobject_class;
  GstBaseTransformClass *trans_class;

  gobject_class = (GObjectClass *) klass;
  trans_class = (GstBaseTransformClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->set_property = gst_videoflip_set_property;
  gobject_class->get_property = gst_videoflip_get_property;

  /* "method" selects the flip/rotation; default is clockwise 90 degrees */
  g_object_class_install_property (gobject_class, ARG_METHOD,
      g_param_spec_enum ("method", "method", "method",
          GST_TYPE_VIDEOFLIP_METHOD, GST_VIDEOFLIP_METHOD_90R,
          G_PARAM_READWRITE));

  trans_class->transform_caps =
      GST_DEBUG_FUNCPTR (gst_videoflip_transform_caps);
  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_videoflip_set_caps);
  trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_videoflip_get_unit_size);
  trans_class->transform = GST_DEBUG_FUNCPTR (gst_videoflip_transform);
}
/* Instance init for the base-transform variant: set the default method
 * and install a src-pad event handler so navigation events can have
 * their pointer coordinates remapped before going upstream. */
static void
gst_videoflip_init (GTypeInstance * instance, gpointer g_class)
{
  GstVideoflip *videoflip = GST_VIDEOFLIP (instance);
  GstBaseTransform *btrans = GST_BASE_TRANSFORM (instance);

  GST_DEBUG_OBJECT (videoflip, "gst_videoflip_init");

  videoflip->method = GST_VIDEOFLIP_METHOD_90R;

  /* intercept navigation events on the src pad */
  gst_pad_set_event_function (btrans->srcpad,
      GST_DEBUG_FUNCPTR (gst_videoflip_handle_src_event));
}
/* Plugin entry point (base-transform variant).
 * NOTE(review): this file also contains an earlier plugin_init using the
 * gst_videoflip_debug category -- diff interleave; only one can remain. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT (videoflip_debug, "videoflip", 0, "videoflip");

  return gst_element_register (plugin, "videoflip", GST_RANK_NONE,
      GST_TYPE_VIDEOFLIP);
}
/* Lazily register and return the GstVideoflip GType, derived from
 * GST_TYPE_VIDEOFILTER.
 * NOTE(review): the check-then-register pattern is not thread-safe on its
 * own; presumably first called from plugin_init before any concurrent
 * use -- confirm. */
GType
gst_videoflip_get_type (void)
{
  static GType videoflip_type = 0;

  if (!videoflip_type) {
    static const GTypeInfo videoflip_info = {
      sizeof (GstVideoflipClass),
      gst_videoflip_base_init,
      NULL,
      gst_videoflip_class_init,
      NULL,
      NULL,
      sizeof (GstVideoflip),
      0,
      gst_videoflip_init,
    };

    videoflip_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
        "GstVideoflip", &videoflip_info, 0);
  }
  return videoflip_type;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"videoflip",
"Flips and rotates video",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);

View file

@ -17,16 +17,11 @@
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_VIDEOFLIP_H__
#define __GST_VIDEOFLIP_H__
#include <gst/gst.h>
#include "gstvideofilter.h"
G_BEGIN_DECLS
typedef enum {
@ -37,7 +32,7 @@ typedef enum {
GST_VIDEOFLIP_METHOD_HORIZ,
GST_VIDEOFLIP_METHOD_VERT,
GST_VIDEOFLIP_METHOD_TRANS,
GST_VIDEOFLIP_METHOD_OTHER,
GST_VIDEOFLIP_METHOD_OTHER
} GstVideoflipMethod;
#define GST_TYPE_VIDEOFLIP \
@ -57,6 +52,9 @@ typedef struct _GstVideoflipClass GstVideoflipClass;
struct _GstVideoflip {
GstVideofilter videofilter;
gint from_width, from_height;
gint to_width, to_height;
GstVideoflipMethod method;
};
@ -69,4 +67,3 @@ GType gst_videoflip_get_type(void);
G_END_DECLS
#endif /* __GST_VIDEOFLIP_H__ */