VideoFilter inherits from

Original commit message from CVS:
2005-11-23  Julien MOUTTE  <julien@moutte.net>

* ext/cairo/gsttimeoverlay.c:
(gst_timeoverlay_update_font_height),
(gst_timeoverlay_set_caps), (gst_timeoverlay_get_unit_size),
(gst_timeoverlay_transform), (gst_timeoverlay_base_init),
(gst_timeoverlay_class_init), (gst_timeoverlay_init),
(gst_timeoverlay_get_type):
* ext/cairo/gsttimeoverlay.h:
* gst/debug/Makefile.am:
* gst/debug/gstnavigationtest.c:
(gst_navigationtest_handle_src_event),
(gst_navigationtest_get_unit_size),
(gst_navigationtest_set_caps),
(gst_navigationtest_transform),
(gst_navigationtest_change_state),
(gst_navigationtest_base_init), (gst_navigationtest_class_init),
(gst_navigationtest_init), (gst_navigationtest_get_type),
(plugin_init):
* gst/debug/gstnavigationtest.h:
* gst/effectv/Makefile.am:
* gst/effectv/gstaging.c: (gst_agingtv_set_caps),
(gst_agingtv_get_unit_size), (gst_agingtv_transform),
(gst_agingtv_base_init), (gst_agingtv_class_init),
(gst_agingtv_init), (gst_agingtv_get_type):
* gst/effectv/gstdice.c: (gst_dicetv_set_caps),
(gst_dicetv_get_unit_size), (gst_dicetv_transform),
(gst_dicetv_base_init), (gst_dicetv_class_init),
(gst_dicetv_init),
(gst_dicetv_get_type):
* gst/effectv/gstedge.c: (gst_edgetv_set_caps),
(gst_edgetv_get_unit_size), (gst_edgetv_transform),
(gst_edgetv_base_init), (gst_edgetv_class_init),
(gst_edgetv_init),
(gst_edgetv_get_type):
* gst/effectv/gsteffectv.c:
* gst/effectv/gsteffectv.h:
* gst/effectv/gstquark.c: (gst_quarktv_set_caps),
(gst_quarktv_get_unit_size), (fastrand),
(gst_quarktv_transform),
(gst_quarktv_change_state), (gst_quarktv_base_init),
(gst_quarktv_class_init), (gst_quarktv_init),
(gst_quarktv_get_type):
* gst/effectv/gstrev.c: (gst_revtv_set_caps),
(gst_revtv_get_unit_size), (gst_revtv_transform),
(gst_revtv_base_init), (gst_revtv_class_init), (gst_revtv_init),
(gst_revtv_get_type):
* gst/effectv/gstshagadelic.c: (gst_shagadelictv_set_caps),
(gst_shagadelictv_get_unit_size), (gst_shagadelictv_transform),
(gst_shagadelictv_base_init), (gst_shagadelictv_class_init),
(gst_shagadelictv_init), (gst_shagadelictv_get_type):
* gst/effectv/gstvertigo.c: (gst_vertigotv_set_caps),
(gst_vertigotv_get_unit_size), (gst_vertigotv_transform),
(gst_vertigotv_base_init), (gst_vertigotv_class_init),
(gst_vertigotv_init), (gst_vertigotv_get_type):
* gst/effectv/gstwarp.c: (gst_warptv_set_caps),
(gst_warptv_get_unit_size), (gst_warptv_transform),
(gst_warptv_base_init), (gst_warptv_class_init),
(gst_warptv_init),
(gst_warptv_get_type):
* gst/videofilter/Makefile.am:
* gst/videofilter/gstvideobalance.c:
* gst/videofilter/gstvideobalance.h:
* gst/videofilter/gstvideofilter.c: (gst_videofilter_get_type),
(gst_videofilter_class_init), (gst_videofilter_init):
* gst/videofilter/gstvideofilter.h:
* gst/videofilter/gstvideoflip.c: (gst_videoflip_set_caps),
(gst_videoflip_transform_caps), (gst_videoflip_get_unit_size),
(gst_videoflip_flip), (gst_videoflip_transform),
(gst_videoflip_handle_src_event), (gst_videoflip_set_property),
(gst_videoflip_base_init), (gst_videoflip_class_init),
(gst_videoflip_init), (plugin_init), (gst_videoflip_get_type):
* gst/videofilter/gstvideoflip.h: VideoFilter inherits from
BaseTransform, it's just a place holder for now and every video
effect plugin has been ported to use BaseTransform features
directly. QuarkTV was fixed too (was broken), navigationtest
works
and best for the end, videoflip converts navigation events
depending
on flip method ! Fixes #320953
This commit is contained in:
Julien Moutte 2005-11-23 15:50:51 +00:00
parent 48520a455d
commit 2ea4f5b3c9
24 changed files with 1949 additions and 2356 deletions

View file

@ -1,3 +1,75 @@
2005-11-23 Julien MOUTTE <julien@moutte.net>
* ext/cairo/gsttimeoverlay.c: (gst_timeoverlay_update_font_height),
(gst_timeoverlay_set_caps), (gst_timeoverlay_get_unit_size),
(gst_timeoverlay_transform), (gst_timeoverlay_base_init),
(gst_timeoverlay_class_init), (gst_timeoverlay_init),
(gst_timeoverlay_get_type):
* ext/cairo/gsttimeoverlay.h:
* gst/debug/Makefile.am:
* gst/debug/gstnavigationtest.c:
(gst_navigationtest_handle_src_event),
(gst_navigationtest_get_unit_size), (gst_navigationtest_set_caps),
(gst_navigationtest_transform), (gst_navigationtest_change_state),
(gst_navigationtest_base_init), (gst_navigationtest_class_init),
(gst_navigationtest_init), (gst_navigationtest_get_type),
(plugin_init):
* gst/debug/gstnavigationtest.h:
* gst/effectv/Makefile.am:
* gst/effectv/gstaging.c: (gst_agingtv_set_caps),
(gst_agingtv_get_unit_size), (gst_agingtv_transform),
(gst_agingtv_base_init), (gst_agingtv_class_init),
(gst_agingtv_init), (gst_agingtv_get_type):
* gst/effectv/gstdice.c: (gst_dicetv_set_caps),
(gst_dicetv_get_unit_size), (gst_dicetv_transform),
(gst_dicetv_base_init), (gst_dicetv_class_init), (gst_dicetv_init),
(gst_dicetv_get_type):
* gst/effectv/gstedge.c: (gst_edgetv_set_caps),
(gst_edgetv_get_unit_size), (gst_edgetv_transform),
(gst_edgetv_base_init), (gst_edgetv_class_init), (gst_edgetv_init),
(gst_edgetv_get_type):
* gst/effectv/gsteffectv.c:
* gst/effectv/gsteffectv.h:
* gst/effectv/gstquark.c: (gst_quarktv_set_caps),
(gst_quarktv_get_unit_size), (fastrand), (gst_quarktv_transform),
(gst_quarktv_change_state), (gst_quarktv_base_init),
(gst_quarktv_class_init), (gst_quarktv_init),
(gst_quarktv_get_type):
* gst/effectv/gstrev.c: (gst_revtv_set_caps),
(gst_revtv_get_unit_size), (gst_revtv_transform),
(gst_revtv_base_init), (gst_revtv_class_init), (gst_revtv_init),
(gst_revtv_get_type):
* gst/effectv/gstshagadelic.c: (gst_shagadelictv_set_caps),
(gst_shagadelictv_get_unit_size), (gst_shagadelictv_transform),
(gst_shagadelictv_base_init), (gst_shagadelictv_class_init),
(gst_shagadelictv_init), (gst_shagadelictv_get_type):
* gst/effectv/gstvertigo.c: (gst_vertigotv_set_caps),
(gst_vertigotv_get_unit_size), (gst_vertigotv_transform),
(gst_vertigotv_base_init), (gst_vertigotv_class_init),
(gst_vertigotv_init), (gst_vertigotv_get_type):
* gst/effectv/gstwarp.c: (gst_warptv_set_caps),
(gst_warptv_get_unit_size), (gst_warptv_transform),
(gst_warptv_base_init), (gst_warptv_class_init), (gst_warptv_init),
(gst_warptv_get_type):
* gst/videofilter/Makefile.am:
* gst/videofilter/gstvideobalance.c:
* gst/videofilter/gstvideobalance.h:
* gst/videofilter/gstvideofilter.c: (gst_videofilter_get_type),
(gst_videofilter_class_init), (gst_videofilter_init):
* gst/videofilter/gstvideofilter.h:
* gst/videofilter/gstvideoflip.c: (gst_videoflip_set_caps),
(gst_videoflip_transform_caps), (gst_videoflip_get_unit_size),
(gst_videoflip_flip), (gst_videoflip_transform),
(gst_videoflip_handle_src_event), (gst_videoflip_set_property),
(gst_videoflip_base_init), (gst_videoflip_class_init),
(gst_videoflip_init), (plugin_init), (gst_videoflip_get_type):
* gst/videofilter/gstvideoflip.h: VideoFilter inherits from
BaseTransform, it's just a place holder for now and every video
effect plugin has been ported to use BaseTransform features
directly. QuarkTV was fixed too (was broken), navigationtest works
and best for the end, videoflip converts navigation events depending
on flip method ! Fixes #320953
2005-11-23 Jan Schmidt <thaytan@mad.scientist.com> 2005-11-23 Jan Schmidt <thaytan@mad.scientist.com>
* ext/aalib/gstaasink.c: (gst_aasink_fixate): * ext/aalib/gstaasink.c: (gst_aasink_fixate):

View file

@ -27,179 +27,47 @@
#include "config.h" #include "config.h"
#endif #endif
/*#define DEBUG_ENABLED */
#include <gsttimeoverlay.h> #include <gsttimeoverlay.h>
#include <string.h> #include <string.h>
#include <math.h> #include <math.h>
#include <cairo.h> #include <cairo.h>
#include <gst/video/video.h>
/* GstTimeoverlay signals and args */ static GstElementDetails timeoverlay_details =
enum GST_ELEMENT_DETAILS ("Time Overlay",
{ "Filter/Editor/Video",
/* FILL ME */ "Overlays the time on a video stream",
LAST_SIGNAL "David Schleef <ds@schleef.org>");
};
enum static GstStaticPadTemplate gst_timeoverlay_src_template =
{ GST_STATIC_PAD_TEMPLATE ("src",
ARG_0 GST_PAD_SRC,
/* FILL ME */ GST_PAD_ALWAYS,
}; GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
);
static void gst_timeoverlay_base_init (gpointer g_class); static GstStaticPadTemplate gst_timeoverlay_sink_template =
static void gst_timeoverlay_class_init (gpointer g_class, gpointer class_data); GST_STATIC_PAD_TEMPLATE ("sink",
static void gst_timeoverlay_init (GTypeInstance * instance, gpointer g_class); GST_PAD_SINK,
GST_PAD_ALWAYS,
static void gst_timeoverlay_set_property (GObject * object, guint prop_id, GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
const GValue * value, GParamSpec * pspec); );
static void gst_timeoverlay_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest,
void *src);
static void gst_timeoverlay_setup (GstVideofilter * videofilter);
GType
gst_timeoverlay_get_type (void)
{
static GType timeoverlay_type = 0;
if (!timeoverlay_type) {
static const GTypeInfo timeoverlay_info = {
sizeof (GstTimeoverlayClass),
gst_timeoverlay_base_init,
NULL,
gst_timeoverlay_class_init,
NULL,
NULL,
sizeof (GstTimeoverlay),
0,
gst_timeoverlay_init,
};
timeoverlay_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstTimeoverlay", &timeoverlay_info, 0);
}
return timeoverlay_type;
}
static GstVideofilterFormat gst_timeoverlay_formats[] = {
{"I420", 12, gst_timeoverlay_planar411,},
};
static GstVideofilterClass *parent_class = NULL;
static void static void
gst_timeoverlay_base_init (gpointer g_class) gst_timeoverlay_update_font_height (GstTimeoverlay * timeoverlay)
{ {
static GstElementDetails timeoverlay_details =
GST_ELEMENT_DETAILS ("Time Overlay",
"Filter/Editor/Video",
"Overlays the time on a video stream",
"David Schleef <ds@schleef.org>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &timeoverlay_details);
for (i = 0; i < G_N_ELEMENTS (gst_timeoverlay_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_timeoverlay_formats + i);
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
}
static void
gst_timeoverlay_class_init (gpointer g_class, gpointer class_data)
{
GObjectClass *gobject_class;
GstVideofilterClass *videofilter_class;
gobject_class = G_OBJECT_CLASS (g_class);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
#if 0
g_object_class_install_property (gobject_class, ARG_METHOD,
g_param_spec_enum ("method", "method", "method",
GST_TYPE_TIMEOVERLAY_METHOD, GST_TIMEOVERLAY_METHOD_1,
G_PARAM_READWRITE));
#endif
gobject_class->set_property = gst_timeoverlay_set_property;
gobject_class->get_property = gst_timeoverlay_get_property;
videofilter_class->setup = gst_timeoverlay_setup;
}
static void
gst_timeoverlay_init (GTypeInstance * instance, gpointer g_class)
{
GstTimeoverlay *timeoverlay = GST_TIMEOVERLAY (instance);
GstVideofilter *videofilter;
GST_DEBUG ("gst_timeoverlay_init");
videofilter = GST_VIDEOFILTER (timeoverlay);
/* do stuff */
}
static void
gst_timeoverlay_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstTimeoverlay *src;
g_return_if_fail (GST_IS_TIMEOVERLAY (object));
src = GST_TIMEOVERLAY (object);
GST_DEBUG ("gst_timeoverlay_set_property");
switch (prop_id) {
#if 0
case ARG_METHOD:
src->method = g_value_get_enum (value);
break;
#endif
default:
break;
}
}
static void
gst_timeoverlay_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstTimeoverlay *src;
g_return_if_fail (GST_IS_TIMEOVERLAY (object));
src = GST_TIMEOVERLAY (object);
switch (prop_id) {
#if 0
case ARG_METHOD:
g_value_set_enum (value, src->method);
break;
#endif
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_timeoverlay_update_font_height (GstVideofilter * videofilter)
{
GstTimeoverlay *timeoverlay = GST_TIMEOVERLAY (videofilter);
gint width, height; gint width, height;
cairo_surface_t *font_surface; cairo_surface_t *font_surface;
cairo_t *font_cairo; cairo_t *font_cairo;
cairo_font_extents_t font_extents; cairo_font_extents_t font_extents;
width = gst_videofilter_get_input_width (videofilter); width = timeoverlay->width;
height = gst_videofilter_get_input_height (videofilter); height = timeoverlay->height;
font_surface = font_surface =
cairo_image_surface_create (CAIRO_FORMAT_ARGB32, width, height); cairo_image_surface_create (CAIRO_FORMAT_ARGB32, width, height);
@ -216,15 +84,58 @@ gst_timeoverlay_update_font_height (GstVideofilter * videofilter)
font_cairo = NULL; font_cairo = NULL;
} }
static void static gboolean
gst_timeoverlay_setup (GstVideofilter * videofilter) gst_timeoverlay_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{ {
GstTimeoverlay *timeoverlay; GstTimeoverlay *filter = GST_TIMEOVERLAY (btrans);
GstStructure *structure;
gboolean ret = FALSE;
g_return_if_fail (GST_IS_TIMEOVERLAY (videofilter)); structure = gst_caps_get_structure (incaps, 0);
timeoverlay = GST_TIMEOVERLAY (videofilter);
gst_timeoverlay_update_font_height (videofilter); if (gst_structure_get_int (structure, "width", &filter->width) &&
gst_structure_get_int (structure, "height", &filter->height)) {
gst_timeoverlay_update_font_height (filter);
ret = TRUE;
}
return ret;
}
/* Useful macros */
#define GST_VIDEO_I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
#define GST_VIDEO_I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
#define GST_VIDEO_I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(GST_VIDEO_I420_Y_ROWSTRIDE(width)))/2)
#define GST_VIDEO_I420_Y_OFFSET(w,h) (0)
#define GST_VIDEO_I420_U_OFFSET(w,h) (GST_VIDEO_I420_Y_OFFSET(w,h)+(GST_VIDEO_I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
#define GST_VIDEO_I420_V_OFFSET(w,h) (GST_VIDEO_I420_U_OFFSET(w,h)+(GST_VIDEO_I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
#define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
static gboolean
gst_timeoverlay_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{
GstTimeoverlay *filter;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
filter = GST_TIMEOVERLAY (btrans);
structure = gst_caps_get_structure (caps, 0);
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = GST_VIDEO_I420_SIZE (width, height);
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
return ret;
} }
static char * static char *
@ -250,8 +161,9 @@ gst_timeoverlay_print_smpte_time (guint64 time)
} }
static void static GstFlowReturn
gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src) gst_timeoverlay_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
{ {
GstTimeoverlay *timeoverlay; GstTimeoverlay *timeoverlay;
int width; int width;
@ -261,15 +173,20 @@ gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src)
int i, j; int i, j;
unsigned char *image; unsigned char *image;
cairo_text_extents_t extents; cairo_text_extents_t extents;
gpointer dest, src;
cairo_surface_t *font_surface; cairo_surface_t *font_surface;
cairo_t *text_cairo; cairo_t *text_cairo;
GstFlowReturn ret = GST_FLOW_OK;
g_return_if_fail (GST_IS_TIMEOVERLAY (videofilter)); timeoverlay = GST_TIMEOVERLAY (trans);
timeoverlay = GST_TIMEOVERLAY (videofilter);
width = gst_videofilter_get_input_width (videofilter); gst_buffer_stamp (out, in);
height = gst_videofilter_get_input_height (videofilter);
src = GST_BUFFER_DATA (in);
dest = GST_BUFFER_DATA (out);
width = timeoverlay->width;
height = timeoverlay->height;
/* create surface for font rendering */ /* create surface for font rendering */
/* FIXME: preparation of the surface could also be done once when settings /* FIXME: preparation of the surface could also be done once when settings
@ -292,9 +209,7 @@ gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src)
cairo_fill (text_cairo); cairo_fill (text_cairo);
cairo_restore (text_cairo); cairo_restore (text_cairo);
string = string = gst_timeoverlay_print_smpte_time (GST_BUFFER_TIMESTAMP (in));
gst_timeoverlay_print_smpte_time (GST_BUFFER_TIMESTAMP (videofilter->
in_buf));
cairo_save (text_cairo); cairo_save (text_cairo);
cairo_select_font_face (text_cairo, "monospace", 0, 0); cairo_select_font_face (text_cairo, "monospace", 0, 0);
cairo_set_font_size (text_cairo, 20); cairo_set_font_size (text_cairo, 20);
@ -303,12 +218,6 @@ gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src)
cairo_move_to (text_cairo, 0, timeoverlay->text_height - 2); cairo_move_to (text_cairo, 0, timeoverlay->text_height - 2);
cairo_show_text (text_cairo, string); cairo_show_text (text_cairo, string);
g_free (string); g_free (string);
#if 0
cairo_text_path (timeoverlay->cr, string);
cairo_set_rgb_color (timeoverlay->cr, 1, 1, 1);
cairo_set_line_width (timeoverlay->cr, 1.0);
cairo_stroke (timeoverlay->cr);
#endif
cairo_restore (text_cairo); cairo_restore (text_cairo);
@ -317,7 +226,7 @@ gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src)
if (b_width > width) if (b_width > width)
b_width = width; b_width = width;
memcpy (dest, src, videofilter->from_buf_size); memcpy (dest, src, GST_BUFFER_SIZE (in));
for (i = 0; i < timeoverlay->text_height; i++) { for (i = 0; i < timeoverlay->text_height; i++) {
for (j = 0; j < b_width; j++) { for (j = 0; j < b_width; j++) {
((unsigned char *) dest)[i * width + j] = image[(i * width + j) * 4 + 0]; ((unsigned char *) dest)[i * width + j] = image[(i * width + j) * 4 + 0];
@ -332,4 +241,67 @@ gst_timeoverlay_planar411 (GstVideofilter * videofilter, void *dest, void *src)
cairo_destroy (text_cairo); cairo_destroy (text_cairo);
text_cairo = NULL; text_cairo = NULL;
g_free (image); g_free (image);
return ret;
}
static void
gst_timeoverlay_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &timeoverlay_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_timeoverlay_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_timeoverlay_src_template));
}
static void
gst_timeoverlay_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_timeoverlay_set_caps);
trans_class->get_unit_size =
GST_DEBUG_FUNCPTR (gst_timeoverlay_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_timeoverlay_transform);
}
static void
gst_timeoverlay_init (GTypeInstance * instance, gpointer g_class)
{
}
GType
gst_timeoverlay_get_type (void)
{
static GType timeoverlay_type = 0;
if (!timeoverlay_type) {
static const GTypeInfo timeoverlay_info = {
sizeof (GstTimeoverlayClass),
gst_timeoverlay_base_init,
NULL,
gst_timeoverlay_class_init,
NULL,
NULL,
sizeof (GstTimeoverlay),
0,
gst_timeoverlay_init,
};
timeoverlay_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstTimeoverlay", &timeoverlay_info, 0);
}
return timeoverlay_type;
} }

View file

@ -21,13 +21,11 @@
#ifndef __GST_TIMEOVERLAY_H__ #ifndef __GST_TIMEOVERLAY_H__
#define __GST_TIMEOVERLAY_H__ #define __GST_TIMEOVERLAY_H__
#include <gst/gst.h> #include <gst/gst.h>
#include <cairo.h> #include <cairo.h>
#include "gstvideofilter.h" #include "gstvideofilter.h"
G_BEGIN_DECLS G_BEGIN_DECLS
#define GST_TYPE_TIMEOVERLAY \ #define GST_TYPE_TIMEOVERLAY \
@ -47,6 +45,8 @@ typedef struct _GstTimeoverlayClass GstTimeoverlayClass;
struct _GstTimeoverlay { struct _GstTimeoverlay {
GstVideofilter videofilter; GstVideofilter videofilter;
gint width, height;
cairo_surface_t *surface; cairo_surface_t *surface;
cairo_t *cr; cairo_t *cr;
int text_height; int text_height;
@ -62,4 +62,3 @@ GType gst_timeoverlay_get_type(void);
G_END_DECLS G_END_DECLS
#endif /* __GST_TIMEOVERLAY_H__ */ #endif /* __GST_TIMEOVERLAY_H__ */

View file

@ -14,8 +14,12 @@ libgstefence_la_LIBADD = $(GST_LIBS)
libgstefence_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstefence_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstnavigationtest_la_SOURCES = gstnavigationtest.c libgstnavigationtest_la_SOURCES = gstnavigationtest.c
libgstnavigationtest_la_CFLAGS = $(GST_CFLAGS) -I$(top_srcdir)/gst/videofilter libgstnavigationtest_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \
libgstnavigationtest_la_LIBADD = $(GST_LIBS) $(top_builddir)/gst/videofilter/libgstvideofilter-@GST_MAJORMINOR@.la $(GST_PLUGINS_BASE_CFLAGS) \
-I$(top_srcdir)/gst/videofilter
libgstnavigationtest_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) \
$(top_builddir)/gst/videofilter/libgstvideofilter-@GST_MAJORMINOR@.la
libgstnavigationtest_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstnavigationtest_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstdebug_la_SOURCES = \ libgstdebug_la_SOURCES = \

View file

@ -18,122 +18,40 @@
* Boston, MA 02111-1307, USA. * Boston, MA 02111-1307, USA.
*/ */
/*
* This file was (probably) generated from gstnavigationtest.c,
* gstnavigationtest.c,v 1.7 2003/11/08 02:48:59 dschleef Exp
*/
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"
#endif #endif
#include <gstnavigationtest.h> #include "gstnavigationtest.h"
#include <string.h> #include <string.h>
#include <math.h> #include <math.h>
typedef struct #include <gst/video/video.h>
{
double x;
double y;
gint images_left;
guint8 cy, cu, cv;
} ButtonClick;
static void gst_navigationtest_base_init (gpointer g_class); GST_DEBUG_CATEGORY (navigationtest_debug);
static void gst_navigationtest_class_init (gpointer g_class, #define GST_CAT_DEFAULT navigationtest_debug
gpointer class_data);
static void gst_navigationtest_init (GTypeInstance * instance,
gpointer g_class);
static gboolean gst_navigationtest_handle_src_event (GstPad * pad, static GstElementDetails navigationtest_details =
GstEvent * event); GST_ELEMENT_DETAILS ("Video Navigation test",
"Filter/Effect/Video",
"Handle navigation events showing a black square following mouse pointer",
"David Schleef <ds@schleef.org>");
static GstStateChangeReturn static GstStaticPadTemplate gst_navigationtest_src_template =
gst_navigationtest_change_state (GstElement * element, GST_STATIC_PAD_TEMPLATE ("src",
GstStateChange transition); GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
);
static void gst_navigationtest_planar411 (GstVideofilter * videofilter, static GstStaticPadTemplate gst_navigationtest_sink_template =
void *dest, void *src); GST_STATIC_PAD_TEMPLATE ("sink",
static void gst_navigationtest_setup (GstVideofilter * videofilter); GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
);
static GstVideofilterClass *parent_class; /* NULL */ static GstVideofilterClass *parent_class = NULL;
GType
gst_navigationtest_get_type (void)
{
static GType navigationtest_type = 0;
if (!navigationtest_type) {
static const GTypeInfo navigationtest_info = {
sizeof (GstNavigationtestClass),
gst_navigationtest_base_init,
NULL,
gst_navigationtest_class_init,
NULL,
NULL,
sizeof (GstNavigationtest),
0,
gst_navigationtest_init,
};
navigationtest_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstNavigationtest", &navigationtest_info, 0);
}
return navigationtest_type;
}
static GstVideofilterFormat gst_navigationtest_formats[] = {
{"I420", 12, gst_navigationtest_planar411,},
};
static void
gst_navigationtest_base_init (gpointer g_class)
{
static GstElementDetails navigationtest_details =
GST_ELEMENT_DETAILS ("Video Filter Template",
"Filter/Video",
"Template for a video filter",
"David Schleef <ds@schleef.org>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &navigationtest_details);
for (i = 0; i < G_N_ELEMENTS (gst_navigationtest_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_navigationtest_formats + i);
}
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
}
static void
gst_navigationtest_class_init (gpointer g_class, gpointer class_data)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
parent_class = g_type_class_peek_parent (g_class);
element_class->change_state = gst_navigationtest_change_state;
videofilter_class->setup = gst_navigationtest_setup;
}
static void
gst_navigationtest_init (GTypeInstance * instance, gpointer g_class)
{
GstNavigationtest *navtest = GST_NAVIGATIONTEST (instance);
GstVideofilter *videofilter = GST_VIDEOFILTER (navtest);
gst_pad_set_event_function (videofilter->srcpad,
GST_DEBUG_FUNCPTR (gst_navigationtest_handle_src_event));
navtest->x = -1;
navtest->y = -1;
}
static gboolean static gboolean
gst_navigationtest_handle_src_event (GstPad * pad, GstEvent * event) gst_navigationtest_handle_src_event (GstPad * pad, GstEvent * event)
@ -149,10 +67,8 @@ gst_navigationtest_handle_src_event (GstPad * pad, GstEvent * event)
const GstStructure *s = gst_event_get_structure (event); const GstStructure *s = gst_event_get_structure (event);
gint fps_n, fps_d; gint fps_n, fps_d;
fps_n = gst_value_get_fraction_numerator ( fps_n = gst_value_get_fraction_numerator ((&navtest->framerate));
(&GST_VIDEOFILTER (navtest)->framerate)); fps_d = gst_value_get_fraction_denominator ((&navtest->framerate));
fps_d = gst_value_get_fraction_denominator (
(&GST_VIDEOFILTER (navtest)->framerate));
type = gst_structure_get_string (s, "event"); type = gst_structure_get_string (s, "event");
if (g_str_equal (type, "mouse-move")) { if (g_str_equal (type, "mouse-move")) {
@ -189,16 +105,63 @@ gst_navigationtest_handle_src_event (GstPad * pad, GstEvent * event)
return gst_pad_event_default (pad, event); return gst_pad_event_default (pad, event);
} }
static void /* Useful macros */
gst_navigationtest_setup (GstVideofilter * videofilter) #define GST_VIDEO_I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
#define GST_VIDEO_I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
#define GST_VIDEO_I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(GST_VIDEO_I420_Y_ROWSTRIDE(width)))/2)
#define GST_VIDEO_I420_Y_OFFSET(w,h) (0)
#define GST_VIDEO_I420_U_OFFSET(w,h) (GST_VIDEO_I420_Y_OFFSET(w,h)+(GST_VIDEO_I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
#define GST_VIDEO_I420_V_OFFSET(w,h) (GST_VIDEO_I420_U_OFFSET(w,h)+(GST_VIDEO_I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
#define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
static gboolean
gst_navigationtest_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{ {
GstNavigationtest *navigationtest; GstNavigationtest *navtest;
GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
g_return_if_fail (GST_IS_NAVIGATIONTEST (videofilter)); navtest = GST_NAVIGATIONTEST (btrans);
navigationtest = GST_NAVIGATIONTEST (videofilter);
/* if any setup needs to be done, do it here */ structure = gst_caps_get_structure (caps, 0);
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = GST_VIDEO_I420_SIZE (width, height);
ret = TRUE;
GST_DEBUG_OBJECT (navtest, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
return ret;
}
static gboolean
gst_navigationtest_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
GstNavigationtest *navtest = GST_NAVIGATIONTEST (btrans);
gboolean ret = FALSE;
GstStructure *structure;
structure = gst_caps_get_structure (incaps, 0);
if (gst_structure_get_int (structure, "width", &navtest->width) &&
gst_structure_get_int (structure, "height", &navtest->height)) {
const GValue *framerate;
framerate = gst_structure_get_value (structure, "framerate");
if (framerate && GST_VALUE_HOLDS_FRACTION (framerate)) {
g_value_copy (framerate, &navtest->framerate);
ret = TRUE;
}
}
return ret;
} }
static void static void
@ -242,37 +205,37 @@ draw_box_planar411 (guint8 * dest, int width, int height, int x, int y,
} }
} }
static void static GstFlowReturn
gst_navigationtest_planar411 (GstVideofilter * videofilter, gst_navigationtest_transform (GstBaseTransform * trans, GstBuffer * in,
void *dest, void *src) GstBuffer * out)
{ {
GstNavigationtest *navtest = (GstNavigationtest *) videofilter; GstNavigationtest *navtest = GST_NAVIGATIONTEST (trans);
gint width, height;
GSList *walk; GSList *walk;
GstFlowReturn ret = GST_FLOW_OK;
g_return_if_fail (GST_IS_NAVIGATIONTEST (videofilter));
width = gst_videofilter_get_input_width (videofilter);
height = gst_videofilter_get_input_height (videofilter);
/* do something interesting here. This simply copies the source /* do something interesting here. This simply copies the source
* to the destination. */ * to the destination. */
memcpy (dest, src, width * height + (width / 2) * (height / 2) * 2); gst_buffer_stamp (out, in);
memcpy (GST_BUFFER_DATA (out), GST_BUFFER_DATA (in),
MIN (GST_BUFFER_SIZE (in), GST_BUFFER_SIZE (out)));
walk = navtest->clicks; walk = navtest->clicks;
while (walk) { while (walk) {
ButtonClick *click = walk->data; ButtonClick *click = walk->data;
walk = g_slist_next (walk); walk = g_slist_next (walk);
draw_box_planar411 (dest, width, height, rint (click->x), draw_box_planar411 (GST_BUFFER_DATA (out), navtest->width, navtest->height,
rint (click->y), click->cy, click->cu, click->cv); rint (click->x), rint (click->y), click->cy, click->cu, click->cv);
if (--click->images_left < 1) { if (--click->images_left < 1) {
navtest->clicks = g_slist_remove (navtest->clicks, click); navtest->clicks = g_slist_remove (navtest->clicks, click);
g_free (click); g_free (click);
} }
} }
draw_box_planar411 (dest, width, height, rint (navtest->x), draw_box_planar411 (GST_BUFFER_DATA (out), navtest->width, navtest->height,
rint (navtest->y), 0, 128, 128); rint (navtest->x), rint (navtest->y), 0, 128, 128);
return ret;
} }
static GstStateChangeReturn static GstStateChangeReturn
@ -282,12 +245,6 @@ gst_navigationtest_change_state (GstElement * element,
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
GstNavigationtest *navtest = GST_NAVIGATIONTEST (element); GstNavigationtest *navtest = GST_NAVIGATIONTEST (element);
/* upwards state changes */
switch (transition) {
default:
break;
}
if (GST_ELEMENT_CLASS (parent_class)->change_state) if (GST_ELEMENT_CLASS (parent_class)->change_state)
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
@ -307,9 +264,84 @@ gst_navigationtest_change_state (GstElement * element,
return ret; return ret;
} }
static void
gst_navigationtest_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &navigationtest_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_navigationtest_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_navigationtest_src_template));
}
static void
gst_navigationtest_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
element_class->change_state =
GST_DEBUG_FUNCPTR (gst_navigationtest_change_state);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_navigationtest_set_caps);
trans_class->get_unit_size =
GST_DEBUG_FUNCPTR (gst_navigationtest_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_navigationtest_transform);
}
static void
gst_navigationtest_init (GTypeInstance * instance, gpointer g_class)
{
GstNavigationtest *navtest = GST_NAVIGATIONTEST (instance);
GstBaseTransform *btrans = GST_BASE_TRANSFORM (instance);
gst_pad_set_event_function (btrans->srcpad,
GST_DEBUG_FUNCPTR (gst_navigationtest_handle_src_event));
navtest->x = -1;
navtest->y = -1;
}
GType
gst_navigationtest_get_type (void)
{
static GType navigationtest_type = 0;
if (!navigationtest_type) {
static const GTypeInfo navigationtest_info = {
sizeof (GstNavigationtestClass),
gst_navigationtest_base_init,
NULL,
gst_navigationtest_class_init,
NULL,
NULL,
sizeof (GstNavigationtest),
0,
gst_navigationtest_init,
};
navigationtest_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstNavigationtest", &navigationtest_info, 0);
}
return navigationtest_type;
}
static gboolean static gboolean
plugin_init (GstPlugin * plugin) plugin_init (GstPlugin * plugin)
{ {
GST_DEBUG_CATEGORY_INIT (navigationtest_debug, "navigationtest", 0,
"navigationtest");
return gst_element_register (plugin, "navigationtest", GST_RANK_NONE, return gst_element_register (plugin, "navigationtest", GST_RANK_NONE,
GST_TYPE_NAVIGATIONTEST); GST_TYPE_NAVIGATIONTEST);
} }

View file

@ -21,12 +21,8 @@
#ifndef __GST_NAVIGATIONTEST_H__ #ifndef __GST_NAVIGATIONTEST_H__
#define __GST_NAVIGATIONTEST_H__ #define __GST_NAVIGATIONTEST_H__
#include <gst/gst.h>
#include "gstvideofilter.h" #include "gstvideofilter.h"
G_BEGIN_DECLS G_BEGIN_DECLS
#define GST_TYPE_NAVIGATIONTEST \ #define GST_TYPE_NAVIGATIONTEST \
@ -43,11 +39,21 @@ G_BEGIN_DECLS
typedef struct _GstNavigationtest GstNavigationtest; typedef struct _GstNavigationtest GstNavigationtest;
typedef struct _GstNavigationtestClass GstNavigationtestClass; typedef struct _GstNavigationtestClass GstNavigationtestClass;
typedef struct
{
gdouble x;
gdouble y;
gint images_left;
guint8 cy, cu, cv;
} ButtonClick;
struct _GstNavigationtest { struct _GstNavigationtest {
GstVideofilter videofilter; GstVideofilter videofilter;
double x; gint width, height;
double y;
GValue framerate;
gdouble x, y;
GSList *clicks; GSList *clicks;
}; };
@ -61,4 +67,3 @@ GType gst_navigationtest_get_type(void);
G_END_DECLS G_END_DECLS
#endif /* __GST_NAVIGATIONTEST_H__ */ #endif /* __GST_NAVIGATIONTEST_H__ */

View file

@ -5,10 +5,12 @@ libgsteffectv_la_SOURCES = \
gstshagadelic.c gstvertigo.c gstrev.c gstquark.c gstshagadelic.c gstvertigo.c gstrev.c gstquark.c
libgsteffectv_la_CFLAGS = \ libgsteffectv_la_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) \ $(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) \
$(GST_CFLAGS) \ $(GST_CFLAGS) \
-I$(top_srcdir)/gst/videofilter -I$(top_srcdir)/gst/videofilter
libgsteffectv_la_LIBADD = \ libgsteffectv_la_LIBADD = \
$(GST_PLUGINS_BASE_LIBS) \ $(GST_PLUGINS_BASE_LIBS) \
$(GST_BASE_LIBS) \
$(GST_LIBS) \ $(GST_LIBS) \
$(top_builddir)/gst/videofilter/libgstvideofilter-@GST_MAJORMINOR@.la $(top_builddir)/gst/videofilter/libgstvideofilter-@GST_MAJORMINOR@.la
libgsteffectv_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgsteffectv_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)

View file

@ -37,11 +37,13 @@
#include "config.h" #include "config.h"
#endif #endif
#include <gst/gst.h>
#include <gstvideofilter.h> #include <gstvideofilter.h>
#include <string.h> #include <string.h>
#include <math.h> #include <math.h>
#include <gst/video/video.h>
#define GST_TYPE_AGINGTV \ #define GST_TYPE_AGINGTV \
(gst_agingtv_get_type()) (gst_agingtv_get_type())
#define GST_AGINGTV(obj) \ #define GST_AGINGTV(obj) \
@ -89,128 +91,69 @@ struct _GstAgingTVClass
GstVideofilterClass parent_class; GstVideofilterClass parent_class;
}; };
/* GstAgingTV signals and args */ GType gst_agingtv_get_type (void);
enum
static GstElementDetails agingtv_details = GST_ELEMENT_DETAILS ("AgingTV",
"Filter/Effect/Video",
"AgingTV adds age to video input using scratches and dust",
"Sam Lantinga <slouken@devolution.com>");
static GstStaticPadTemplate gst_agingtv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstStaticPadTemplate gst_agingtv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_agingtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{ {
/* FILL ME */ GstAgingTV *filter = GST_AGINGTV (btrans);
LAST_SIGNAL GstStructure *structure;
}; gboolean ret = FALSE;
enum structure = gst_caps_get_structure (incaps, 0);
{
ARG_0
/* FILL ME */
};
static void gst_agingtv_base_init (gpointer g_class); if (gst_structure_get_int (structure, "width", &filter->width) &&
static void gst_agingtv_class_init (gpointer g_class, gpointer class_data); gst_structure_get_int (structure, "height", &filter->height)) {
static void gst_agingtv_init (GTypeInstance * instance, gpointer g_class); ret = TRUE;
static void gst_agingtv_setup (GstVideofilter * videofilter);
static void gst_agingtv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_agingtv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_agingtv_rgb32 (GstVideofilter * videofilter, void *d, void *s);
GType
gst_agingtv_get_type (void)
{
static GType agingtv_type = 0;
if (!agingtv_type) {
static const GTypeInfo agingtv_info = {
sizeof (GstAgingTVClass),
gst_agingtv_base_init,
NULL,
gst_agingtv_class_init,
NULL,
NULL,
sizeof (GstAgingTV),
0,
gst_agingtv_init,
};
agingtv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstAgingTV", &agingtv_info, 0);
}
return agingtv_type;
}
static GstVideofilterFormat gst_agingtv_formats[] = {
{"RGB ", 32, gst_agingtv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000}
};
static void
gst_agingtv_base_init (gpointer g_class)
{
static GstElementDetails agingtv_details = GST_ELEMENT_DETAILS ("AgingTV",
"Filter/Effect/Video",
"AgingTV adds age to video input using scratches and dust",
"Sam Lantinga <slouken@devolution.com>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &agingtv_details);
for (i = 0; i < G_N_ELEMENTS (gst_agingtv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_agingtv_formats + i);
} }
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); return ret;
} }
static void static gboolean
gst_agingtv_class_init (gpointer g_class, gpointer class_data) gst_agingtv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{ {
GObjectClass *gobject_class; GstAgingTV *filter;
GstVideofilterClass *videofilter_class; GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
gobject_class = G_OBJECT_CLASS (g_class); filter = GST_AGINGTV (btrans);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
gobject_class->set_property = gst_agingtv_set_property; structure = gst_caps_get_structure (caps, 0);
gobject_class->get_property = gst_agingtv_get_property;
#if 0 if (gst_structure_get_int (structure, "width", &width) &&
g_object_class_install_property (gobject_class, ARG_METHOD, gst_structure_get_int (structure, "height", &height)) {
g_param_spec_enum ("method", "method", "method", *size = width * height * 32 / 8;
GST_TYPE_AGINGTV_METHOD, GST_AGINGTV_METHOD_1, G_PARAM_READWRITE)); ret = TRUE;
#endif GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
videofilter_class->setup = gst_agingtv_setup; return ret;
}
static void
gst_agingtv_init (GTypeInstance * instance, gpointer g_class)
{
GstAgingTV *agingtv = GST_AGINGTV (instance);
GstVideofilter *videofilter;
GST_DEBUG ("gst_agingtv_init");
videofilter = GST_VIDEOFILTER (agingtv);
/* do stuff */
}
static void
gst_agingtv_setup (GstVideofilter * videofilter)
{
GstAgingTV *agingtv;
int width = gst_videofilter_get_input_width (videofilter);
int height = gst_videofilter_get_input_height (videofilter);
g_return_if_fail (GST_IS_AGINGTV (videofilter));
agingtv = GST_AGINGTV (videofilter);
/* if any setup needs to be done, do it here */
agingtv->width = width;
agingtv->height = height;
} }
static unsigned int static unsigned int
@ -359,69 +302,89 @@ pits (guint32 * dest, gint width, gint height, gint area_scale,
} }
} }
static void static GstFlowReturn
gst_agingtv_rgb32 (GstVideofilter * videofilter, void *d, void *s) gst_agingtv_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
{ {
GstAgingTV *agingtv; GstAgingTV *agingtv = GST_AGINGTV (trans);
int width = gst_videofilter_get_input_width (videofilter); gint width = agingtv->width;
int height = gst_videofilter_get_input_height (videofilter); gint height = agingtv->height;
int video_size = width * height; int video_size = width * height;
guint32 *src = s; guint32 *src = (guint32 *) GST_BUFFER_DATA (in);
guint32 *dest = d; guint32 *dest = (guint32 *) GST_BUFFER_DATA (out);
gint area_scale = width * height / 64 / 480; gint area_scale = width * height / 64 / 480;
GstFlowReturn ret = GST_FLOW_OK;
gst_buffer_stamp (out, in);
if (area_scale <= 0) if (area_scale <= 0)
area_scale = 1; area_scale = 1;
g_return_if_fail (GST_IS_AGINGTV (videofilter));
agingtv = GST_AGINGTV (videofilter);
coloraging (src, dest, video_size); coloraging (src, dest, video_size);
scratching (agingtv->scratches, agingtv->scratch_lines, dest, width, height); scratching (agingtv->scratches, agingtv->scratch_lines, dest, width, height);
pits (dest, width, height, area_scale, agingtv->pits_interval); pits (dest, width, height, area_scale, agingtv->pits_interval);
if (area_scale > 1) if (area_scale > 1)
dusts (dest, width, height, agingtv->dust_interval, area_scale); dusts (dest, width, height, agingtv->dust_interval, area_scale);
return ret;
} }
static void static void
gst_agingtv_set_property (GObject * object, guint prop_id, const GValue * value, gst_agingtv_base_init (gpointer g_class)
GParamSpec * pspec)
{ {
GstAgingTV *src; GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
g_return_if_fail (GST_IS_AGINGTV (object)); gst_element_class_set_details (element_class, &agingtv_details);
src = GST_AGINGTV (object);
GST_DEBUG ("gst_agingtv_set_property"); gst_element_class_add_pad_template (element_class,
switch (prop_id) { gst_static_pad_template_get (&gst_agingtv_sink_template));
#if 0 gst_element_class_add_pad_template (element_class,
case ARG_METHOD: gst_static_pad_template_get (&gst_agingtv_src_template));
src->method = g_value_get_enum (value);
break;
#endif
default:
break;
}
} }
static void static void
gst_agingtv_get_property (GObject * object, guint prop_id, GValue * value, gst_agingtv_class_init (gpointer klass, gpointer class_data)
GParamSpec * pspec)
{ {
GstAgingTV *src; GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
g_return_if_fail (GST_IS_AGINGTV (object)); gobject_class = (GObjectClass *) klass;
src = GST_AGINGTV (object); element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
switch (prop_id) { parent_class = g_type_class_peek_parent (klass);
#if 0
case ARG_METHOD: trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_agingtv_set_caps);
g_value_set_enum (value, src->method); trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_agingtv_get_unit_size);
break; trans_class->transform = GST_DEBUG_FUNCPTR (gst_agingtv_transform);
#endif }
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); static void
break; gst_agingtv_init (GTypeInstance * instance, gpointer g_class)
} {
}
GType
gst_agingtv_get_type (void)
{
static GType agingtv_type = 0;
if (!agingtv_type) {
static const GTypeInfo agingtv_info = {
sizeof (GstAgingTVClass),
gst_agingtv_base_init,
NULL,
gst_agingtv_class_init,
NULL,
NULL,
sizeof (GstAgingTV),
0,
gst_agingtv_init,
};
agingtv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstAgingTV", &agingtv_info, 0);
}
return agingtv_type;
} }

View file

@ -13,9 +13,13 @@
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"
#endif #endif
#include <gstvideofilter.h>
#include <string.h> #include <string.h>
#include <gst/gst.h> #include <gst/gst.h>
#include <gstvideofilter.h>
#include <gst/video/video.h>
#define GST_TYPE_DICETV \ #define GST_TYPE_DICETV \
(gst_dicetv_get_type()) (gst_dicetv_get_type())
@ -60,17 +64,34 @@ struct _GstDiceTV
struct _GstDiceTVClass struct _GstDiceTVClass
{ {
GstVideofilterClass parent_class; GstVideofilterClass parent_class;
void (*reset) (GstElement * element);
}; };
/* Filter signals and args */ GType gst_dicetv_get_type (void);
enum
{ static void gst_dicetv_create_map (GstDiceTV * filter);
/* FILL ME */
RESET_SIGNAL, static GstElementDetails gst_dicetv_details = GST_ELEMENT_DETAILS ("DiceTV",
LAST_SIGNAL "Filter/Effect/Video",
}; "'Dices' the screen up into many small squares",
"Wim Taymans <wim.taymans@chello.be>");
static GstStaticPadTemplate gst_dicetv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
);
static GstStaticPadTemplate gst_dicetv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
);
static GstVideofilterClass *parent_class = NULL;
enum enum
{ {
@ -78,146 +99,50 @@ enum
ARG_CUBE_BITS ARG_CUBE_BITS
}; };
static void gst_dicetv_base_init (gpointer g_class); static gboolean
static void gst_dicetv_class_init (gpointer g_class, gpointer class_data); gst_dicetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
static void gst_dicetv_init (GTypeInstance * instance, gpointer g_class); GstCaps * outcaps)
static void gst_dicetv_reset_handler (GstElement * elem);
static void gst_dicetv_create_map (GstDiceTV * filter);
static void gst_dicetv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_dicetv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_dicetv_setup (GstVideofilter * videofilter);
static void gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s);
static guint gst_dicetv_signals[LAST_SIGNAL] = { 0 };
GType
gst_dicetv_get_type (void)
{ {
static GType dicetv_type = 0; GstDiceTV *filter = GST_DICETV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
if (!dicetv_type) { structure = gst_caps_get_structure (incaps, 0);
static const GTypeInfo dicetv_info = {
sizeof (GstDiceTVClass),
gst_dicetv_base_init,
NULL,
(GClassInitFunc) gst_dicetv_class_init,
NULL,
NULL,
sizeof (GstDiceTV),
0,
(GInstanceInitFunc) gst_dicetv_init,
};
dicetv_type = if (gst_structure_get_int (structure, "width", &filter->width) &&
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstDiceTV", &dicetv_info, gst_structure_get_int (structure, "height", &filter->height)) {
0); g_free (filter->dicemap);
} filter->dicemap =
return dicetv_type; (gchar *) g_malloc (filter->height * filter->width * sizeof (char));
} gst_dicetv_create_map (filter);
ret = TRUE;
static GstVideofilterFormat gst_dicetv_formats[] = {
{"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x00ff0000, 0x0000ff00,
0x000000ff},
{"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0xff000000, 0x00ff0000,
0x0000ff00},
{"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x000000ff, 0x0000ff00,
0x00ff0000},
{"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000},
};
static void
gst_dicetv_base_init (gpointer g_class)
{
/* elementfactory information */
static GstElementDetails gst_dicetv_details = GST_ELEMENT_DETAILS ("DiceTV",
"Filter/Effect/Video",
"'Dices' the screen up into many small squares",
"Wim Taymans <wim.taymans@chello.be>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &gst_dicetv_details);
for (i = 0; i < G_N_ELEMENTS (gst_dicetv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_dicetv_formats + i);
} }
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); return ret;
} }
static void static gboolean
gst_dicetv_class_init (gpointer g_class, gpointer class_data) gst_dicetv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{ {
GObjectClass *gobject_class; GstDiceTV *filter;
GstVideofilterClass *videofilter_class; GstStructure *structure;
GstDiceTVClass *dicetv_class; gboolean ret = FALSE;
gint width, height;
gobject_class = G_OBJECT_CLASS (g_class); filter = GST_DICETV (btrans);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
dicetv_class = GST_DICETV_CLASS (g_class);
gst_dicetv_signals[RESET_SIGNAL] = structure = gst_caps_get_structure (caps, 0);
g_signal_new ("reset",
G_TYPE_FROM_CLASS (g_class),
G_SIGNAL_RUN_LAST,
G_STRUCT_OFFSET (GstDiceTVClass, reset),
NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
dicetv_class->reset = gst_dicetv_reset_handler; if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
gobject_class->set_property = gst_dicetv_set_property; return ret;
gobject_class->get_property = gst_dicetv_get_property;
g_object_class_install_property (gobject_class, ARG_CUBE_BITS,
g_param_spec_int ("square_bits", "Square Bits", "The size of the Squares",
MIN_CUBE_BITS, MAX_CUBE_BITS, DEFAULT_CUBE_BITS, G_PARAM_READWRITE));
videofilter_class->setup = gst_dicetv_setup;
}
static void
gst_dicetv_setup (GstVideofilter * videofilter)
{
GstDiceTV *dicetv;
g_return_if_fail (GST_IS_DICETV (videofilter));
dicetv = GST_DICETV (videofilter);
dicetv->width = gst_videofilter_get_input_width (videofilter);
dicetv->height = gst_videofilter_get_input_height (videofilter);
g_free (dicetv->dicemap);
dicetv->dicemap =
(gchar *) g_malloc (dicetv->height * dicetv->width * sizeof (char));
gst_dicetv_create_map (dicetv);
}
static void
gst_dicetv_init (GTypeInstance * instance, gpointer g_class)
{
GstDiceTV *filter = GST_DICETV (instance);
filter->dicemap = NULL;
filter->g_cube_bits = DEFAULT_CUBE_BITS;
filter->g_cube_size = 0;
filter->g_map_height = 0;
filter->g_map_width = 0;
}
static void
gst_dicetv_reset_handler (GstElement * element)
{
GstDiceTV *filter = GST_DICETV (element);
gst_dicetv_create_map (filter);
} }
static unsigned int static unsigned int
@ -228,23 +153,20 @@ fastrand (void)
return (fastrand_val = fastrand_val * 1103515245 + 12345); return (fastrand_val = fastrand_val * 1103515245 + 12345);
} }
static void static GstFlowReturn
gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s) gst_dicetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{ {
GstDiceTV *filter; GstDiceTV *filter;
guint32 *src; guint32 *src, *dest;
guint32 *dest; gint i, map_x, map_y, map_i, base, dx, dy, di;
gint i; gint video_width, g_cube_bits, g_cube_size;
gint map_x, map_y, map_i; GstFlowReturn ret = GST_FLOW_OK;
gint base;
gint dx, dy, di;
gint video_width;
gint g_cube_bits;
gint g_cube_size;
filter = GST_DICETV (videofilter); filter = GST_DICETV (trans);
src = (guint32 *) s; src = (guint32 *) GST_BUFFER_DATA (in);
dest = (guint32 *) d; dest = (guint32 *) GST_BUFFER_DATA (out);
gst_buffer_stamp (out, in);
video_width = filter->width; video_width = filter->width;
g_cube_bits = filter->g_cube_bits; g_cube_bits = filter->g_cube_bits;
@ -304,6 +226,8 @@ gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s)
map_i++; map_i++;
} }
} }
return ret;
} }
static void static void
@ -364,3 +288,78 @@ gst_dicetv_get_property (GObject * object, guint prop_id, GValue * value,
break; break;
} }
} }
static void
gst_dicetv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &gst_dicetv_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_dicetv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_dicetv_src_template));
}
static void
gst_dicetv_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_dicetv_set_property;
gobject_class->get_property = gst_dicetv_get_property;
g_object_class_install_property (gobject_class, ARG_CUBE_BITS,
g_param_spec_int ("square_bits", "Square Bits", "The size of the Squares",
MIN_CUBE_BITS, MAX_CUBE_BITS, DEFAULT_CUBE_BITS, G_PARAM_READWRITE));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_dicetv_set_caps);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_dicetv_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_dicetv_transform);
}
static void
gst_dicetv_init (GTypeInstance * instance, gpointer g_class)
{
GstDiceTV *filter = GST_DICETV (instance);
filter->dicemap = NULL;
filter->g_cube_bits = DEFAULT_CUBE_BITS;
filter->g_cube_size = 0;
filter->g_map_height = 0;
filter->g_map_width = 0;
}
GType
gst_dicetv_get_type (void)
{
static GType dicetv_type = 0;
if (!dicetv_type) {
static const GTypeInfo dicetv_info = {
sizeof (GstDiceTVClass),
gst_dicetv_base_init,
NULL,
(GClassInitFunc) gst_dicetv_class_init,
NULL,
NULL,
sizeof (GstDiceTV),
0,
(GInstanceInitFunc) gst_dicetv_init,
};
dicetv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstDiceTV", &dicetv_info,
0);
}
return dicetv_type;
}

View file

@ -24,10 +24,13 @@
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"
#endif #endif
#include <string.h>
#include <gst/gst.h>
#include <gstvideofilter.h> #include <gstvideofilter.h>
#include <string.h>
#include <gst/video/video.h>
#define GST_TYPE_EDGETV \ #define GST_TYPE_EDGETV \
(gst_edgetv_get_type()) (gst_edgetv_get_type())
#define GST_EDGETV(obj) \ #define GST_EDGETV(obj) \
@ -57,146 +60,97 @@ struct _GstEdgeTVClass
GstVideofilterClass parent_class; GstVideofilterClass parent_class;
}; };
/* Filter signals and args */ GType gst_edgetv_get_type (void);
enum
static GstElementDetails gst_edgetv_details = GST_ELEMENT_DETAILS ("EdgeTV",
"Filter/Effect/Video",
"Apply edge detect on video",
"Wim Taymans <wim.taymans@chello.be>");
static GstStaticPadTemplate gst_edgetv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstStaticPadTemplate gst_edgetv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_edgetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{ {
/* FILL ME */ GstEdgeTV *edgetv = GST_EDGETV (btrans);
LAST_SIGNAL GstStructure *structure;
}; gboolean ret = FALSE;
enum structure = gst_caps_get_structure (incaps, 0);
{
ARG_0
};
static void gst_edgetv_base_init (gpointer g_class); if (gst_structure_get_int (structure, "width", &edgetv->width) &&
static void gst_edgetv_class_init (gpointer g_class, gpointer class_data); gst_structure_get_int (structure, "height", &edgetv->height)) {
static void gst_edgetv_init (GTypeInstance * instance, gpointer g_class); edgetv->map_width = edgetv->width / 4;
edgetv->map_height = edgetv->height / 4;
edgetv->video_width_margin = edgetv->width % 4;
static void gst_edgetv_set_property (GObject * object, guint prop_id, g_free (edgetv->map);
const GValue * value, GParamSpec * pspec); edgetv->map =
static void gst_edgetv_get_property (GObject * object, guint prop_id, (guint32 *) g_malloc (edgetv->map_width * edgetv->map_height *
GValue * value, GParamSpec * pspec); sizeof (guint32) * 2);
memset (edgetv->map, 0,
static void gst_edgetv_setup (GstVideofilter * videofilter); edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2);
static void gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s); ret = TRUE;
/*static guint gst_edgetv_signals[LAST_SIGNAL] = { 0 }; */
GType
gst_edgetv_get_type (void)
{
static GType edgetv_type = 0;
if (!edgetv_type) {
static const GTypeInfo edgetv_info = {
sizeof (GstEdgeTVClass),
gst_edgetv_base_init,
NULL,
(GClassInitFunc) gst_edgetv_class_init,
NULL,
NULL,
sizeof (GstEdgeTV),
0,
(GInstanceInitFunc) gst_edgetv_init,
};
edgetv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstEdgeTV", &edgetv_info,
0);
}
return edgetv_type;
}
static GstVideofilterFormat gst_edgetv_formats[] = {
{"RGB ", 32, gst_edgetv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000}
};
static void
gst_edgetv_base_init (gpointer g_class)
{
/* elementfactory information */
static GstElementDetails gst_edgetv_details = GST_ELEMENT_DETAILS ("EdgeTV",
"Filter/Effect/Video",
"Apply edge detect on video",
"Wim Taymans <wim.taymans@chello.be>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &gst_edgetv_details);
for (i = 0; i < G_N_ELEMENTS (gst_edgetv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_edgetv_formats + i);
} }
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); return ret;
} }
static void static gboolean
gst_edgetv_class_init (gpointer g_class, gpointer class_data) gst_edgetv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
{ guint * size)
GObjectClass *gobject_class;
GstVideofilterClass *videofilter_class;
gobject_class = G_OBJECT_CLASS (g_class);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
gobject_class->set_property = gst_edgetv_set_property;
gobject_class->get_property = gst_edgetv_get_property;
videofilter_class->setup = gst_edgetv_setup;
}
static void
gst_edgetv_init (GTypeInstance * instance, gpointer g_class)
{
GstEdgeTV *edgetv = GST_EDGETV (instance);
edgetv->map = NULL;
}
static void
gst_edgetv_setup (GstVideofilter * videofilter)
{
GstEdgeTV *edgetv;
int width = gst_videofilter_get_input_width (videofilter);
int height = gst_videofilter_get_input_height (videofilter);
g_return_if_fail (GST_IS_EDGETV (videofilter));
edgetv = GST_EDGETV (videofilter);
edgetv->width = width;
edgetv->height = height;
edgetv->map_width = width / 4;
edgetv->map_height = height / 4;
edgetv->video_width_margin = width % 4;
g_free (edgetv->map);
edgetv->map =
(guint32 *) g_malloc (edgetv->map_width * edgetv->map_height *
sizeof (guint32) * 2);
memset (edgetv->map, 0,
edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2);
}
static void
gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
{ {
GstEdgeTV *filter; GstEdgeTV *filter;
int x, y; GstStructure *structure;
int r, g, b; gboolean ret = FALSE;
gint width, height;
filter = GST_EDGETV (btrans);
structure = gst_caps_get_structure (caps, 0);
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
return ret;
}
static GstFlowReturn
gst_edgetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
GstEdgeTV *filter;
gint x, y, r, g, b;
guint32 *src, *dest; guint32 *src, *dest;
guint32 p, q; guint32 p, q;
guint32 v0, v1, v2, v3; guint32 v0, v1, v2, v3;
GstFlowReturn ret = GST_FLOW_OK;
filter = GST_EDGETV (videofilter); filter = GST_EDGETV (trans);
src = (guint32 *) s; gst_buffer_stamp (out, in);
dest = (guint32 *) d;
src = (guint32 *) GST_BUFFER_DATA (in);
dest = (guint32 *) GST_BUFFER_DATA (out);
src += filter->width * 4 + 4; src += filter->width * 4 + 4;
dest += filter->width * 4 + 4; dest += filter->width * 4 + 4;
@ -207,7 +161,7 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
p = *src; p = *src;
q = *(src - 4); q = *(src - 4);
/* difference between the current pixel and right neighbor. */ /* difference between the current pixel and right neighbor. */
r = ((p & 0xff0000) - (q & 0xff0000)) >> 16; r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
g = ((p & 0xff00) - (q & 0xff00)) >> 8; g = ((p & 0xff00) - (q & 0xff00)) >> 8;
b = (p & 0xff) - (q & 0xff); b = (p & 0xff) - (q & 0xff);
@ -225,7 +179,7 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
b = 255; b = 255;
v2 = (r << 17) | (g << 9) | b; v2 = (r << 17) | (g << 9) | b;
/* difference between the current pixel and upper neighbor. */ /* difference between the current pixel and upper neighbor. */
q = *(src - filter->width * 4); q = *(src - filter->width * 4);
r = ((p & 0xff0000) - (q & 0xff0000)) >> 16; r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
g = ((p & 0xff00) - (q & 0xff00)) >> 8; g = ((p & 0xff00) - (q & 0xff00)) >> 8;
@ -275,37 +229,70 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
src += filter->width * 3 + 8 + filter->video_width_margin; src += filter->width * 3 + 8 + filter->video_width_margin;
dest += filter->width * 3 + 8 + filter->video_width_margin; dest += filter->width * 3 + 8 + filter->video_width_margin;
} }
return ret;
} }
static void static void
gst_edgetv_set_property (GObject * object, guint prop_id, const GValue * value, gst_edgetv_base_init (gpointer g_class)
GParamSpec * pspec)
{ {
GstEdgeTV *filter; GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
g_return_if_fail (GST_IS_EDGETV (object)); gst_element_class_set_details (element_class, &gst_edgetv_details);
filter = GST_EDGETV (object); gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_edgetv_sink_template));
switch (prop_id) { gst_element_class_add_pad_template (element_class,
default: gst_static_pad_template_get (&gst_edgetv_src_template));
break;
}
} }
static void static void
gst_edgetv_get_property (GObject * object, guint prop_id, GValue * value, gst_edgetv_class_init (gpointer klass, gpointer class_data)
GParamSpec * pspec)
{ {
GstEdgeTV *filter; GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
g_return_if_fail (GST_IS_EDGETV (object)); gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
filter = GST_EDGETV (object); parent_class = g_type_class_peek_parent (klass);
switch (prop_id) { trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_edgetv_set_caps);
default: trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_edgetv_get_unit_size);
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); trans_class->transform = GST_DEBUG_FUNCPTR (gst_edgetv_transform);
break; }
}
static void
gst_edgetv_init (GTypeInstance * instance, gpointer g_class)
{
GstEdgeTV *edgetv = GST_EDGETV (instance);
edgetv->map = NULL;
}
GType
gst_edgetv_get_type (void)
{
static GType edgetv_type = 0;
if (!edgetv_type) {
static const GTypeInfo edgetv_info = {
sizeof (GstEdgeTVClass),
gst_edgetv_base_init,
NULL,
(GClassInitFunc) gst_edgetv_class_init,
NULL,
NULL,
sizeof (GstEdgeTV),
0,
(GInstanceInitFunc) gst_edgetv_init,
};
edgetv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstEdgeTV", &edgetv_info,
0);
}
return edgetv_type;
} }

View file

@ -25,12 +25,8 @@
#include "config.h" #include "config.h"
#endif #endif
#include <string.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include "gsteffectv.h" #include "gsteffectv.h"
struct _elements_entry struct _elements_entry
{ {
gchar *name; gchar *name;
@ -38,31 +34,17 @@ struct _elements_entry
}; };
static struct _elements_entry _elements[] = { static struct _elements_entry _elements[] = {
{"edgeTV", gst_edgetv_get_type}, {"edgetv", gst_edgetv_get_type},
{"agingTV", gst_agingtv_get_type}, {"agingtv", gst_agingtv_get_type},
{"diceTV", gst_dicetv_get_type}, {"dicetv", gst_dicetv_get_type},
{"warpTV", gst_warptv_get_type}, {"warptv", gst_warptv_get_type},
{"shagadelicTV", gst_shagadelictv_get_type}, {"shagadelictv", gst_shagadelictv_get_type},
{"vertigoTV", gst_vertigotv_get_type}, {"vertigotv", gst_vertigotv_get_type},
{"revTV", gst_revtv_get_type}, {"revtv", gst_revtv_get_type},
{"quarkTV", gst_quarktv_get_type}, {"quarktv", gst_quarktv_get_type},
{NULL, 0}, {NULL, 0},
}; };
GstStaticPadTemplate gst_effectv_src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
);
GstStaticPadTemplate gst_effectv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
);
static gboolean static gboolean
plugin_init (GstPlugin * plugin) plugin_init (GstPlugin * plugin)
{ {

View file

@ -31,6 +31,3 @@ GType gst_shagadelictv_get_type (void);
GType gst_vertigotv_get_type (void); GType gst_vertigotv_get_type (void);
GType gst_revtv_get_type (void); GType gst_revtv_get_type (void);
GType gst_quarktv_get_type (void); GType gst_quarktv_get_type (void);
extern GstStaticPadTemplate gst_effectv_sink_template;
extern GstStaticPadTemplate gst_effectv_src_template;

View file

@ -24,10 +24,13 @@
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"
#endif #endif
#include <gstvideofilter.h>
#include <math.h> #include <math.h>
#include <string.h> #include <string.h>
#include <gst/gst.h>
#include "gsteffectv.h" #include <gst/video/video.h>
#define GST_TYPE_QUARKTV \ #define GST_TYPE_QUARKTV \
(gst_quarktv_get_type()) (gst_quarktv_get_type())
@ -40,7 +43,7 @@
#define GST_IS_QUARKTV_CLASS(obj) \ #define GST_IS_QUARKTV_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_QUARKTV)) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_QUARKTV))
/* number of frames of time-buffer. It should be a configurable parameter */ /* number of frames of time-buffer. It should be a configurable parameter */
/* This number also must be 2^n just for the speed. */ /* This number also must be 2^n just for the speed. */
#define PLANES 16 #define PLANES 16
@ -49,9 +52,7 @@ typedef struct _GstQuarkTVClass GstQuarkTVClass;
struct _GstQuarkTV struct _GstQuarkTV
{ {
GstElement element; GstVideofilter element;
GstPad *sinkpad, *srcpad;
gint width, height; gint width, height;
gint area; gint area;
@ -62,20 +63,7 @@ struct _GstQuarkTV
struct _GstQuarkTVClass struct _GstQuarkTVClass
{ {
GstElementClass parent_class; GstVideofilterClass parent_class;
};
/* elementfactory information */
static GstElementDetails gst_quarktv_details = GST_ELEMENT_DETAILS ("QuarkTV",
"Filter/Effect/Video",
"Motion dissolver",
"FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>");
/* Filter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
}; };
enum enum
@ -84,23 +72,71 @@ enum
ARG_PLANES ARG_PLANES
}; };
static void gst_quarktv_base_init (gpointer g_class); GType gst_quarktv_get_type (void);
static void gst_quarktv_class_init (GstQuarkTVClass * klass);
static void gst_quarktv_init (GstQuarkTV * filter);
static GstStateChangeReturn gst_quarktv_change_state (GstElement * element, static GstElementDetails quarktv_details = GST_ELEMENT_DETAILS ("QuarkTV",
GstStateChange transition); "Filter/Effect/Video",
"Motion dissolver",
"FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>");
static void gst_quarktv_set_property (GObject * object, guint prop_id, static GstStaticPadTemplate gst_quarktv_src_template =
const GValue * value, GParamSpec * pspec); GST_STATIC_PAD_TEMPLATE ("src",
static void gst_quarktv_get_property (GObject * object, guint prop_id, GST_PAD_SRC,
GValue * value, GParamSpec * pspec); GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
);
static GstFlowReturn gst_quarktv_chain (GstPad * pad, GstBuffer * buffer); static GstStaticPadTemplate gst_quarktv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
);
static GstElementClass *parent_class = NULL; static GstVideofilterClass *parent_class = NULL;
/* static guint gst_quarktv_signals[LAST_SIGNAL] = { 0 }; */ static gboolean
gst_quarktv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
GstQuarkTV *filter = GST_QUARKTV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
structure = gst_caps_get_structure (incaps, 0);
if (gst_structure_get_int (structure, "width", &filter->width) &&
gst_structure_get_int (structure, "height", &filter->height)) {
filter->area = filter->width * filter->height;
ret = TRUE;
}
return ret;
}
/* Report the size in bytes of one video frame for the given caps.
 * Returns FALSE if the caps carry no usable width/height. */
static gboolean
gst_quarktv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
    guint * size)
{
  GstQuarkTV *filter = GST_QUARKTV (btrans);
  GstStructure *s = gst_caps_get_structure (caps, 0);
  gint w, h;

  if (!gst_structure_get_int (s, "width", &w) ||
      !gst_structure_get_int (s, "height", &h))
    return FALSE;

  /* 32 bits per pixel (BGRx/RGBx) */
  *size = w * h * 32 / 8;
  GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size, w, h);

  return TRUE;
}
static inline guint32 static inline guint32
fastrand (void) fastrand (void)
@ -110,153 +146,29 @@ fastrand (void)
return (fastrand_val = fastrand_val * 1103515245 + 12345); return (fastrand_val = fastrand_val * 1103515245 + 12345);
} }
GType
gst_quarktv_get_type (void)
{
static GType quarktv_type = 0;
if (!quarktv_type) {
static const GTypeInfo quarktv_info = {
sizeof (GstQuarkTVClass),
gst_quarktv_base_init,
NULL,
(GClassInitFunc) gst_quarktv_class_init,
NULL,
NULL,
sizeof (GstQuarkTV),
0,
(GInstanceInitFunc) gst_quarktv_init,
};
quarktv_type =
g_type_register_static (GST_TYPE_ELEMENT, "GstQuarkTV", &quarktv_info,
0);
}
return quarktv_type;
}
static void
gst_quarktv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_effectv_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_effectv_sink_template));
gst_element_class_set_details (element_class, &gst_quarktv_details);
}
static void
gst_quarktv_class_init (GstQuarkTVClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
gobject_class->set_property = gst_quarktv_set_property;
gobject_class->get_property = gst_quarktv_get_property;
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_PLANES,
g_param_spec_int ("planes", "Planes", "Number of frames in the buffer",
1, 32, PLANES, G_PARAM_READWRITE));
gstelement_class->change_state = gst_quarktv_change_state;
}
static GstPadLinkReturn
gst_quarktv_link (GstPad * pad, GstPad * peer)
{
GstQuarkTV *filter;
GstPad *otherpad;
//gint i;
//GstStructure *structure;
//GstPadLinkReturn res;
filter = GST_QUARKTV (gst_pad_get_parent (pad));
g_return_val_if_fail (GST_IS_QUARKTV (filter), GST_PAD_LINK_REFUSED);
otherpad = (pad == filter->srcpad ? filter->sinkpad : filter->srcpad);
#if 0
res = gst_pad_try_set_caps (otherpad, caps);
if (GST_PAD_LINK_FAILED (res))
return res;
structure = gst_caps_get_structure (caps, 0);
if (!gst_structure_get_int (structure, "width", &filter->width) ||
!gst_structure_get_int (structure, "height", &filter->height))
return GST_PAD_LINK_REFUSED;
filter->area = filter->width * filter->height;
for (i = 0; i < filter->planes; i++) {
if (filter->planetable[i])
gst_buffer_unref (filter->planetable[i]);
filter->planetable[i] = NULL;
}
#endif
return GST_PAD_LINK_OK;
}
static void
gst_quarktv_init (GstQuarkTV * filter)
{
filter->sinkpad =
gst_pad_new_from_template (gst_static_pad_template_get
(&gst_effectv_sink_template), "sink");
//gst_pad_set_getcaps_function (filter->sinkpad, gst_pad_proxy_getcaps);
gst_pad_set_chain_function (filter->sinkpad, gst_quarktv_chain);
gst_pad_set_link_function (filter->sinkpad, gst_quarktv_link);
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
filter->srcpad =
gst_pad_new_from_template (gst_static_pad_template_get
(&gst_effectv_src_template), "src");
//gst_pad_set_getcaps_function (filter->srcpad, gst_pad_proxy_getcaps);
gst_pad_set_link_function (filter->srcpad, gst_quarktv_link);
gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
filter->planes = PLANES;
filter->current_plane = filter->planes - 1;
}
static GstFlowReturn static GstFlowReturn
gst_quarktv_chain (GstPad * pad, GstBuffer * buf) gst_quarktv_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
{ {
GstQuarkTV *filter; GstQuarkTV *filter;
guint32 *src, *dest;
GstBuffer *outbuf;
gint area; gint area;
GstFlowReturn ret; guint32 *src, *dest;
GstFlowReturn ret = GST_FLOW_OK;
filter = GST_QUARKTV (gst_pad_get_parent (pad)); filter = GST_QUARKTV (trans);
src = (guint32 *) GST_BUFFER_DATA (buf); gst_buffer_stamp (out, in);
area = filter->area; area = filter->area;
src = (guint32 *) GST_BUFFER_DATA (in);
ret = dest = (guint32 *) GST_BUFFER_DATA (out);
gst_pad_alloc_buffer (filter->srcpad, 0, area, GST_PAD_CAPS (pad),
&outbuf);
if (ret != GST_FLOW_OK)
goto no_buffer;
dest = (guint32 *) GST_BUFFER_DATA (outbuf);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
if (filter->planetable[filter->current_plane]) if (filter->planetable[filter->current_plane])
gst_buffer_unref (filter->planetable[filter->current_plane]); gst_buffer_unref (filter->planetable[filter->current_plane]);
filter->planetable[filter->current_plane] = buf; filter->planetable[filter->current_plane] = gst_buffer_ref (in);
/* For each pixel */
while (--area) { while (--area) {
GstBuffer *rand; GstBuffer *rand;
@ -265,27 +177,37 @@ gst_quarktv_chain (GstPad * pad, GstBuffer * buf)
filter->planetable[(filter->current_plane + filter->planetable[(filter->current_plane +
(fastrand () >> 24)) & (filter->planes - 1)]; (fastrand () >> 24)) & (filter->planes - 1)];
/* Copy the pixel from the random buffer to dest */
dest[area] = (rand ? ((guint32 *) GST_BUFFER_DATA (rand))[area] : 0); dest[area] = (rand ? ((guint32 *) GST_BUFFER_DATA (rand))[area] : 0);
} }
ret = gst_pad_push (filter->srcpad, outbuf);
filter->current_plane--; filter->current_plane--;
if (filter->current_plane < 0) if (filter->current_plane < 0)
filter->current_plane = filter->planes - 1; filter->current_plane = filter->planes - 1;
return ret; return ret;
no_buffer:
{
return ret;
}
} }
static GstStateChangeReturn static GstStateChangeReturn
gst_quarktv_change_state (GstElement * element, GstStateChange transition) gst_quarktv_change_state (GstElement * element, GstStateChange transition)
{ {
GstQuarkTV *filter = GST_QUARKTV (element); GstQuarkTV *filter = GST_QUARKTV (element);
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
switch (transition) {
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
filter->planetable =
(GstBuffer **) g_malloc (filter->planes * sizeof (GstBuffer *));
memset (filter->planetable, 0, filter->planes * sizeof (GstBuffer *));
break;
}
default:
break;
}
if (GST_ELEMENT_CLASS (parent_class)->change_state)
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) { switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY: case GST_STATE_CHANGE_PAUSED_TO_READY:
@ -301,18 +223,11 @@ gst_quarktv_change_state (GstElement * element, GstStateChange transition)
filter->planetable = NULL; filter->planetable = NULL;
break; break;
} }
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
filter->planetable =
(GstBuffer **) g_malloc (filter->planes * sizeof (GstBuffer *));
memset (filter->planetable, 0, filter->planes * sizeof (GstBuffer *));
break;
}
default: default:
break; break;
} }
return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); return ret;
} }
@ -377,3 +292,72 @@ gst_quarktv_get_property (GObject * object, guint prop_id, GValue * value,
break; break;
} }
} }
/* GObject base_init: registers the element's factory details and its
 * always-present src/sink pad templates (both BGRx/RGBx) on the
 * element class. */
static void
gst_quarktv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
/* factory metadata: name, klass, description, author */
gst_element_class_set_details (element_class, &quarktv_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_quarktv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_quarktv_src_template));
}
/* Class init: wires up the GObject property accessors, the element
 * state-change handler and the GstBaseTransform virtual methods. */
static void
gst_quarktv_class_init (gpointer klass, gpointer class_data)
{
  GObjectClass *gobject_class;
  GstElementClass *element_class;
  GstBaseTransformClass *trans_class;

  gobject_class = (GObjectClass *) klass;
  element_class = (GstElementClass *) klass;
  trans_class = (GstBaseTransformClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->set_property = GST_DEBUG_FUNCPTR (gst_quarktv_set_property);
  gobject_class->get_property = GST_DEBUG_FUNCPTR (gst_quarktv_get_property);

  /* The basetransform port dropped this registration while keeping the
   * set_property/get_property handlers for ARG_PLANES; without it the
   * "planes" property is invisible to g_object_set()/g_object_get().
   * Spec matches the pre-port code (range 1-32, default PLANES). */
  g_object_class_install_property (gobject_class, ARG_PLANES,
      g_param_spec_int ("planes", "Planes", "Number of frames in the buffer",
          1, 32, PLANES, G_PARAM_READWRITE));

  element_class->change_state = GST_DEBUG_FUNCPTR (gst_quarktv_change_state);

  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_quarktv_set_caps);
  trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_quarktv_get_unit_size);
  trans_class->transform = GST_DEBUG_FUNCPTR (gst_quarktv_transform);
}
/* Instance init: start with the default number of time-buffer planes
 * and point current_plane at the last slot (the transform function
 * counts it down and wraps it back to planes - 1). */
static void
gst_quarktv_init (GTypeInstance * instance, gpointer g_class)
{
GstQuarkTV *filter = GST_QUARKTV (instance);
filter->planes = PLANES;
filter->current_plane = filter->planes - 1;
}
GType
gst_quarktv_get_type (void)
{
static GType quarktv_type = 0;
if (!quarktv_type) {
static const GTypeInfo quarktv_info = {
sizeof (GstQuarkTVClass),
gst_quarktv_base_init,
NULL,
gst_quarktv_class_init,
NULL,
NULL,
sizeof (GstQuarkTV),
0,
gst_quarktv_init,
};
quarktv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstQuarkTV", &quarktv_info, 0);
}
return quarktv_type;
}

View file

@ -43,10 +43,13 @@
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"
#endif #endif
#include <gstvideofilter.h>
#include <math.h> #include <math.h>
#include <string.h> #include <string.h>
#include <gst/gst.h>
#include <gstvideofilter.h> #include <gst/video/video.h>
#define GST_TYPE_REVTV \ #define GST_TYPE_REVTV \
(gst_revtv_get_type()) (gst_revtv_get_type())
@ -78,15 +81,6 @@ struct _GstRevTV
struct _GstRevTVClass struct _GstRevTVClass
{ {
GstVideofilterClass parent_class; GstVideofilterClass parent_class;
void (*reset) (GstElement * element);
};
/* Filter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
}; };
enum enum
@ -97,132 +91,87 @@ enum
ARG_GAIN ARG_GAIN
}; };
static void gst_revtv_base_init (gpointer g_class); GType gst_revtv_get_type (void);
static void gst_revtv_class_init (gpointer g_class, gpointer class_data);
static void gst_revtv_init (GTypeInstance * instance, gpointer g_class);
static void gst_revtv_set_property (GObject * object, guint prop_id, static GstElementDetails gst_revtv_details = GST_ELEMENT_DETAILS ("RevTV",
const GValue * value, GParamSpec * pspec); "Filter/Effect/Video",
static void gst_revtv_get_property (GObject * object, guint prop_id, "A video waveform monitor for each line of video processed",
GValue * value, GParamSpec * pspec); "Wim Taymans <wim.taymans@chello.be>");
static void gst_revtv_setup (GstVideofilter * videofilter);
static void gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s);
/* static guint gst_revtv_signals[LAST_SIGNAL] = { 0 }; */ static GstStaticPadTemplate gst_revtv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
GType static GstStaticPadTemplate gst_revtv_sink_template =
gst_revtv_get_type (void) GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_revtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{ {
static GType revtv_type = 0; GstRevTV *filter = GST_REVTV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
if (!revtv_type) { structure = gst_caps_get_structure (incaps, 0);
static const GTypeInfo revtv_info = {
sizeof (GstRevTVClass),
gst_revtv_base_init,
NULL,
(GClassInitFunc) gst_revtv_class_init,
NULL,
NULL,
sizeof (GstRevTV),
0,
(GInstanceInitFunc) gst_revtv_init,
};
revtv_type = if (gst_structure_get_int (structure, "width", &filter->width) &&
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstRevTV", &revtv_info, gst_structure_get_int (structure, "height", &filter->height)) {
0); ret = TRUE;
}
return revtv_type;
}
static GstVideofilterFormat gst_revtv_formats[] = {
{"RGB ", 32, gst_revtv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000}
};
static void
gst_revtv_base_init (gpointer g_class)
{
/* elementfactory information */
static GstElementDetails gst_revtv_details = GST_ELEMENT_DETAILS ("RevTV",
"Filter/Effect/Video",
"A video waveform monitor for each line of video processed",
"Wim Taymans <wim.taymans@chello.be>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &gst_revtv_details);
for (i = 0; i < G_N_ELEMENTS (gst_revtv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class, gst_revtv_formats + i);
} }
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); return ret;
} }
static void static gboolean
gst_revtv_class_init (gpointer klass, gpointer class_data) gst_revtv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{ {
GObjectClass *gobject_class; GstRevTV *filter;
GstVideofilterClass *videofilter_class; GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
gobject_class = G_OBJECT_CLASS (klass); filter = GST_REVTV (btrans);
videofilter_class = GST_VIDEOFILTER_CLASS (klass);
gobject_class->set_property = gst_revtv_set_property; structure = gst_caps_get_structure (caps, 0);
gobject_class->get_property = gst_revtv_get_property;
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DELAY, if (gst_structure_get_int (structure, "width", &width) &&
g_param_spec_int ("delay", "Delay", "Delay in frames between updates", gst_structure_get_int (structure, "height", &height)) {
1, 100, 1, G_PARAM_READWRITE)); *size = width * height * 32 / 8;
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_LINESPACE, ret = TRUE;
g_param_spec_int ("linespace", "Linespace", "Control line spacing", GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
1, 100, 6, G_PARAM_READWRITE)); width, height);
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_GAIN, }
g_param_spec_int ("gain", "Gain", "Control gain",
1, 200, 50, G_PARAM_READWRITE));
videofilter_class->setup = gst_revtv_setup; return ret;
} }
static void static GstFlowReturn
gst_revtv_init (GTypeInstance * instance, gpointer g_class) gst_revtv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
GstRevTV *restv = GST_REVTV (instance);
restv->vgrabtime = 1;
restv->vgrab = 0;
restv->linespace = 6;
restv->vscale = 50;
}
static void
gst_revtv_setup (GstVideofilter * videofilter)
{
GstRevTV *revtv;
g_return_if_fail (GST_IS_REVTV (videofilter));
revtv = GST_REVTV (videofilter);
revtv->width = gst_videofilter_get_input_width (videofilter);
revtv->height = gst_videofilter_get_input_height (videofilter);
}
static void
gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
{ {
GstRevTV *filter; GstRevTV *filter;
guint32 *src, *dest; guint32 *src, *dest;
gint width, height; gint width, height;
guint32 *nsrc; guint32 *nsrc;
gint y, x, R, G, B, yval; gint y, x, R, G, B, yval;
GstFlowReturn ret = GST_FLOW_OK;
filter = GST_REVTV (videofilter); filter = GST_REVTV (trans);
src = (guint32 *) s; gst_buffer_stamp (out, in);
dest = (guint32 *) d;
src = (guint32 *) GST_BUFFER_DATA (in);
dest = (guint32 *) GST_BUFFER_DATA (out);
width = filter->width; width = filter->width;
height = filter->height; height = filter->height;
@ -247,6 +196,8 @@ gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
} }
} }
} }
return ret;
} }
static void static void
@ -299,3 +250,83 @@ gst_revtv_get_property (GObject * object, guint prop_id, GValue * value,
break; break;
} }
} }
/* GObject base_init: registers RevTV's factory details and its
 * always-present BGRx src/sink pad templates on the element class. */
static void
gst_revtv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &gst_revtv_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_revtv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_revtv_src_template));
}
/* Class init: installs the delay/linespace/gain properties and wires
 * up the GstBaseTransform virtual methods. */
static void
gst_revtv_class_init (gpointer klass, gpointer class_data)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *element_class = (GstElementClass *) klass;
  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  /* property accessors */
  gobject_class->set_property = gst_revtv_set_property;
  gobject_class->get_property = gst_revtv_get_property;

  /* tunables for the waveform rendering */
  g_object_class_install_property (gobject_class, ARG_DELAY,
      g_param_spec_int ("delay", "Delay", "Delay in frames between updates",
          1, 100, 1, G_PARAM_READWRITE));
  g_object_class_install_property (gobject_class, ARG_LINESPACE,
      g_param_spec_int ("linespace", "Linespace", "Control line spacing",
          1, 100, 6, G_PARAM_READWRITE));
  g_object_class_install_property (gobject_class, ARG_GAIN,
      g_param_spec_int ("gain", "Gain", "Control gain",
          1, 200, 50, G_PARAM_READWRITE));

  /* basetransform virtual methods */
  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_revtv_set_caps);
  trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_revtv_get_unit_size);
  trans_class->transform = GST_DEBUG_FUNCPTR (gst_revtv_transform);
}
static void
gst_revtv_init (GTypeInstance * instance, gpointer g_class)
{
  GstRevTV *revtv = GST_REVTV (instance);

  /* initial waveform state; linespace/vscale match the property
   * defaults installed in class_init */
  revtv->vgrabtime = 1;
  revtv->vgrab = 0;
  revtv->linespace = 6;
  revtv->vscale = 50;
}
/* Standard GObject get_type: register GstRevTV (derived from
 * GstVideofilter) exactly once and return the cached GType. */
GType
gst_revtv_get_type (void)
{
  static GType type = 0;

  if (type == 0) {
    static const GTypeInfo info = {
      sizeof (GstRevTVClass),
      gst_revtv_base_init,
      NULL,
      (GClassInitFunc) gst_revtv_class_init,
      NULL,
      NULL,
      sizeof (GstRevTV),
      0,
      (GInstanceInitFunc) gst_revtv_init,
    };

    type =
        g_type_register_static (GST_TYPE_VIDEOFILTER, "GstRevTV", &info, 0);
  }
  return type;
}

View file

@ -25,10 +25,13 @@
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"
#endif #endif
#include <gstvideofilter.h>
#include <math.h> #include <math.h>
#include <string.h> #include <string.h>
#include <gst/gst.h>
#include <gstvideofilter.h> #include <gst/video/video.h>
#define GST_TYPE_SHAGADELICTV \ #define GST_TYPE_SHAGADELICTV \
(gst_shagadelictv_get_type()) (gst_shagadelictv_get_type())
@ -64,135 +67,81 @@ struct _GstShagadelicTVClass
GstVideofilterClass parent_class; GstVideofilterClass parent_class;
}; };
/* Filter signals and args */ GType gst_shagadelictv_get_type (void);
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
ARG_0
};
static void gst_shagadelictv_base_init (gpointer g_class);
static void gst_shagadelictv_class_init (gpointer g_class, gpointer class_data);
static void gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class);
static void gst_shagadelic_initialize (GstShagadelicTV * filter); static void gst_shagadelic_initialize (GstShagadelicTV * filter);
static void gst_shagadelictv_set_property (GObject * object, guint prop_id, static GstElementDetails shagadelictv_details =
const GValue * value, GParamSpec * pspec); GST_ELEMENT_DETAILS ("ShagadelicTV",
static void gst_shagadelictv_get_property (GObject * object, guint prop_id, "Filter/Effect/Video",
GValue * value, GParamSpec * pspec); "Oh behave, ShagedelicTV makes images shagadelic!",
static void gst_shagadelictv_setup (GstVideofilter * videofilter); "Wim Taymans <wim.taymans@chello.be>");
static void gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d,
void *s);
/*static guint gst_shagadelictv_signals[LAST_SIGNAL] = { 0 }; */ static GstStaticPadTemplate gst_shagadelictv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
GType static GstStaticPadTemplate gst_shagadelictv_sink_template =
gst_shagadelictv_get_type (void) GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_shagadelictv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{ {
static GType shagadelictv_type = 0; GstShagadelicTV *filter = GST_SHAGADELICTV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
if (!shagadelictv_type) { structure = gst_caps_get_structure (incaps, 0);
static const GTypeInfo shagadelictv_info = {
sizeof (GstShagadelicTVClass),
gst_shagadelictv_base_init,
NULL,
(GClassInitFunc) gst_shagadelictv_class_init,
NULL,
NULL,
sizeof (GstShagadelicTV),
0,
(GInstanceInitFunc) gst_shagadelictv_init,
};
shagadelictv_type = if (gst_structure_get_int (structure, "width", &filter->width) &&
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstShagadelicTV", gst_structure_get_int (structure, "height", &filter->height)) {
&shagadelictv_info, 0); gint area = filter->width * filter->height;
}
return shagadelictv_type;
}
static GstVideofilterFormat gst_shagadelictv_formats[] = { g_free (filter->ripple);
{"RGB ", 32, gst_shagadelictv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000, g_free (filter->spiral);
0xff000000}
};
static void filter->ripple = (gchar *) g_malloc (area * 4);
gst_shagadelictv_base_init (gpointer g_class) filter->spiral = (gchar *) g_malloc (area);
{
/* elementfactory information */
static GstElementDetails gst_shagadelictv_details =
GST_ELEMENT_DETAILS ("ShagadelicTV",
"Filter/Effect/Video",
"Oh behave, ShagedelicTV makes images shagadelic!",
"Wim Taymans <wim.taymans@chello.be>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); gst_shagadelic_initialize (filter);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class); ret = TRUE;
int i;
gst_element_class_set_details (element_class, &gst_shagadelictv_details);
for (i = 0; i < G_N_ELEMENTS (gst_shagadelictv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_shagadelictv_formats + i);
} }
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); return ret;
} }
static void static gboolean
gst_shagadelictv_class_init (gpointer g_class, gpointer class_data) gst_shagadelictv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
{ guint * size)
GObjectClass *gobject_class;
GstVideofilterClass *videofilter_class;
gobject_class = G_OBJECT_CLASS (g_class);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
gobject_class->set_property = gst_shagadelictv_set_property;
gobject_class->get_property = gst_shagadelictv_get_property;
videofilter_class->setup = gst_shagadelictv_setup;
}
static void
gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class)
{
GstShagadelicTV *filter = GST_SHAGADELICTV (instance);
filter->ripple = NULL;
filter->spiral = NULL;
}
static void
gst_shagadelictv_setup (GstVideofilter * videofilter)
{ {
GstShagadelicTV *filter; GstShagadelicTV *filter;
int width = gst_videofilter_get_input_width (videofilter); GstStructure *structure;
int height = gst_videofilter_get_input_height (videofilter); gboolean ret = FALSE;
int area; gint width, height;
g_return_if_fail (GST_IS_SHAGADELICTV (videofilter)); filter = GST_SHAGADELICTV (btrans);
filter = GST_SHAGADELICTV (videofilter);
filter->width = width; structure = gst_caps_get_structure (caps, 0);
filter->height = height;
area = filter->width * filter->height; if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
g_free (filter->ripple); return ret;
g_free (filter->spiral);
filter->ripple = (gchar *) g_malloc (area * 4);
filter->spiral = (gchar *) g_malloc (area);
gst_shagadelic_initialize (filter);
} }
static unsigned int static unsigned int
@ -261,8 +210,9 @@ gst_shagadelic_initialize (GstShagadelicTV * filter)
filter->phase = 0; filter->phase = 0;
} }
static void static GstFlowReturn
gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s) gst_shagadelictv_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
{ {
GstShagadelicTV *filter; GstShagadelicTV *filter;
guint32 *src, *dest; guint32 *src, *dest;
@ -270,11 +220,14 @@ gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
guint32 v; guint32 v;
guchar r, g, b; guchar r, g, b;
gint width, height; gint width, height;
GstFlowReturn ret = GST_FLOW_OK;
filter = GST_SHAGADELICTV (videofilter); filter = GST_SHAGADELICTV (trans);
src = (guint32 *) s; gst_buffer_stamp (out, in);
dest = (guint32 *) d;
src = (guint32 *) GST_BUFFER_DATA (in);
dest = (guint32 *) GST_BUFFER_DATA (out);
width = filter->width; width = filter->width;
height = filter->height; height = filter->height;
@ -310,37 +263,72 @@ gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
filter->ry += filter->rvy; filter->ry += filter->rvy;
filter->bx += filter->bvx; filter->bx += filter->bvx;
filter->by += filter->bvy; filter->by += filter->bvy;
return ret;
} }
static void static void
gst_shagadelictv_set_property (GObject * object, guint prop_id, gst_shagadelictv_base_init (gpointer g_class)
const GValue * value, GParamSpec * pspec)
{ {
GstShagadelicTV *filter; GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
g_return_if_fail (GST_IS_SHAGADELICTV (object)); gst_element_class_set_details (element_class, &shagadelictv_details);
filter = GST_SHAGADELICTV (object); gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_shagadelictv_sink_template));
switch (prop_id) { gst_element_class_add_pad_template (element_class,
default: gst_static_pad_template_get (&gst_shagadelictv_src_template));
break;
}
} }
static void static void
gst_shagadelictv_get_property (GObject * object, guint prop_id, GValue * value, gst_shagadelictv_class_init (gpointer klass, gpointer class_data)
GParamSpec * pspec)
{ {
GstShagadelicTV *filter; GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
g_return_if_fail (GST_IS_SHAGADELICTV (object)); gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
filter = GST_SHAGADELICTV (object); parent_class = g_type_class_peek_parent (klass);
switch (prop_id) { trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_shagadelictv_set_caps);
default: trans_class->get_unit_size =
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); GST_DEBUG_FUNCPTR (gst_shagadelictv_get_unit_size);
break; trans_class->transform = GST_DEBUG_FUNCPTR (gst_shagadelictv_transform);
} }
/* Instance init: the ripple and spiral lookup tables are allocated in
 * set_caps once the frame size is known, so they start out NULL. */
static void
gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class)
{
GstShagadelicTV *filter = GST_SHAGADELICTV (instance);
filter->ripple = NULL;
filter->spiral = NULL;
}
GType
gst_shagadelictv_get_type (void)
{
static GType shagadelictv_type = 0;
if (!shagadelictv_type) {
static const GTypeInfo shagadelictv_info = {
sizeof (GstShagadelicTVClass),
gst_shagadelictv_base_init,
NULL,
(GClassInitFunc) gst_shagadelictv_class_init,
NULL,
NULL,
sizeof (GstShagadelicTV),
0,
(GInstanceInitFunc) gst_shagadelictv_init,
};
shagadelictv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstShagadelicTV",
&shagadelictv_info, 0);
}
return shagadelictv_type;
} }

View file

@ -25,10 +25,13 @@
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"
#endif #endif
#include <gstvideofilter.h>
#include <math.h> #include <math.h>
#include <string.h> #include <string.h>
#include <gst/gst.h>
#include <gstvideofilter.h> #include <gst/video/video.h>
#define GST_TYPE_VERTIGOTV \ #define GST_TYPE_VERTIGOTV \
(gst_vertigotv_get_type()) (gst_vertigotv_get_type())
@ -61,18 +64,11 @@ struct _GstVertigoTV
struct _GstVertigoTVClass struct _GstVertigoTVClass
{ {
GstVideofilterClass parent_class; GstVideofilterClass parent_class;
void (*reset) (GstElement * element);
}; };
GType gst_vertigotv_get_type (void);
/* Filter signals and args */ /* Filter signals and args */
enum
{
/* FILL ME */
RESET_SIGNAL,
LAST_SIGNAL
};
enum enum
{ {
ARG_0, ARG_0,
@ -80,152 +76,77 @@ enum
ARG_ZOOM_SPEED ARG_ZOOM_SPEED
}; };
static void gst_vertigotv_base_init (gpointer g_class); static GstElementDetails vertigotv_details = GST_ELEMENT_DETAILS ("VertigoTV",
static void gst_vertigotv_class_init (GstVertigoTVClass * klass, "Filter/Effect/Video",
gpointer class_data); "A loopback alpha blending effector with rotating and scaling",
static void gst_vertigotv_init (GTypeInstance * instance, gpointer g_class); "Wim Taymans <wim.taymans@chello.be>");
static void gst_vertigotv_setup (GstVideofilter * videofilter);
static void gst_vertigotv_reset_handler (GstElement * element); static GstStaticPadTemplate gst_vertigotv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
);
static void gst_vertigotv_set_property (GObject * object, guint prop_id, static GstStaticPadTemplate gst_vertigotv_sink_template =
const GValue * value, GParamSpec * pspec); GST_STATIC_PAD_TEMPLATE ("sink",
static void gst_vertigotv_get_property (GObject * object, guint prop_id, GST_PAD_SINK,
GValue * value, GParamSpec * pspec); GST_PAD_ALWAYS,
static void gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d, GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
void *s); );
static guint gst_vertigotv_signals[LAST_SIGNAL] = { 0 }; static GstVideofilterClass *parent_class = NULL;
GType static gboolean
gst_vertigotv_get_type (void) gst_vertigotv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{ {
static GType vertigotv_type = 0; GstVertigoTV *filter = GST_VERTIGOTV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
if (!vertigotv_type) { structure = gst_caps_get_structure (incaps, 0);
static const GTypeInfo vertigotv_info = {
sizeof (GstVertigoTVClass),
gst_vertigotv_base_init,
NULL,
(GClassInitFunc) gst_vertigotv_class_init,
NULL,
NULL,
sizeof (GstVertigoTV),
0,
(GInstanceInitFunc) gst_vertigotv_init,
};
vertigotv_type = if (gst_structure_get_int (structure, "width", &filter->width) &&
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstVertigoTV", gst_structure_get_int (structure, "height", &filter->height)) {
&vertigotv_info, 0); gint area = filter->width * filter->height;
}
return vertigotv_type;
}
static GstVideofilterFormat gst_vertigotv_formats[] = { g_free (filter->buffer);
{"RGB ", 32, gst_vertigotv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000, filter->buffer = (guint32 *) g_malloc (area * 2 * sizeof (guint32));
0xff000000}
};
static void memset (filter->buffer, 0, area * 2 * sizeof (guint32));
gst_vertigotv_base_init (gpointer g_class) filter->current_buffer = filter->buffer;
{ filter->alt_buffer = filter->buffer + area;
/* elementfactory information */ filter->phase = 0;
static GstElementDetails vertigotv_details = GST_ELEMENT_DETAILS ("VertigoTV",
"Filter/Effect/Video",
"A loopback alpha blending effector with rotating and scaling",
"Wim Taymans <wim.taymans@chello.be>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &vertigotv_details); ret = TRUE;
for (i = 0; i < G_N_ELEMENTS (gst_vertigotv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_vertigotv_formats + i);
} }
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); return ret;
} }
static void static gboolean
gst_vertigotv_class_init (GstVertigoTVClass * klass, gpointer class_data) gst_vertigotv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
{ guint * size)
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstVideofilterClass *videofilter_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
videofilter_class = GST_VIDEOFILTER_CLASS (klass);
gst_vertigotv_signals[RESET_SIGNAL] =
g_signal_new ("reset-parms",
G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
G_STRUCT_OFFSET (GstVertigoTVClass, reset),
NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
klass->reset = gst_vertigotv_reset_handler;
gobject_class->set_property = gst_vertigotv_set_property;
gobject_class->get_property = gst_vertigotv_get_property;
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SPEED,
g_param_spec_float ("speed", "Speed", "Control the speed of movement",
0.01, 100.0, 0.02, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ZOOM_SPEED,
g_param_spec_float ("zoom_speed", "Zoom Speed",
"Control the rate of zooming", 1.01, 1.1, 1.01, G_PARAM_READWRITE));
videofilter_class->setup = gst_vertigotv_setup;
}
static void
gst_vertigotv_reset_handler (GstElement * element)
{
GstVertigoTV *filter = GST_VERTIGOTV (element);
filter->phase = 0.0;
filter->phase_increment = 0.02;
filter->zoomrate = 1.01;
}
static void
gst_vertigotv_setup (GstVideofilter * videofilter)
{ {
GstVertigoTV *filter; GstVertigoTV *filter;
gint area; GstStructure *structure;
int width = gst_videofilter_get_input_width (videofilter); gboolean ret = FALSE;
int height = gst_videofilter_get_input_height (videofilter); gint width, height;
g_return_if_fail (GST_IS_VERTIGOTV (videofilter)); filter = GST_VERTIGOTV (btrans);
filter = GST_VERTIGOTV (videofilter);
filter->width = width; structure = gst_caps_get_structure (caps, 0);
filter->height = height;
area = width * height; if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height)) {
*size = width * height * 32 / 8;
ret = TRUE;
GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
g_free (filter->buffer); return ret;
filter->buffer = (guint32 *) g_malloc (area * 2 * sizeof (guint32));
memset (filter->buffer, 0, area * 2 * sizeof (guint32));
filter->current_buffer = filter->buffer;
filter->alt_buffer = filter->buffer + area;
filter->phase = 0;
}
static void
gst_vertigotv_init (GTypeInstance * instance, gpointer g_class)
{
GstVertigoTV *filter = GST_VERTIGOTV (instance);
filter->buffer = NULL;
filter->phase = 0.0;
filter->phase_increment = 0.02;
filter->zoomrate = 1.01;
} }
static void static void
@ -276,22 +197,22 @@ gst_vertigotv_set_parms (GstVertigoTV * filter)
filter->phase = 0; filter->phase = 0;
} }
static void static GstFlowReturn
gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d, void *s) gst_vertigotv_transform (GstBaseTransform * trans, GstBuffer * in,
GstBuffer * out)
{ {
GstVertigoTV *filter; GstVertigoTV *filter;
guint32 *src, *dest; guint32 *src, *dest, *p;
guint32 *p;
guint32 v; guint32 v;
gint x, y; gint x, y, ox, oy, i, width, height, area;
gint ox, oy; GstFlowReturn ret = GST_FLOW_OK;
gint i;
gint width, height, area;
filter = GST_VERTIGOTV (videofilter); filter = GST_VERTIGOTV (trans);
src = (guint32 *) s; gst_buffer_stamp (out, in);
dest = (guint32 *) d;
src = (guint32 *) GST_BUFFER_DATA (in);
dest = (guint32 *) GST_BUFFER_DATA (out);
width = filter->width; width = filter->width;
height = filter->height; height = filter->height;
@ -327,6 +248,8 @@ gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
p = filter->current_buffer; p = filter->current_buffer;
filter->current_buffer = filter->alt_buffer; filter->current_buffer = filter->alt_buffer;
filter->alt_buffer = p; filter->alt_buffer = p;
return ret;
} }
static void static void
@ -373,3 +296,80 @@ gst_vertigotv_get_property (GObject * object, guint prop_id, GValue * value,
break; break;
} }
} }
static void
gst_vertigotv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &vertigotv_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_vertigotv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_vertigotv_src_template));
}
static void
gst_vertigotv_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_vertigotv_set_property;
gobject_class->get_property = gst_vertigotv_get_property;
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SPEED,
g_param_spec_float ("speed", "Speed", "Control the speed of movement",
0.01, 100.0, 0.02, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ZOOM_SPEED,
g_param_spec_float ("zoom_speed", "Zoom Speed",
"Control the rate of zooming", 1.01, 1.1, 1.01, G_PARAM_READWRITE));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_vertigotv_set_caps);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_vertigotv_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_vertigotv_transform);
}
static void
gst_vertigotv_init (GTypeInstance * instance, gpointer g_class)
{
GstVertigoTV *filter = GST_VERTIGOTV (instance);
filter->buffer = NULL;
filter->phase = 0.0;
filter->phase_increment = 0.02;
filter->zoomrate = 1.01;
}
GType
gst_vertigotv_get_type (void)
{
static GType vertigotv_type = 0;
if (!vertigotv_type) {
static const GTypeInfo vertigotv_info = {
sizeof (GstVertigoTVClass),
gst_vertigotv_base_init,
NULL,
(GClassInitFunc) gst_vertigotv_class_init,
NULL,
NULL,
sizeof (GstVertigoTV),
0,
(GInstanceInitFunc) gst_vertigotv_init,
};
vertigotv_type =
g_type_register_static (GST_TYPE_VIDEOFILTER, "GstVertigoTV",
&vertigotv_info, 0);
}
return vertigotv_type;
}

View file

@ -37,17 +37,17 @@
#include "config.h" #include "config.h"
#endif #endif
#include <gst/gst.h>
#include <gstvideofilter.h> #include <gstvideofilter.h>
#include <string.h> #include <string.h>
#include <math.h> #include <math.h>
#include "gsteffectv.h"
#include <gst/video/video.h>
#ifndef M_PI #ifndef M_PI
#define M_PI 3.14159265358979323846 #define M_PI 3.14159265358979323846
#endif #endif
#define GST_TYPE_WARPTV \ #define GST_TYPE_WARPTV \
(gst_warptv_get_type()) (gst_warptv_get_type())
#define GST_WARPTV(obj) \ #define GST_WARPTV(obj) \
@ -79,196 +79,85 @@ struct _GstWarpTVClass
GstVideofilterClass parent_class; GstVideofilterClass parent_class;
}; };
GType gst_warptv_get_type (void);
/* GstWarpTV signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
ARG_0
/* FILL ME */
};
static void gst_warptv_base_init (gpointer g_class);
static void gst_warptv_class_init (gpointer g_class, gpointer class_data);
static void gst_warptv_init (GTypeInstance * instance, gpointer g_class);
static void gst_warptv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_warptv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_warptv_setup (GstVideofilter * videofilter);
static void initSinTable (GstWarpTV * filter); static void initSinTable (GstWarpTV * filter);
static void initOffsTable (GstWarpTV * filter); static void initOffsTable (GstWarpTV * filter);
static void initDistTable (GstWarpTV * filter); static void initDistTable (GstWarpTV * filter);
static void gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s);
GType static GstElementDetails warptv_details = GST_ELEMENT_DETAILS ("WarpTV",
gst_warptv_get_type (void) "Filter/Effect/Video",
"WarpTV does realtime goo'ing of the video input",
"Sam Lantinga <slouken@devolution.com>");
static GstStaticPadTemplate gst_warptv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
);
static GstStaticPadTemplate gst_warptv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
);
static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_warptv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{ {
static GType warptv_type = 0; GstWarpTV *filter = GST_WARPTV (btrans);
GstStructure *structure;
gboolean ret = FALSE;
if (!warptv_type) { structure = gst_caps_get_structure (incaps, 0);
static const GTypeInfo warptv_info = {
sizeof (GstWarpTVClass),
gst_warptv_base_init,
NULL,
gst_warptv_class_init,
NULL,
NULL,
sizeof (GstWarpTV),
0,
gst_warptv_init,
};
warptv_type = g_type_register_static (GST_TYPE_VIDEOFILTER, if (gst_structure_get_int (structure, "width", &filter->width) &&
"GstWarpTV", &warptv_info, 0); gst_structure_get_int (structure, "height", &filter->height)) {
} g_free (filter->disttable);
return warptv_type; g_free (filter->offstable);
}
static GstVideofilterFormat gst_warptv_formats[] = { filter->offstable = g_malloc (filter->height * sizeof (guint32));
{"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x00ff0000, 0x0000ff00, filter->disttable =
0x000000ff}, g_malloc (filter->width * filter->height * sizeof (guint32));
{"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0xff000000, 0x00ff0000,
0x0000ff00},
{"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x000000ff, 0x0000ff00,
0x00ff0000},
{"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
0xff000000},
};
static void initSinTable (filter);
gst_warptv_base_init (gpointer g_class) initOffsTable (filter);
{ initDistTable (filter);
static GstElementDetails warptv_details = GST_ELEMENT_DETAILS ("WarpTV", ret = TRUE;
"Filter/Effect/Video",
"WarpTV does realtime goo'ing of the video input",
"Sam Lantinga <slouken@devolution.com>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
int i;
gst_element_class_set_details (element_class, &warptv_details);
for (i = 0; i < G_N_ELEMENTS (gst_warptv_formats); i++) {
gst_videofilter_class_add_format (videofilter_class,
gst_warptv_formats + i);
} }
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); return ret;
} }
static void static gboolean
gst_warptv_class_init (gpointer g_class, gpointer class_data) gst_warptv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
guint * size)
{ {
GObjectClass *gobject_class; GstWarpTV *filter;
GstVideofilterClass *videofilter_class; GstStructure *structure;
gboolean ret = FALSE;
gint width, height;
gobject_class = G_OBJECT_CLASS (g_class); filter = GST_WARPTV (btrans);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
gobject_class->set_property = gst_warptv_set_property; structure = gst_caps_get_structure (caps, 0);
gobject_class->get_property = gst_warptv_get_property;
#if 0 if (gst_structure_get_int (structure, "width", &width) &&
g_object_class_install_property (gobject_class, ARG_METHOD, gst_structure_get_int (structure, "height", &height)) {
g_param_spec_enum ("method", "method", "method", *size = width * height * 32 / 8;
GST_TYPE_WARPTV_METHOD, GST_WARPTV_METHOD_1, G_PARAM_READWRITE)); ret = TRUE;
#endif GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
width, height);
videofilter_class->setup = gst_warptv_setup;
}
static void
gst_warptv_init (GTypeInstance * instance, gpointer g_class)
{
GstWarpTV *warptv = GST_WARPTV (instance);
GstVideofilter *videofilter;
GST_DEBUG ("gst_warptv_init");
videofilter = GST_VIDEOFILTER (warptv);
/* do stuff */
}
static void
gst_warptv_set_property (GObject * object, guint prop_id, const GValue * value,
GParamSpec * pspec)
{
GstWarpTV *src;
g_return_if_fail (GST_IS_WARPTV (object));
src = GST_WARPTV (object);
GST_DEBUG ("gst_warptv_set_property");
switch (prop_id) {
#if 0
case ARG_METHOD:
src->method = g_value_get_enum (value);
break;
#endif
default:
break;
} }
}
static void return ret;
gst_warptv_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstWarpTV *src;
g_return_if_fail (GST_IS_WARPTV (object));
src = GST_WARPTV (object);
switch (prop_id) {
#if 0
case ARG_METHOD:
g_value_set_enum (value, src->method);
break;
#endif
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_warptv_setup (GstVideofilter * videofilter)
{
GstWarpTV *warptv;
int width = gst_videofilter_get_input_width (videofilter);
int height = gst_videofilter_get_input_height (videofilter);
g_return_if_fail (GST_IS_WARPTV (videofilter));
warptv = GST_WARPTV (videofilter);
/* if any setup needs to be done, do it here */
warptv->width = width;
warptv->height = height;
#if 0
/* FIXME this should be reset in PAUSE->READY, not here */
warptv->tval = 0;
#endif
g_free (warptv->disttable);
g_free (warptv->offstable);
warptv->offstable = g_malloc (height * sizeof (guint32));
warptv->disttable = g_malloc (width * height * sizeof (guint32));
initSinTable (warptv);
initOffsTable (warptv);
initDistTable (warptv);
} }
static void static void
@ -323,21 +212,21 @@ initDistTable (GstWarpTV * filter)
#endif #endif
} }
static void static GstFlowReturn
gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s) gst_warptv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{ {
GstWarpTV *warptv; GstWarpTV *warptv = GST_WARPTV (trans);
int width = gst_videofilter_get_input_width (videofilter); int width = warptv->width;
int height = gst_videofilter_get_input_height (videofilter); int height = warptv->height;
guint32 *src = s; guint32 *src = (guint32 *) GST_BUFFER_DATA (in);
guint32 *dest = d; guint32 *dest = (guint32 *) GST_BUFFER_DATA (out);
gint xw, yw, cw; gint xw, yw, cw;
gint32 c, i, x, y, dx, dy, maxx, maxy; gint32 c, i, x, y, dx, dy, maxx, maxy;
gint32 skip, *ctptr, *distptr; gint32 skip, *ctptr, *distptr;
gint32 *sintable, *ctable; gint32 *sintable, *ctable;
GstFlowReturn ret = GST_FLOW_OK;
g_return_if_fail (GST_IS_WARPTV (videofilter)); gst_buffer_stamp (out, in);
warptv = GST_WARPTV (videofilter);
xw = (gint) (sin ((warptv->tval + 100) * M_PI / 128) * 30); xw = (gint) (sin ((warptv->tval + 100) * M_PI / 128) * 30);
yw = (gint) (sin ((warptv->tval) * M_PI / 256) * -35); yw = (gint) (sin ((warptv->tval) * M_PI / 256) * -35);
@ -383,4 +272,66 @@ gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
} }
warptv->tval = (warptv->tval + 1) & 511; warptv->tval = (warptv->tval + 1) & 511;
return ret;
}
static void
gst_warptv_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &warptv_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_warptv_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_warptv_src_template));
}
static void
gst_warptv_class_init (gpointer klass, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseTransformClass *trans_class;
gobject_class = (GObjectClass *) klass;
element_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_peek_parent (klass);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_warptv_set_caps);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_warptv_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_warptv_transform);
}
static void
gst_warptv_init (GTypeInstance * instance, gpointer g_class)
{
}
GType
gst_warptv_get_type (void)
{
static GType warptv_type = 0;
if (!warptv_type) {
static const GTypeInfo warptv_info = {
sizeof (GstWarpTVClass),
gst_warptv_base_init,
NULL,
gst_warptv_class_init,
NULL,
NULL,
sizeof (GstWarpTV),
0,
gst_warptv_init,
};
warptv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstWarpTV", &warptv_info, 0);
}
return warptv_type;
} }

View file

@ -1,5 +1,5 @@
lib_LTLIBRARIES = libgstvideofilter-@GST_MAJORMINOR@.la lib_LTLIBRARIES = libgstvideofilter-@GST_MAJORMINOR@.la
noinst_LTLIBRARIES = libgstvideoexample.la # noinst_LTLIBRARIES = libgstvideoexample.la
plugin_LTLIBRARIES = libgstvideoflip.la plugin_LTLIBRARIES = libgstvideoflip.la
noinst_HEADERS = gstvideofilter.h gstvideoflip.h noinst_HEADERS = gstvideofilter.h gstvideoflip.h
@ -11,14 +11,16 @@ libgstvideofilter_@GST_MAJORMINOR@_la_SOURCES = gstvideofilter.c gstvideofilter.
libgstvideofilter_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS) libgstvideofilter_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS)
libgstvideofilter_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIBS) libgstvideofilter_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIBS)
libgstvideoexample_la_SOURCES = gstvideoexample.c # libgstvideoexample_la_SOURCES = gstvideoexample.c
libgstvideoexample_la_CFLAGS = $(GST_CFLAGS) # libgstvideoexample_la_CFLAGS = $(GST_CFLAGS)
libgstvideoexample_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS) # libgstvideoexample_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS)
libgstvideoexample_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) # libgstvideoexample_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvideoflip_la_SOURCES = gstvideoflip.c libgstvideoflip_la_SOURCES = gstvideoflip.c
libgstvideoflip_la_CFLAGS = $(GST_CFLAGS) libgstvideoflip_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \
libgstvideoflip_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS) $(GST_PLUGINS_BASE_CFLAGS)
libgstvideoflip_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS) \
$(GST_BASE_LIBS) $(GST_PLUGINS_BASE_LIBS)
libgstvideoflip_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstvideoflip_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
# libgstvideobalance_la_SOURCES = gstvideobalance.c # libgstvideobalance_la_SOURCES = gstvideobalance.c

View file

@ -27,7 +27,6 @@
#include "config.h" #include "config.h"
#endif #endif
/*#define DEBUG_ENABLED */
#include "gstvideobalance.h" #include "gstvideobalance.h"
#ifdef HAVE_LIBOIL #ifdef HAVE_LIBOIL
#include <liboil/liboil.h> #include <liboil/liboil.h>
@ -38,12 +37,6 @@
#include <gst/colorbalance/colorbalance.h> #include <gst/colorbalance/colorbalance.h>
/* GstVideobalance signals and args */ /* GstVideobalance signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum enum
{ {
ARG_0, ARG_0,
@ -117,11 +110,6 @@ gst_videobalance_get_type (void)
return videobalance_type; return videobalance_type;
} }
static GstVideofilterFormat gst_videobalance_formats[] = {
{"I420", 12, gst_videobalance_planar411,},
};
static void static void
gst_videobalance_base_init (gpointer g_class) gst_videobalance_base_init (gpointer g_class)
{ {

View file

@ -21,12 +21,8 @@
#ifndef __GST_VIDEOBALANCE_H__ #ifndef __GST_VIDEOBALANCE_H__
#define __GST_VIDEOBALANCE_H__ #define __GST_VIDEOBALANCE_H__
#include <gst/gst.h>
#include "gstvideofilter.h" #include "gstvideofilter.h"
G_BEGIN_DECLS G_BEGIN_DECLS
#define GST_TYPE_VIDEOBALANCE \ #define GST_TYPE_VIDEOBALANCE \

View file

@ -22,40 +22,15 @@
#include "config.h" #include "config.h"
#endif #endif
#include <string.h>
/*#define DEBUG_ENABLED */
#include "gstvideofilter.h" #include "gstvideofilter.h"
GST_DEBUG_CATEGORY_STATIC (gst_videofilter_debug); GST_DEBUG_CATEGORY_STATIC (gst_videofilter_debug);
#define GST_CAT_DEFAULT gst_videofilter_debug #define GST_CAT_DEFAULT gst_videofilter_debug
/* GstVideofilter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
ARG_0,
ARG_METHOD
/* FILL ME */
};
static void gst_videofilter_base_init (gpointer g_class);
static void gst_videofilter_class_init (gpointer g_class, gpointer class_data); static void gst_videofilter_class_init (gpointer g_class, gpointer class_data);
static void gst_videofilter_init (GTypeInstance * instance, gpointer g_class); static void gst_videofilter_init (GTypeInstance * instance, gpointer g_class);
static void gst_videofilter_set_property (GObject * object, guint prop_id, static GstBaseTransformClass *parent_class = NULL;
const GValue * value, GParamSpec * pspec);
static void gst_videofilter_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstFlowReturn gst_videofilter_chain (GstPad * pad, GstBuffer * buffer);
GstCaps *gst_videofilter_class_get_capslist (GstVideofilterClass * klass);
static GstElementClass *parent_class = NULL;
GType GType
gst_videofilter_get_type (void) gst_videofilter_get_type (void)
@ -65,7 +40,7 @@ gst_videofilter_get_type (void)
if (!videofilter_type) { if (!videofilter_type) {
static const GTypeInfo videofilter_info = { static const GTypeInfo videofilter_info = {
sizeof (GstVideofilterClass), sizeof (GstVideofilterClass),
gst_videofilter_base_init, NULL,
NULL, NULL,
gst_videofilter_class_init, gst_videofilter_class_init,
NULL, NULL,
@ -75,484 +50,37 @@ gst_videofilter_get_type (void)
gst_videofilter_init, gst_videofilter_init,
}; };
videofilter_type = g_type_register_static (GST_TYPE_ELEMENT, videofilter_type = g_type_register_static (GST_TYPE_BASE_TRANSFORM,
"GstVideofilter", &videofilter_info, G_TYPE_FLAG_ABSTRACT); "GstVideofilter", &videofilter_info, G_TYPE_FLAG_ABSTRACT);
} }
return videofilter_type; return videofilter_type;
} }
static void
gst_videofilter_base_init (gpointer g_class)
{
static GstElementDetails videofilter_details = {
"Video scaler",
"Filter/Effect/Video",
"Resizes video",
"David Schleef <ds@schleef.org>"
};
GstVideofilterClass *klass = (GstVideofilterClass *) g_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
klass->formats = g_ptr_array_new ();
gst_element_class_set_details (element_class, &videofilter_details);
}
static void static void
gst_videofilter_class_init (gpointer g_class, gpointer class_data) gst_videofilter_class_init (gpointer g_class, gpointer class_data)
{ {
GObjectClass *gobject_class; GObjectClass *gobject_class;
GstElementClass *gstelement_class; GstElementClass *gstelement_class;
GstBaseTransformClass *trans_class;
GstVideofilterClass *klass; GstVideofilterClass *klass;
klass = (GstVideofilterClass *) g_class; klass = (GstVideofilterClass *) g_class;
gobject_class = (GObjectClass *) klass; gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass; gstelement_class = (GstElementClass *) klass;
trans_class = (GstBaseTransformClass *) klass;
parent_class = g_type_class_ref (GST_TYPE_ELEMENT); parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_videofilter_set_property;
gobject_class->get_property = gst_videofilter_get_property;
GST_DEBUG_CATEGORY_INIT (gst_videofilter_debug, "videofilter", 0, GST_DEBUG_CATEGORY_INIT (gst_videofilter_debug, "videofilter", 0,
"videofilter"); "videofilter");
} }
static GstStructure *
gst_videofilter_format_get_structure (GstVideofilterFormat * format)
{
unsigned int fourcc;
GstStructure *structure;
if (format->filter_func == NULL)
return NULL;
fourcc =
GST_MAKE_FOURCC (format->fourcc[0], format->fourcc[1], format->fourcc[2],
format->fourcc[3]);
if (format->depth) {
structure = gst_structure_new ("video/x-raw-rgb",
"depth", G_TYPE_INT, format->depth,
"bpp", G_TYPE_INT, format->bpp,
"endianness", G_TYPE_INT, format->endianness,
"red_mask", G_TYPE_INT, format->red_mask,
"green_mask", G_TYPE_INT, format->green_mask,
"blue_mask", G_TYPE_INT, format->blue_mask, NULL);
} else {
structure = gst_structure_new ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, fourcc, NULL);
}
gst_structure_set (structure,
"height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
"width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
"framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
return structure;
}
GstCaps *
gst_videofilter_class_get_capslist (GstVideofilterClass * klass)
{
GstCaps *caps;
GstStructure *structure;
int i;
caps = gst_caps_new_empty ();
for (i = 0; i < klass->formats->len; i++) {
structure =
gst_videofilter_format_get_structure (g_ptr_array_index (klass->formats,
i));
gst_caps_append_structure (caps, structure);
}
return caps;
}
static GstCaps *
gst_videofilter_getcaps (GstPad * pad)
{
GstVideofilter *videofilter;
GstVideofilterClass *klass;
GstCaps *caps;
GstPad *peer;
int i;
videofilter = GST_VIDEOFILTER (GST_PAD_PARENT (pad));
GST_DEBUG_OBJECT (videofilter, "gst_videofilter_getcaps");
klass = GST_VIDEOFILTER_CLASS (G_OBJECT_GET_CLASS (videofilter));
/* we can handle anything that was registered */
caps = gst_caps_new_empty ();
for (i = 0; i < klass->formats->len; i++) {
GstCaps *fromcaps;
fromcaps =
gst_caps_new_full (gst_videofilter_format_get_structure
(g_ptr_array_index (klass->formats, i)), NULL);
gst_caps_append (caps, fromcaps);
}
peer = gst_pad_get_peer (pad);
if (peer) {
GstCaps *peercaps;
peercaps = gst_pad_get_caps (peer);
if (peercaps) {
GstCaps *icaps;
icaps = gst_caps_intersect (peercaps, caps);
gst_caps_unref (peercaps);
gst_caps_unref (caps);
caps = icaps;
}
//gst_object_unref (peer);
}
return caps;
}
static gboolean
gst_videofilter_setcaps (GstPad * pad, GstCaps * caps)
{
GstVideofilter *videofilter;
GstStructure *structure;
int width, height;
const GValue *framerate;
int ret;
videofilter = GST_VIDEOFILTER (GST_PAD_PARENT (pad));
structure = gst_caps_get_structure (caps, 0);
videofilter->format =
gst_videofilter_find_format_by_structure (videofilter, structure);
g_return_val_if_fail (videofilter->format, GST_PAD_LINK_REFUSED);
ret = gst_structure_get_int (structure, "width", &width);
ret &= gst_structure_get_int (structure, "height", &height);
framerate = gst_structure_get_value (structure, "framerate");
ret &= (framerate != NULL && GST_VALUE_HOLDS_FRACTION (framerate));
if (!ret)
return FALSE;
gst_pad_set_caps (videofilter->srcpad, caps);
GST_DEBUG_OBJECT (videofilter, "width %d height %d", width, height);
#if 0
if (pad == videofilter->srcpad) {
videofilter->to_width = width;
videofilter->to_height = height;
} else {
videofilter->from_width = width;
videofilter->from_height = height;
}
#endif
videofilter->to_width = width;
videofilter->to_height = height;
videofilter->from_width = width;
videofilter->from_height = height;
g_value_copy (framerate, &videofilter->framerate);
gst_videofilter_setup (videofilter);
return TRUE;
}
static void static void
gst_videofilter_init (GTypeInstance * instance, gpointer g_class) gst_videofilter_init (GTypeInstance * instance, gpointer g_class)
{ {
GstVideofilter *videofilter = GST_VIDEOFILTER (instance); GstVideofilter *videofilter = GST_VIDEOFILTER (instance);
GstPadTemplate *pad_template;
GST_DEBUG_OBJECT (videofilter, "gst_videofilter_init"); GST_DEBUG_OBJECT (videofilter, "gst_videofilter_init");
pad_template =
gst_element_class_get_pad_template (GST_ELEMENT_CLASS (g_class), "sink");
g_return_if_fail (pad_template != NULL);
videofilter->sinkpad = gst_pad_new_from_template (pad_template, "sink");
gst_element_add_pad (GST_ELEMENT (videofilter), videofilter->sinkpad);
gst_pad_set_chain_function (videofilter->sinkpad, gst_videofilter_chain);
gst_pad_set_setcaps_function (videofilter->sinkpad, gst_videofilter_setcaps);
gst_pad_set_getcaps_function (videofilter->sinkpad, gst_videofilter_getcaps);
pad_template =
gst_element_class_get_pad_template (GST_ELEMENT_CLASS (g_class), "src");
g_return_if_fail (pad_template != NULL);
videofilter->srcpad = gst_pad_new_from_template (pad_template, "src");
gst_element_add_pad (GST_ELEMENT (videofilter), videofilter->srcpad);
gst_pad_set_getcaps_function (videofilter->srcpad, gst_videofilter_getcaps);
videofilter->inited = FALSE; videofilter->inited = FALSE;
g_value_init (&videofilter->framerate, GST_TYPE_FRACTION);
}
/* Sink-pad chain function: allocates an output buffer, runs the
 * negotiated format's filter_func over the input, and pushes the result
 * downstream.
 *
 * A chain function takes ownership of @buf, so every error path must
 * unref it — the previous version leaked the input buffer on all
 * failure returns. */
static GstFlowReturn
gst_videofilter_chain (GstPad * pad, GstBuffer * buf)
{
  GstVideofilter *videofilter;
  guchar *data;
  gulong size;
  GstBuffer *outbuf;
  GstFlowReturn ret;

  videofilter = GST_VIDEOFILTER (GST_PAD_PARENT (pad));
  GST_DEBUG_OBJECT (videofilter, "gst_videofilter_chain");

  /* passthru mode hands the input buffer straight downstream
   * (ownership transfers to the push) */
  if (videofilter->passthru) {
    return gst_pad_push (videofilter->srcpad, buf);
  }

  if (GST_PAD_CAPS (pad) == NULL) {
    gst_buffer_unref (buf);     /* was leaked before */
    return GST_FLOW_NOT_NEGOTIATED;
  }

  data = GST_BUFFER_DATA (buf);
  size = GST_BUFFER_SIZE (buf);

  /* %lu: size is a gulong (unsigned) */
  GST_LOG_OBJECT (videofilter, "got buffer of %lu bytes in '%s'", size,
      GST_OBJECT_NAME (videofilter));
  GST_LOG_OBJECT (videofilter,
      "size=%lu from=%dx%d to=%dx%d fromsize=%lu (should be %d) tosize=%d",
      size, videofilter->from_width, videofilter->from_height,
      videofilter->to_width, videofilter->to_height, size,
      videofilter->from_buf_size, videofilter->to_buf_size);

  if (size > videofilter->from_buf_size) {
    GST_INFO_OBJECT (videofilter, "buffer size %lu larger than expected (%d)",
        size, videofilter->from_buf_size);
    gst_buffer_unref (buf);     /* was leaked before */
    return GST_FLOW_ERROR;
  }

  ret = gst_pad_alloc_buffer (videofilter->srcpad, GST_BUFFER_OFFSET_NONE,
      videofilter->to_buf_size, GST_PAD_CAPS (videofilter->srcpad), &outbuf);
  if (ret != GST_FLOW_OK)
    goto no_buffer;

  /* explicit checks instead of g_return_val_if_fail so the input (and
   * output) buffers can be released on failure */
  if (GST_BUFFER_DATA (outbuf) == NULL || videofilter->format == NULL) {
    GST_ERROR_OBJECT (videofilter, "no output data or no format configured");
    gst_buffer_unref (outbuf);
    ret = GST_FLOW_ERROR;
    goto no_buffer;
  }

  GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
  GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf);

  GST_DEBUG_OBJECT (videofilter, "format %s", videofilter->format->fourcc);

  videofilter->in_buf = buf;
  videofilter->out_buf = outbuf;

  videofilter->format->filter_func (videofilter, GST_BUFFER_DATA (outbuf),
      data);

  gst_buffer_unref (buf);

  GST_LOG_OBJECT (videofilter, "pushing buffer of %d bytes in '%s'",
      GST_BUFFER_SIZE (outbuf), GST_OBJECT_NAME (videofilter));

  ret = gst_pad_push (videofilter->srcpad, outbuf);

  return ret;

no_buffer:
  {
    /* release the input buffer we own before bailing out */
    gst_buffer_unref (buf);
    return ret;
  }
}
/* GObject set_property vmethod. The base class installs no properties,
 * so any id reaching here is invalid; warn like get_property does
 * instead of silently ignoring it. */
static void
gst_videofilter_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideofilter *videofilter;

  g_return_if_fail (GST_IS_VIDEOFILTER (object));
  videofilter = GST_VIDEOFILTER (object);

  GST_DEBUG_OBJECT (videofilter, "gst_videofilter_set_property");
  switch (prop_id) {
    default:
      /* consistent with gst_videofilter_get_property */
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property vmethod: the base class installs no properties,
 * so every id is invalid here. */
static void
gst_videofilter_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstVideofilter *filter;

  g_return_if_fail (GST_IS_VIDEOFILTER (object));
  filter = GST_VIDEOFILTER (object);

  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Returns the negotiated input frame width, or 0 when @videofilter is
 * not a valid GstVideofilter. */
int
gst_videofilter_get_input_width (GstVideofilter * videofilter)
{
  g_return_val_if_fail (GST_IS_VIDEOFILTER (videofilter), 0);

  return videofilter->from_width;
}
/* Returns the negotiated input frame height, or 0 when @videofilter is
 * not a valid GstVideofilter. */
int
gst_videofilter_get_input_height (GstVideofilter * videofilter)
{
  g_return_val_if_fail (GST_IS_VIDEOFILTER (videofilter), 0);

  return videofilter->from_height;
}
/* Updates the output dimensions and renegotiates the src-pad caps
 * accordingly. Intended to be called from a subclass setup hook.
 *
 * Fix: gst_pad_set_caps() takes its own reference on the caps, so the
 * local copy must be unreffed afterwards — it was leaked before. */
void
gst_videofilter_set_output_size (GstVideofilter * videofilter,
    int width, int height)
{
  GstCaps *srccaps;
  GstStructure *structure;

  g_return_if_fail (GST_IS_VIDEOFILTER (videofilter));

  videofilter->to_width = width;
  videofilter->to_height = height;
  /* output frame size in bytes, from bits per pixel */
  videofilter->to_buf_size = (videofilter->to_width * videofilter->to_height
      * videofilter->format->bpp) / 8;

  srccaps = gst_caps_copy (GST_PAD_CAPS (videofilter->srcpad));
  /* guard: the src pad may not have caps yet */
  g_return_if_fail (srccaps != NULL);

  structure = gst_caps_get_structure (srccaps, 0);
  gst_structure_set (structure, "width", G_TYPE_INT, width,
      "height", G_TYPE_INT, height, NULL);

  gst_pad_set_caps (videofilter->srcpad, srccaps);
  gst_caps_unref (srccaps);     /* set_caps took its own ref */
}
/* Computes the derived per-format state (output size defaults and
 * input/output buffer sizes) after caps negotiation, invoking the
 * subclass setup hook first so it can override the output size. */
void
gst_videofilter_setup (GstVideofilter * videofilter)
{
  GstVideofilterClass *filter_class;

  GST_DEBUG_OBJECT (videofilter, "setup");

  filter_class = GST_VIDEOFILTER_CLASS (G_OBJECT_GET_CLASS (videofilter));

  /* give the subclass a chance to choose format/output size */
  if (filter_class->setup) {
    GST_DEBUG_OBJECT (videofilter, "calling class setup method");
    filter_class->setup (videofilter);
  }

  /* default the output size to the input size when the subclass did
   * not set one */
  if (videofilter->to_width == 0)
    videofilter->to_width = videofilter->from_width;
  if (videofilter->to_height == 0)
    videofilter->to_height = videofilter->from_height;

  g_return_if_fail (videofilter->format != NULL);
  g_return_if_fail (videofilter->from_width > 0);
  g_return_if_fail (videofilter->from_height > 0);
  g_return_if_fail (videofilter->to_width > 0);
  g_return_if_fail (videofilter->to_height > 0);

  /* frame sizes in bytes, derived from bits per pixel */
  videofilter->from_buf_size =
      (videofilter->from_width * videofilter->from_height *
      videofilter->format->bpp) / 8;
  videofilter->to_buf_size =
      (videofilter->to_width * videofilter->to_height *
      videofilter->format->bpp) / 8;

  GST_DEBUG_OBJECT (videofilter, "from_buf_size %d to_buf_size %d",
      videofilter->from_buf_size, videofilter->to_buf_size);

  videofilter->inited = TRUE;
}
/* Looks up the subclass-registered format that matches @structure.
 * Returns NULL when the media type is unknown, a required field is
 * missing, or no registered format matches. */
GstVideofilterFormat *
gst_videofilter_find_format_by_structure (GstVideofilter * videofilter,
    const GstStructure * structure)
{
  GstVideofilterClass *filter_class;
  const gchar *media_type;
  int i;

  g_return_val_if_fail (structure != NULL, NULL);

  filter_class = GST_VIDEOFILTER_CLASS (G_OBJECT_GET_CLASS (videofilter));
  media_type = gst_structure_get_name (structure);

  if (strcmp (media_type, "video/x-raw-yuv") == 0) {
    guint32 fourcc;

    if (!gst_structure_get_fourcc (structure, "format", &fourcc))
      return NULL;

    /* YUV entries are registered with depth == 0 */
    for (i = 0; i < filter_class->formats->len; i++) {
      GstVideofilterFormat *fmt;

      fmt = g_ptr_array_index (filter_class->formats, i);
      if (fmt->depth == 0 && GST_STR_FOURCC (fmt->fourcc) == fourcc)
        return fmt;
    }
  } else if (strcmp (media_type, "video/x-raw-rgb") == 0) {
    int bpp, depth, endianness;
    int red_mask, green_mask, blue_mask;
    gboolean ok;

    /* all six fields are required to identify an RGB layout */
    ok = gst_structure_get_int (structure, "bpp", &bpp);
    ok &= gst_structure_get_int (structure, "depth", &depth);
    ok &= gst_structure_get_int (structure, "endianness", &endianness);
    ok &= gst_structure_get_int (structure, "red_mask", &red_mask);
    ok &= gst_structure_get_int (structure, "green_mask", &green_mask);
    ok &= gst_structure_get_int (structure, "blue_mask", &blue_mask);
    if (!ok)
      return NULL;

    for (i = 0; i < filter_class->formats->len; i++) {
      GstVideofilterFormat *fmt;

      fmt = g_ptr_array_index (filter_class->formats, i);
      if (fmt->bpp == bpp && fmt->depth == depth &&
          fmt->endianness == endianness && fmt->red_mask == red_mask &&
          fmt->green_mask == green_mask && fmt->blue_mask == blue_mask)
        return fmt;
    }
  }

  return NULL;
}
/* Registers a pixel format (and its filter function) with the
 * videofilter subclass. Guards added so a NULL class or format cannot
 * corrupt the format array. */
void
gst_videofilter_class_add_format (GstVideofilterClass * videofilterclass,
    GstVideofilterFormat * format)
{
  g_return_if_fail (videofilterclass != NULL);
  g_return_if_fail (format != NULL);

  g_ptr_array_add (videofilterclass->formats, format);
}
/* Installs always-available src and sink pad templates built from the
 * class's registered format list. */
void
gst_videofilter_class_add_pad_templates (GstVideofilterClass *
    videofilter_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (videofilter_class);
  GstPadTemplate *templ;

  templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
      gst_videofilter_class_get_capslist (videofilter_class));
  gst_element_class_add_pad_template (element_class, templ);

  templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
      gst_videofilter_class_get_capslist (videofilter_class));
  gst_element_class_add_pad_template (element_class, templ);
}

View file

@ -21,32 +21,13 @@
#ifndef __GST_VIDEOFILTER_H__ #ifndef __GST_VIDEOFILTER_H__
#define __GST_VIDEOFILTER_H__ #define __GST_VIDEOFILTER_H__
#include <gst/base/gstbasetransform.h>
#include <gst/gst.h>
G_BEGIN_DECLS G_BEGIN_DECLS
typedef struct _GstVideofilter GstVideofilter; typedef struct _GstVideofilter GstVideofilter;
typedef struct _GstVideofilterClass GstVideofilterClass; typedef struct _GstVideofilterClass GstVideofilterClass;
typedef void (*GstVideofilterFilterFunc)(GstVideofilter *filter,
void *out_data, void *in_data);
typedef void (*GstVideofilterSetupFunc)(GstVideofilter *filter);
typedef struct _GstVideofilterFormat GstVideofilterFormat;
struct _GstVideofilterFormat {
char *fourcc;
int bpp;
GstVideofilterFilterFunc filter_func;
int depth;
unsigned int endianness;
unsigned int red_mask;
unsigned int green_mask;
unsigned int blue_mask;
};
#define GST_TYPE_VIDEOFILTER \ #define GST_TYPE_VIDEOFILTER \
(gst_videofilter_get_type()) (gst_videofilter_get_type())
#define GST_VIDEOFILTER(obj) \ #define GST_VIDEOFILTER(obj) \
@ -59,51 +40,17 @@ struct _GstVideofilterFormat {
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEOFILTER)) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEOFILTER))
struct _GstVideofilter { struct _GstVideofilter {
GstElement element; GstBaseTransform element;
GstPad *sinkpad,*srcpad;
/* video state */
gboolean inited; gboolean inited;
GstVideofilterFormat *format;
gint to_width;
gint to_height;
gint from_width;
gint from_height;
gboolean passthru;
/* private */
gint from_buf_size;
gint to_buf_size;
GValue framerate;
GstBuffer *in_buf;
GstBuffer *out_buf;
}; };
struct _GstVideofilterClass { struct _GstVideofilterClass {
GstElementClass parent_class; GstBaseTransformClass parent_class;
GPtrArray *formats;
GstVideofilterSetupFunc setup;
}; };
GType gst_videofilter_get_type(void); GType gst_videofilter_get_type(void);
int gst_videofilter_get_input_width(GstVideofilter *videofilter);
int gst_videofilter_get_input_height(GstVideofilter *videofilter);
void gst_videofilter_set_output_size(GstVideofilter *videofilter,
int width, int height);
GstVideofilterFormat *gst_videofilter_find_format_by_structure (GstVideofilter *filter,
const GstStructure *structure);
GstCaps *gst_videofilter_class_get_capslist(GstVideofilterClass *videofilterclass);
void gst_videofilter_setup (GstVideofilter * videofilter);
void gst_videofilter_class_add_format(GstVideofilterClass *videofilterclass,
GstVideofilterFormat *format);
void gst_videofilter_class_add_pad_templates (GstVideofilterClass *videofilterclass);
G_END_DECLS G_END_DECLS
#endif /* __GST_VIDEOFILTER_H__ */ #endif /* __GST_VIDEOFILTER_H__ */

View file

@ -27,17 +27,11 @@
#include "config.h" #include "config.h"
#endif #endif
/*#define DEBUG_ENABLED */
#include "gstvideoflip.h" #include "gstvideoflip.h"
#include <string.h>
#include <gst/video/video.h>
/* GstVideoflip signals and args */ /* GstVideoflip signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum enum
{ {
ARG_0, ARG_0,
@ -45,21 +39,30 @@ enum
/* FILL ME */ /* FILL ME */
}; };
GST_DEBUG_CATEGORY_STATIC (gst_videoflip_debug); GST_DEBUG_CATEGORY (videoflip_debug);
#define GST_CAT_DEFAULT gst_videoflip_debug #define GST_CAT_DEFAULT videoflip_debug
static void gst_videoflip_base_init (gpointer g_class); static GstElementDetails videoflip_details =
static void gst_videoflip_class_init (gpointer g_class, gpointer class_data); GST_ELEMENT_DETAILS ("Video Flipper",
static void gst_videoflip_init (GTypeInstance * instance, gpointer g_class); "Filter/Effect/Video",
"Flips and rotates video",
"David Schleef <ds@schleef.org>");
static void gst_videoflip_set_property (GObject * object, guint prop_id, static GstStaticPadTemplate gst_videoflip_src_template =
const GValue * value, GParamSpec * pspec); GST_STATIC_PAD_TEMPLATE ("src",
static void gst_videoflip_get_property (GObject * object, guint prop_id, GST_PAD_SRC,
GValue * value, GParamSpec * pspec); GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ IYUV, I420, YV12 }"))
);
static void gst_videoflip_planar411 (GstVideofilter * videofilter, void *dest, static GstStaticPadTemplate gst_videoflip_sink_template =
void *src); GST_STATIC_PAD_TEMPLATE ("sink",
static void gst_videoflip_setup (GstVideofilter * videofilter); GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ IYUV, I420, YV12 }"))
);
static GstVideofilterClass *parent_class = NULL;
#define GST_TYPE_VIDEOFLIP_METHOD (gst_videoflip_method_get_type()) #define GST_TYPE_VIDEOFLIP_METHOD (gst_videoflip_method_get_type())
@ -72,7 +75,7 @@ gst_videoflip_method_get_type (void)
{GST_VIDEOFLIP_METHOD_90R, "Rotate clockwise 90 degrees", "clockwise"}, {GST_VIDEOFLIP_METHOD_90R, "Rotate clockwise 90 degrees", "clockwise"},
{GST_VIDEOFLIP_METHOD_180, "Rotate 180 degrees", "rotate-180"}, {GST_VIDEOFLIP_METHOD_180, "Rotate 180 degrees", "rotate-180"},
{GST_VIDEOFLIP_METHOD_90L, "Rotate counter-clockwise 90 degrees", {GST_VIDEOFLIP_METHOD_90L, "Rotate counter-clockwise 90 degrees",
"counterclockwise"}, "counterclockwise"},
{GST_VIDEOFLIP_METHOD_HORIZ, "Flip horizontally", "horizontal-flip"}, {GST_VIDEOFLIP_METHOD_HORIZ, "Flip horizontally", "horizontal-flip"},
{GST_VIDEOFLIP_METHOD_VERT, "Flip vertically", "vertical-flip"}, {GST_VIDEOFLIP_METHOD_VERT, "Flip vertically", "vertical-flip"},
{GST_VIDEOFLIP_METHOD_TRANS, {GST_VIDEOFLIP_METHOD_TRANS,
@ -89,243 +92,149 @@ gst_videoflip_method_get_type (void)
return videoflip_method_type; return videoflip_method_type;
} }
GType static gboolean
gst_videoflip_get_type (void) gst_videoflip_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{ {
static GType videoflip_type = 0; GstVideoflip *vf;
GstStructure *in_s, *out_s;
gboolean ret = FALSE;
if (!videoflip_type) { vf = GST_VIDEOFLIP (btrans);
static const GTypeInfo videoflip_info = {
sizeof (GstVideoflipClass),
gst_videoflip_base_init,
NULL,
gst_videoflip_class_init,
NULL,
NULL,
sizeof (GstVideoflip),
0,
gst_videoflip_init,
};
videoflip_type = g_type_register_static (GST_TYPE_VIDEOFILTER, in_s = gst_caps_get_structure (incaps, 0);
"GstVideoflip", &videoflip_info, 0); out_s = gst_caps_get_structure (outcaps, 0);
}
return videoflip_type;
}
static GstVideofilterFormat gst_videoflip_formats[] = { if (gst_structure_get_int (in_s, "width", &vf->from_width) &&
/* planar */ gst_structure_get_int (in_s, "height", &vf->from_height) &&
{"YV12", 12, gst_videoflip_planar411,}, gst_structure_get_int (out_s, "width", &vf->to_width) &&
{"I420", 12, gst_videoflip_planar411,}, gst_structure_get_int (out_s, "height", &vf->to_height)) {
{"IYUV", 12, gst_videoflip_planar411,}, /* Check that they are correct */
}; switch (vf->method) {
case GST_VIDEOFLIP_METHOD_90R:
case GST_VIDEOFLIP_METHOD_90L:
case GST_VIDEOFLIP_METHOD_TRANS:
case GST_VIDEOFLIP_METHOD_OTHER:
if ((vf->from_width != vf->to_height) ||
(vf->from_height != vf->to_width)) {
GST_DEBUG_OBJECT (vf, "we are inverting width and height but caps "
"are not correct : %dx%d to %dx%d", vf->from_width,
vf->from_height, vf->to_width, vf->to_height);
goto beach;
}
break;
case GST_VIDEOFLIP_METHOD_IDENTITY:
static void break;
gst_videoflip_base_init (gpointer g_class) case GST_VIDEOFLIP_METHOD_180:
{ case GST_VIDEOFLIP_METHOD_HORIZ:
static GstElementDetails videoflip_details = case GST_VIDEOFLIP_METHOD_VERT:
GST_ELEMENT_DETAILS ("Video Flipper", if ((vf->from_width != vf->to_width) ||
"Filter/Effect/Video", (vf->from_height != vf->to_height)) {
"Flips and rotates video", GST_DEBUG_OBJECT (vf, "we are keeping width and height but caps "
"David Schleef <ds@schleef.org>"); "are not correct : %dx%d to %dx%d", vf->from_width,
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); vf->from_height, vf->to_width, vf->to_height);
GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class); goto beach;
int i; }
break;
gst_element_class_set_details (element_class, &videoflip_details); default:
g_assert_not_reached ();
for (i = 0; i < G_N_ELEMENTS (gst_videoflip_formats); i++) { break;
gst_videofilter_class_add_format (videofilter_class, }
gst_videoflip_formats + i);
} }
gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); ret = TRUE;
beach:
return ret;
} }
static void static GstCaps *
gst_videoflip_class_init (gpointer g_class, gpointer class_data) gst_videoflip_transform_caps (GstBaseTransform * trans,
{ GstPadDirection direction, GstCaps * caps)
GObjectClass *gobject_class;
GstVideofilterClass *videofilter_class;
gobject_class = G_OBJECT_CLASS (g_class);
videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
gobject_class->set_property = gst_videoflip_set_property;
gobject_class->get_property = gst_videoflip_get_property;
g_object_class_install_property (gobject_class, ARG_METHOD,
g_param_spec_enum ("method", "method", "method",
GST_TYPE_VIDEOFLIP_METHOD, GST_VIDEOFLIP_METHOD_90R,
G_PARAM_READWRITE));
videofilter_class->setup = gst_videoflip_setup;
}
static void
gst_videoflip_init (GTypeInstance * instance, gpointer g_class)
{
GstVideoflip *videoflip = GST_VIDEOFLIP (instance);
GstVideofilter *videofilter;
GST_DEBUG_OBJECT (videoflip, "gst_videoflip_init");
videofilter = GST_VIDEOFILTER (videoflip);
/* do stuff */
}
static void
gst_videoflip_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{ {
GstVideoflip *videoflip; GstVideoflip *videoflip;
GstVideofilter *videofilter; GstCaps *ret;
gint width, height, i;
g_return_if_fail (GST_IS_VIDEOFLIP (object)); videoflip = GST_VIDEOFLIP (trans);
videoflip = GST_VIDEOFLIP (object);
videofilter = GST_VIDEOFILTER (object);
GST_DEBUG_OBJECT (videoflip, "gst_videoflip_set_property"); ret = gst_caps_copy (caps);
switch (prop_id) {
case ARG_METHOD: for (i = 0; i < gst_caps_get_size (ret); i++) {
videoflip->method = g_value_get_enum (value); GstStructure *structure = gst_caps_get_structure (ret, i);
if (videofilter->inited) {
GST_DEBUG_OBJECT (videoflip, "setting up videoflip again"); if (gst_structure_get_int (structure, "width", &width) &&
gst_videofilter_setup (videofilter); gst_structure_get_int (structure, "height", &height)) {
switch (videoflip->method) {
case GST_VIDEOFLIP_METHOD_90R:
case GST_VIDEOFLIP_METHOD_90L:
case GST_VIDEOFLIP_METHOD_TRANS:
case GST_VIDEOFLIP_METHOD_OTHER:
gst_structure_set (structure, "width", G_TYPE_INT, height,
"height", G_TYPE_INT, width, NULL);
break;
case GST_VIDEOFLIP_METHOD_IDENTITY:
case GST_VIDEOFLIP_METHOD_180:
case GST_VIDEOFLIP_METHOD_HORIZ:
case GST_VIDEOFLIP_METHOD_VERT:
gst_structure_set (structure, "width", G_TYPE_INT, width,
"height", G_TYPE_INT, height, NULL);
break;
default:
g_assert_not_reached ();
break;
} }
break; }
default:
break;
} }
GST_DEBUG_OBJECT (videoflip, "transformed %" GST_PTR_FORMAT " to %"
GST_PTR_FORMAT, caps, ret);
return ret;
} }
static void /* Useful macros */
gst_videoflip_get_property (GObject * object, guint prop_id, GValue * value, #define GST_VIDEO_I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
GParamSpec * pspec) #define GST_VIDEO_I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
{ #define GST_VIDEO_I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(GST_VIDEO_I420_Y_ROWSTRIDE(width)))/2)
GstVideoflip *videoflip;
g_return_if_fail (GST_IS_VIDEOFLIP (object)); #define GST_VIDEO_I420_Y_OFFSET(w,h) (0)
videoflip = GST_VIDEOFLIP (object); #define GST_VIDEO_I420_U_OFFSET(w,h) (GST_VIDEO_I420_Y_OFFSET(w,h)+(GST_VIDEO_I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
#define GST_VIDEO_I420_V_OFFSET(w,h) (GST_VIDEO_I420_U_OFFSET(w,h)+(GST_VIDEO_I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
switch (prop_id) { #define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
case ARG_METHOD:
g_value_set_enum (value, videoflip->method);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static gboolean static gboolean
plugin_init (GstPlugin * plugin) gst_videoflip_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
{ guint * size)
GST_DEBUG_CATEGORY_INIT (gst_videoflip_debug, "videoflip", 0, "videoflip");
return gst_element_register (plugin, "videoflip", GST_RANK_NONE,
GST_TYPE_VIDEOFLIP);
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"videoflip",
"Flips and rotates video",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
static void gst_videoflip_flip (GstVideoflip * videoflip,
unsigned char *dest, unsigned char *src, int sw, int sh, int dw, int dh);
static void
gst_videoflip_setup (GstVideofilter * videofilter)
{
int from_width, from_height;
GstVideoflip *videoflip;
videoflip = GST_VIDEOFLIP (videofilter);
GST_DEBUG_OBJECT (videoflip, "gst_videoflip_setup");
from_width = gst_videofilter_get_input_width (videofilter);
from_height = gst_videofilter_get_input_height (videofilter);
if (from_width == 0 || from_height == 0) {
return;
}
switch (videoflip->method) {
case GST_VIDEOFLIP_METHOD_90R:
case GST_VIDEOFLIP_METHOD_90L:
case GST_VIDEOFLIP_METHOD_TRANS:
case GST_VIDEOFLIP_METHOD_OTHER:
gst_videofilter_set_output_size (videofilter, from_height, from_width);
break;
case GST_VIDEOFLIP_METHOD_IDENTITY:
case GST_VIDEOFLIP_METHOD_180:
case GST_VIDEOFLIP_METHOD_HORIZ:
case GST_VIDEOFLIP_METHOD_VERT:
gst_videofilter_set_output_size (videofilter, from_width, from_height);
break;
default:
g_assert_not_reached ();
break;
}
GST_DEBUG_OBJECT (videoflip, "format=%p \"%s\" from %dx%d to %dx%d",
videofilter->format, videofilter->format->fourcc,
from_width, from_height, videofilter->to_width, videofilter->to_height);
if (videoflip->method == GST_VIDEOFLIP_METHOD_IDENTITY) {
GST_DEBUG_OBJECT (videoflip, "videoflip: using passthru");
videofilter->passthru = TRUE;
} else {
videofilter->passthru = FALSE;
}
}
static void
gst_videoflip_planar411 (GstVideofilter * videofilter, void *dest, void *src)
{ {
GstVideoflip *videoflip; GstVideoflip *videoflip;
int sw; GstStructure *structure;
int sh; gboolean ret = FALSE;
int dw; gint width, height;
int dh;
g_return_if_fail (GST_IS_VIDEOFLIP (videofilter)); videoflip = GST_VIDEOFLIP (btrans);
videoflip = GST_VIDEOFLIP (videofilter);
sw = videofilter->from_width; structure = gst_caps_get_structure (caps, 0);
sh = videofilter->from_height;
dw = videofilter->to_width;
dh = videofilter->to_height;
GST_LOG_OBJECT (videoflip, "videoflip: scaling planar 4:1:1 %dx%d to %dx%d", if (gst_structure_get_int (structure, "width", &width) &&
sw, sh, dw, dh); gst_structure_get_int (structure, "height", &height)) {
*size = GST_VIDEO_I420_SIZE (width, height);
ret = TRUE;
GST_DEBUG_OBJECT (videoflip, "our frame size is %d bytes (%dx%d)", *size,
width, height);
}
gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh); return ret;
src += sw * sh;
dest += dw * dh;
dh = dh >> 1;
dw = dw >> 1;
sh = sh >> 1;
sw = sw >> 1;
gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
src += sw * sh;
dest += dw * dh;
gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
} }
static void static GstFlowReturn
gst_videoflip_flip (GstVideoflip * videoflip, unsigned char *dest, gst_videoflip_flip (GstVideoflip * videoflip, unsigned char *dest,
unsigned char *src, int sw, int sh, int dw, int dh) unsigned char *src, int sw, int sh, int dw, int dh)
{ {
GstFlowReturn ret = GST_FLOW_OK;
int x, y; int x, y;
switch (videoflip->method) { switch (videoflip->method) {
@ -379,7 +288,265 @@ gst_videoflip_flip (GstVideoflip * videoflip, unsigned char *dest,
} }
break; break;
default: default:
/* FIXME */ ret = GST_FLOW_ERROR;
break;
}
return ret;
}
/* BaseTransform transform vmethod: flips/rotates one planar YUV frame
 * from @in into @out. Processes the full-size Y plane, then the two
 * half-size chroma planes.
 *
 * Fix: plane pointers are now guint8 * instead of gpointer — pointer
 * arithmetic on void * is a GCC extension, not standard C.
 *
 * NOTE(review): the plane offsets assume tightly packed planes
 * (sw*sh, then sw/2*sh/2 twice) while get_unit_size uses the
 * rounded-up I420 strides — confirm frame dimensions are multiples
 * of 4, otherwise the chroma offsets disagree with the buffer layout. */
static GstFlowReturn
gst_videoflip_transform (GstBaseTransform * trans, GstBuffer * in,
    GstBuffer * out)
{
  GstVideoflip *videoflip;
  guint8 *dest, *src;
  int sw, sh, dw, dh;
  GstFlowReturn ret = GST_FLOW_OK;

  videoflip = GST_VIDEOFLIP (trans);

  /* carry timestamp/duration over to the output buffer */
  gst_buffer_stamp (out, in);

  src = GST_BUFFER_DATA (in);
  dest = GST_BUFFER_DATA (out);

  sw = videoflip->from_width;
  sh = videoflip->from_height;
  dw = videoflip->to_width;
  dh = videoflip->to_height;

  GST_LOG_OBJECT (videoflip, "videoflip: scaling planar 4:1:1 %dx%d to %dx%d",
      sw, sh, dw, dh);

  /* luma plane */
  ret = gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
  if (ret != GST_FLOW_OK)
    goto beach;

  /* chroma planes are half size in both dimensions */
  src += sw * sh;
  dest += dw * dh;

  dh = dh >> 1;
  dw = dw >> 1;
  sh = sh >> 1;
  sw = sw >> 1;

  ret = gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
  if (ret != GST_FLOW_OK)
    goto beach;

  src += sw * sh;
  dest += dw * dh;

  ret = gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);

beach:
  return ret;
}
/* Src-pad event handler: remaps navigation (pointer) coordinates from
 * the flipped output geometry back into input coordinates before
 * forwarding the event upstream.
 *
 * Fix: the rotation cases previously did `x = y; y = to_width - x;`,
 * reading the already-clobbered x (so y became to_width - y). The
 * remapping now goes through temporaries so both formulas use the
 * original coordinates. */
static gboolean
gst_videoflip_handle_src_event (GstPad * pad, GstEvent * event)
{
  GstVideoflip *vf;
  gboolean ret;
  gdouble x, y;
  GstStructure *structure;

  vf = GST_VIDEOFLIP (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (vf, "handling %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NAVIGATION:
      /* we modify the event's structure, so make it writable first */
      event =
          GST_EVENT (gst_mini_object_make_writable (GST_MINI_OBJECT (event)));

      structure = (GstStructure *) gst_event_get_structure (event);
      if (gst_structure_get_double (structure, "pointer_x", &x) &&
          gst_structure_get_double (structure, "pointer_y", &y)) {
        gdouble new_x = x, new_y = y;

        switch (vf->method) {
          case GST_VIDEOFLIP_METHOD_90R:
          case GST_VIDEOFLIP_METHOD_OTHER:
            new_x = y;
            new_y = vf->to_width - x;
            break;
          case GST_VIDEOFLIP_METHOD_90L:
          case GST_VIDEOFLIP_METHOD_TRANS:
            new_x = vf->to_height - y;
            new_y = x;
            break;
          case GST_VIDEOFLIP_METHOD_180:
            new_x = vf->to_width - x;
            new_y = vf->to_height - y;
            break;
          case GST_VIDEOFLIP_METHOD_HORIZ:
            new_x = vf->to_width - x;
            break;
          case GST_VIDEOFLIP_METHOD_VERT:
            new_y = vf->to_height - y;
            break;
          default:
            /* identity: coordinates pass through unchanged */
            break;
        }
        gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, new_x,
            "pointer_y", G_TYPE_DOUBLE, new_y, NULL);
      }
      break;
    default:
      break;
  }

  ret = gst_pad_event_default (pad, event);

  gst_object_unref (vf);

  return ret;
}
/* GObject set_property vmethod. Changing the method invalidates the
 * negotiated caps (rotations swap width/height), so both pads' caps
 * are cleared under the transform lock to force renegotiation.
 * Also drops the unused GstVideofilter local the old code fetched. */
static void
gst_videoflip_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoflip *videoflip;

  g_return_if_fail (GST_IS_VIDEOFLIP (object));
  videoflip = GST_VIDEOFLIP (object);

  switch (prop_id) {
    case ARG_METHOD:
    {
      GstVideoflipMethod method;

      method = g_value_get_enum (value);
      if (method != videoflip->method) {
        GstBaseTransform *btrans = GST_BASE_TRANSFORM (videoflip);

        /* hold the transform lock so we don't race a running transform */
        g_mutex_lock (btrans->transform_lock);
        gst_pad_set_caps (btrans->sinkpad, NULL);
        gst_pad_set_caps (btrans->srcpad, NULL);
        g_mutex_unlock (btrans->transform_lock);
        videoflip->method = method;
      }
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property vmethod: exposes the current flip method. */
static void
gst_videoflip_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstVideoflip *vf;

  g_return_if_fail (GST_IS_VIDEOFLIP (object));
  vf = GST_VIDEOFLIP (object);

  switch (prop_id) {
    case ARG_METHOD:
      g_value_set_enum (value, vf->method);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GType base_init: installs the element details and the static sink/src
 * pad templates on the element class. */
static void
gst_videoflip_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_set_details (element_class, &videoflip_details);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_videoflip_sink_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_videoflip_src_template));
}
/* GType class_init: installs the "method" property and the
 * GstBaseTransform virtual methods. */
static void
gst_videoflip_class_init (gpointer klass, gpointer class_data)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->set_property = gst_videoflip_set_property;
  gobject_class->get_property = gst_videoflip_get_property;

  g_object_class_install_property (gobject_class, ARG_METHOD,
      g_param_spec_enum ("method", "method", "method",
          GST_TYPE_VIDEOFLIP_METHOD, GST_VIDEOFLIP_METHOD_90R,
          G_PARAM_READWRITE));

  /* base transform vmethods */
  trans_class->transform_caps =
      GST_DEBUG_FUNCPTR (gst_videoflip_transform_caps);
  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_videoflip_set_caps);
  trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_videoflip_get_unit_size);
  trans_class->transform = GST_DEBUG_FUNCPTR (gst_videoflip_transform);
}
/* Instance initializer: sets the default method and hooks the src-pad
 * event function so navigation events can be remapped. */
static void
gst_videoflip_init (GTypeInstance * instance, gpointer g_class)
{
  GstVideoflip *vf = GST_VIDEOFLIP (instance);
  GstBaseTransform *btrans = GST_BASE_TRANSFORM (instance);

  GST_DEBUG_OBJECT (vf, "gst_videoflip_init");

  vf->method = GST_VIDEOFLIP_METHOD_90R;

  gst_pad_set_event_function (btrans->srcpad,
      GST_DEBUG_FUNCPTR (gst_videoflip_handle_src_event));
}
/* Plugin entry point: sets up the debug category and registers the
 * videoflip element. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT (videoflip_debug, "videoflip", 0, "videoflip");

  return gst_element_register (plugin, "videoflip", GST_RANK_NONE,
      GST_TYPE_VIDEOFLIP);
}
GType
gst_videoflip_get_type (void)
{
static GType videoflip_type = 0;
if (!videoflip_type) {
static const GTypeInfo videoflip_info = {
sizeof (GstVideoflipClass),
gst_videoflip_base_init,
NULL,
gst_videoflip_class_init,
NULL,
NULL,
sizeof (GstVideoflip),
0,
gst_videoflip_init,
};
videoflip_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
"GstVideoflip", &videoflip_info, 0);
}
return videoflip_type;
}
/* Exports the plugin descriptor so the GStreamer registry can load
 * the "videoflip" plugin and call plugin_init. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "videoflip",
    "Flips and rotates video",
    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);

View file

@ -17,16 +17,11 @@
* Boston, MA 02111-1307, USA. * Boston, MA 02111-1307, USA.
*/ */
#ifndef __GST_VIDEOFLIP_H__ #ifndef __GST_VIDEOFLIP_H__
#define __GST_VIDEOFLIP_H__ #define __GST_VIDEOFLIP_H__
#include <gst/gst.h>
#include "gstvideofilter.h" #include "gstvideofilter.h"
G_BEGIN_DECLS G_BEGIN_DECLS
typedef enum { typedef enum {
@ -37,7 +32,7 @@ typedef enum {
GST_VIDEOFLIP_METHOD_HORIZ, GST_VIDEOFLIP_METHOD_HORIZ,
GST_VIDEOFLIP_METHOD_VERT, GST_VIDEOFLIP_METHOD_VERT,
GST_VIDEOFLIP_METHOD_TRANS, GST_VIDEOFLIP_METHOD_TRANS,
GST_VIDEOFLIP_METHOD_OTHER, GST_VIDEOFLIP_METHOD_OTHER
} GstVideoflipMethod; } GstVideoflipMethod;
#define GST_TYPE_VIDEOFLIP \ #define GST_TYPE_VIDEOFLIP \
@ -56,7 +51,10 @@ typedef struct _GstVideoflipClass GstVideoflipClass;
struct _GstVideoflip { struct _GstVideoflip {
GstVideofilter videofilter; GstVideofilter videofilter;
gint from_width, from_height;
gint to_width, to_height;
GstVideoflipMethod method; GstVideoflipMethod method;
}; };
@ -69,4 +67,3 @@ GType gst_videoflip_get_type(void);
G_END_DECLS G_END_DECLS
#endif /* __GST_VIDEOFLIP_H__ */ #endif /* __GST_VIDEOFLIP_H__ */