opencv: port to 0.11

Basic port to 0.11 API.

https://bugzilla.gnome.org/show_bug.cgi?id=679164
Sreerenj Balachandran, 2012-06-30 00:22:40 +03:00 (committed by Tim-Philipp Müller)
parent 8712729a8c
commit 45ca8876b2
18 changed files with 679 additions and 619 deletions
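
The same GStreamer 0.10 → 0.11 changes repeat across every element below: GST_BOILERPLATE is replaced by G_DEFINE_TYPE, the base_init functions are folded into class_init, the instance init functions lose their class argument, static caps move from the GST_VIDEO_CAPS_RGB / GST_VIDEO_CAPS_GRAY8 macros to GST_VIDEO_CAPS_MAKE ("RGB") / ("GRAY8"), setcaps/getcaps pad functions give way to GST_PAD_SET_PROXY_CAPS plus a sink event handler for GST_EVENT_CAPS, transform_caps gains a filter argument, and raw GST_BUFFER_DATA access is replaced by gst_buffer_map()/gst_buffer_unmap() with a GstMapInfo. A minimal sketch of the chain-function side of that pattern, using a hypothetical ExampleFilter element (the names below are illustrative, not taken from this commit):

#include <gst/gst.h>
#include <string.h>

/* Hypothetical element type, only to make the sketch self-contained. */
typedef struct
{
  GstElement parent;
  GstPad *srcpad;
} ExampleFilter;

/* 0.11-style chain function: the owning element arrives via the new
 * 'parent' argument, and buffer data is accessed through gst_buffer_map()
 * instead of GST_BUFFER_DATA. */
static GstFlowReturn
example_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  ExampleFilter *filter = (ExampleFilter *) parent;
  GstMapInfo info;

  buf = gst_buffer_make_writable (buf);
  if (!gst_buffer_map (buf, &info, GST_MAP_READWRITE))
    return GST_FLOW_ERROR;

  /* info.data / info.size replace GST_BUFFER_DATA / GST_BUFFER_SIZE. */
  memset (info.data, 0, info.size);     /* stand-in for the OpenCV processing */

  gst_buffer_unmap (buf, &info);
  return gst_pad_push (filter->srcpad, buf);
}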

@@ -323,7 +323,7 @@ GST_PLUGINS_NONPORTED=" aiff \
decklink fbdev linsys vcd \
apexsink cdaudio cog dc1394 dirac directfb resindvd \
gsettings jasper ladspa mimic \
musepack musicbrainz nas neon ofa openal opencv rsvg sdl sndfile spandsp spc timidity \
musepack musicbrainz nas neon ofa openal rsvg sdl sndfile spandsp spc timidity \
directsound directdraw direct3d9 acm wininet \
wildmidi xvid lv2 teletextdec dvb sndio"
AC_SUBST(GST_PLUGINS_NONPORTED)

@@ -52,39 +52,29 @@
GST_DEBUG_CATEGORY_STATIC (gst_cv_dilate_debug);
#define GST_CAT_DEFAULT gst_cv_dilate_debug
GST_BOILERPLATE (GstCvDilate, gst_cv_dilate, GstCvDilateErode,
GST_TYPE_CV_DILATE_ERODE);
G_DEFINE_TYPE (GstCvDilate, gst_cv_dilate, GST_TYPE_CV_DILATE_ERODE);
static GstFlowReturn gst_cv_dilate_transform_ip (GstOpencvVideoFilter *
filter, GstBuffer * buf, IplImage * img);
static GstFlowReturn gst_cv_dilate_transform (GstOpencvVideoFilter * filter,
GstBuffer * buf, IplImage * img, GstBuffer * outbuf, IplImage * outimg);
/* GObject vmethod implementations */
static void
gst_cv_dilate_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"cvdilate",
"Transform/Effect/Video",
"Applies cvDilate OpenCV function to the image",
"Thiago Santos<thiago.sousa.santos@collabora.co.uk>");
}
/* initialize the cvdilate's class */
static void
gst_cv_dilate_class_init (GstCvDilateClass * klass)
{
GstOpencvVideoFilterClass *gstopencvbasefilter_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gstopencvbasefilter_class = (GstOpencvVideoFilterClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gstopencvbasefilter_class->cv_trans_ip_func = gst_cv_dilate_transform_ip;
gstopencvbasefilter_class->cv_trans_func = gst_cv_dilate_transform;
gst_element_class_set_details_simple (element_class,
"cvdilate",
"Transform/Effect/Video",
"Applies cvDilate OpenCV function to the image",
"Thiago Santos<thiago.sousa.santos@collabora.co.uk>");
}
/* initialize the new element
@@ -93,7 +83,7 @@ gst_cv_dilate_class_init (GstCvDilateClass * klass)
* initialize instance structure
*/
static void
gst_cv_dilate_init (GstCvDilate * filter, GstCvDilateClass * gclass)
gst_cv_dilate_init (GstCvDilate * filter)
{
}

@@ -75,9 +75,6 @@ enum
#define DEFAULT_ITERATIONS 1
static GstElementClass *parent_class = NULL;
static void gst_cv_dilate_erode_base_init (gpointer gclass);
static void gst_cv_dilate_erode_class_init (GstCvDilateErodeClass * klass);
static void gst_cv_dilate_erode_init (GstCvDilateErode * filter,
GstCvDilateErodeClass * gclass);
@@ -96,7 +93,7 @@ gst_cv_dilate_erode_get_type (void)
GType _type;
static const GTypeInfo opencv_dilate_erode_info = {
sizeof (GstCvDilateErodeClass),
(GBaseInitFunc) gst_cv_dilate_erode_base_init,
NULL,
NULL,
(GClassInitFunc) gst_cv_dilate_erode_class_init,
NULL,
@@ -117,15 +114,25 @@ gst_cv_dilate_erode_get_type (void)
return opencv_dilate_erode_type;
}
/* GObject vmethod implementations */
/* initialize the cvdilate_erode's class */
static void
gst_cv_dilate_erode_base_init (gpointer gclass)
gst_cv_dilate_erode_class_init (GstCvDilateErodeClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
GObjectClass *gobject_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstCaps *caps;
GstPadTemplate *templ;
gobject_class = (GObjectClass *) klass;
gobject_class->set_property = gst_cv_dilate_erode_set_property;
gobject_class->get_property = gst_cv_dilate_erode_get_property;
g_object_class_install_property (gobject_class, PROP_ITERATIONS,
g_param_spec_int ("iterations", "iterations",
"Number of iterations to run the algorithm", 1, G_MAXINT,
DEFAULT_ITERATIONS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/* add sink and source pad templates */
caps = gst_opencv_caps_from_cv_image_type (CV_16UC1);
gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC4));
@@ -134,29 +141,8 @@ gst_cv_dilate_erode_base_init (gpointer gclass)
templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (element_class, templ);
gst_object_unref (templ);
templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps);
gst_element_class_add_pad_template (element_class, templ);
gst_object_unref (templ);
}
/* initialize the cvdilate_erode's class */
static void
gst_cv_dilate_erode_class_init (GstCvDilateErodeClass * klass)
{
GObjectClass *gobject_class;
gobject_class = (GObjectClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_cv_dilate_erode_set_property;
gobject_class->get_property = gst_cv_dilate_erode_get_property;
g_object_class_install_property (gobject_class, PROP_ITERATIONS,
g_param_spec_int ("iterations", "iterations",
"Number of iterations to run the algorithm", 1, G_MAXINT,
DEFAULT_ITERATIONS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
/* initialize the new element

@@ -56,26 +56,29 @@ GST_DEBUG_CATEGORY_STATIC (gst_cv_equalize_hist_debug);
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_GRAY8));
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY8")));
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_GRAY8));
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY8")));
GST_BOILERPLATE (GstCvEqualizeHist, gst_cv_equalize_hist,
GstOpencvVideoFilter, GST_TYPE_OPENCV_VIDEO_FILTER);
G_DEFINE_TYPE (GstCvEqualizeHist, gst_cv_equalize_hist,
GST_TYPE_OPENCV_VIDEO_FILTER);
static GstFlowReturn gst_cv_equalize_hist_transform (GstOpencvVideoFilter *
filter, GstBuffer * buf, IplImage * img, GstBuffer * outbuf,
IplImage * outimg);
/* GObject vmethod implementations */
static void
gst_cv_equalize_hist_base_init (gpointer gclass)
gst_cv_equalize_hist_class_init (GstCvEqualizeHistClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
GstOpencvVideoFilterClass *gstopencvbasefilter_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gstopencvbasefilter_class = (GstOpencvVideoFilterClass *) klass;
gstopencvbasefilter_class->cv_trans_func = gst_cv_equalize_hist_transform;
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
@@ -90,19 +93,7 @@ gst_cv_equalize_hist_base_init (gpointer gclass)
}
static void
gst_cv_equalize_hist_class_init (GstCvEqualizeHistClass * klass)
{
GstOpencvVideoFilterClass *gstopencvbasefilter_class;
gstopencvbasefilter_class = (GstOpencvVideoFilterClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gstopencvbasefilter_class->cv_trans_func = gst_cv_equalize_hist_transform;
}
static void
gst_cv_equalize_hist_init (GstCvEqualizeHist * filter,
GstCvEqualizeHistClass * gclass)
gst_cv_equalize_hist_init (GstCvEqualizeHist * filter)
{
gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), FALSE);
}

@@ -52,39 +52,29 @@
GST_DEBUG_CATEGORY_STATIC (gst_cv_erode_debug);
#define GST_CAT_DEFAULT gst_cv_erode_debug
GST_BOILERPLATE (GstCvErode, gst_cv_erode, GstCvDilateErode,
GST_TYPE_CV_DILATE_ERODE);
G_DEFINE_TYPE (GstCvErode, gst_cv_erode, GST_TYPE_CV_DILATE_ERODE);
static GstFlowReturn gst_cv_erode_transform_ip (GstOpencvVideoFilter *
filter, GstBuffer * buf, IplImage * img);
static GstFlowReturn gst_cv_erode_transform (GstOpencvVideoFilter * filter,
GstBuffer * buf, IplImage * img, GstBuffer * outbuf, IplImage * outimg);
/* GObject vmethod implementations */
static void
gst_cv_erode_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"cverode",
"Transform/Effect/Video",
"Applies cvErode OpenCV function to the image",
"Thiago Santos<thiago.sousa.santos@collabora.co.uk>");
}
/* initialize the cverode's class */
static void
gst_cv_erode_class_init (GstCvErodeClass * klass)
{
GstOpencvVideoFilterClass *gstopencvbasefilter_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gstopencvbasefilter_class = (GstOpencvVideoFilterClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gstopencvbasefilter_class->cv_trans_ip_func = gst_cv_erode_transform_ip;
gstopencvbasefilter_class->cv_trans_func = gst_cv_erode_transform;
gst_element_class_set_details_simple (element_class,
"cverode",
"Transform/Effect/Video",
"Applies cvErode OpenCV function to the image",
"Thiago Santos<thiago.sousa.santos@collabora.co.uk>");
}
/* initialize the new element
@@ -93,7 +83,7 @@ gst_cv_erode_class_init (GstCvErodeClass * klass)
* initialize instance structure
*/
static void
gst_cv_erode_init (GstCvErode * filter, GstCvErodeClass * gclass)
gst_cv_erode_init (GstCvErode * filter)
{
}

@@ -56,20 +56,22 @@ GST_DEBUG_CATEGORY_STATIC (gst_cv_laplace_debug);
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_GRAY8)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY8"))
);
#if G_BYTE_ORDER == G_BIG_ENDIAN
#define BYTE_ORDER_STRING "BIG_ENDIAN"
#else
#define BYTE_ORDER_STRING "LITTLE_ENDIAN"
#endif
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_GRAY16 (BYTE_ORDER_STRING))
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY16_BE"))
);
#else
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY16_LE"))
);
#endif
/* Filter signals and args */
enum
@@ -85,8 +87,7 @@ enum
#define DEFAULT_APERTURE_SIZE 3
GST_BOILERPLATE (GstCvLaplace, gst_cv_laplace, GstOpencvVideoFilter,
GST_TYPE_OPENCV_VIDEO_FILTER);
G_DEFINE_TYPE (GstCvLaplace, gst_cv_laplace, GST_TYPE_OPENCV_VIDEO_FILTER);
static void gst_cv_laplace_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
@@ -94,7 +95,7 @@ static void gst_cv_laplace_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstCaps *gst_cv_laplace_transform_caps (GstBaseTransform * trans,
GstPadDirection dir, GstCaps * caps);
GstPadDirection dir, GstCaps * caps, GstCaps * filter);
static GstFlowReturn gst_cv_laplace_transform (GstOpencvVideoFilter * filter,
GstBuffer * buf, IplImage * img, GstBuffer * outbuf, IplImage * outimg);
@@ -112,26 +113,7 @@ gst_cv_laplace_finalize (GObject * obj)
if (filter->intermediary_img)
cvReleaseImage (&filter->intermediary_img);
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
/* GObject vmethod implementations */
static void
gst_cv_laplace_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
gst_element_class_set_details_simple (element_class,
"cvlaplace",
"Transform/Effect/Video",
"Applies cvLaplace OpenCV function to the image",
"Thiago Santos<thiago.sousa.santos@collabora.co.uk>");
G_OBJECT_CLASS (gst_cv_laplace_parent_class)->finalize (obj);
}
/* initialize the cvlaplace's class */
@@ -141,13 +123,12 @@ gst_cv_laplace_class_init (GstCvLaplaceClass * klass)
GObjectClass *gobject_class;
GstBaseTransformClass *gstbasetransform_class;
GstOpencvVideoFilterClass *gstopencvbasefilter_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gobject_class = (GObjectClass *) klass;
gstbasetransform_class = (GstBaseTransformClass *) klass;
gstopencvbasefilter_class = (GstOpencvVideoFilterClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_cv_laplace_finalize);
gobject_class->set_property = gst_cv_laplace_set_property;
gobject_class->get_property = gst_cv_laplace_get_property;
@@ -161,10 +142,21 @@ gst_cv_laplace_class_init (GstCvLaplaceClass * klass)
g_param_spec_int ("aperture-size", "aperture size",
"Size of the extended Laplace Kernel (1, 3, 5 or 7)", 1, 7,
DEFAULT_APERTURE_SIZE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
gst_element_class_set_details_simple (element_class,
"cvlaplace",
"Transform/Effect/Video",
"Applies cvLaplace OpenCV function to the image",
"Thiago Santos<thiago.sousa.santos@collabora.co.uk>");
}
static void
gst_cv_laplace_init (GstCvLaplace * filter, GstCvLaplaceClass * gclass)
gst_cv_laplace_init (GstCvLaplace * filter)
{
filter->aperture_size = DEFAULT_APERTURE_SIZE;
@@ -189,7 +181,6 @@ gst_cv_laplace_cv_set_caps (GstOpencvVideoFilter * trans, gint in_width,
GST_WARNING_OBJECT (filter, "Unsupported output depth %d", out_depth);
return FALSE;
}
if (filter->intermediary_img) {
cvReleaseImage (&filter->intermediary_img);
}
@@ -202,41 +193,71 @@ gst_cv_laplace_cv_set_caps (GstOpencvVideoFilter * trans, gint in_width,
static GstCaps *
gst_cv_laplace_transform_caps (GstBaseTransform * trans, GstPadDirection dir,
GstCaps * caps)
GstCaps * caps, GstCaps * filter)
{
GstCaps *output = NULL;
GstCaps *to, *ret;
GstCaps *templ;
GstStructure *structure;
GstPad *other;
gint i;
output = gst_caps_copy (caps);
to = gst_caps_new_empty ();
for (i = 0; i < gst_caps_get_size (caps); i++) {
const GValue *v;
GValue list = { 0, };
GValue val = { 0, };
structure = gst_structure_copy (gst_caps_get_structure (caps, i));
g_value_init (&list, GST_TYPE_LIST);
g_value_init (&val, G_TYPE_STRING);
g_value_set_string (&val, "GRAY8");
gst_value_list_append_value (&list, &val);
g_value_unset (&val);
g_value_init (&val, G_TYPE_STRING);
#if G_BYTE_ORDER == G_BIG_ENDIAN
g_value_set_string (&val, "GRAY16_BE");
#else
g_value_set_string (&val, "GRAY16_LE");
#endif
gst_value_list_append_value (&list, &val);
g_value_unset (&val);
v = gst_structure_get_value (structure, "format");
gst_value_list_merge (&val, v, &list);
gst_structure_set_value (structure, "format", &val);
g_value_unset (&val);
g_value_unset (&list);
gst_structure_remove_field (structure, "colorimetry");
gst_structure_remove_field (structure, "chroma-site");
gst_caps_append_structure (to, structure);
/* we accept anything from the template caps for either side */
switch (dir) {
case GST_PAD_SINK:
for (i = 0; i < gst_caps_get_size (output); i++) {
structure = gst_caps_get_structure (output, i);
gst_structure_set (structure,
"depth", G_TYPE_INT, 16,
"bpp", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
}
break;
case GST_PAD_SRC:
for (i = 0; i < gst_caps_get_size (output); i++) {
structure = gst_caps_get_structure (output, i);
gst_structure_set (structure,
"depth", G_TYPE_INT, 8, "bpp", G_TYPE_INT, 8, NULL);
gst_structure_remove_field (structure, "endianness");
}
break;
default:
gst_caps_unref (output);
output = NULL;
g_assert_not_reached ();
break;
}
return output;
/* filter against set allowed caps on the pad */
other = (dir == GST_PAD_SINK) ? trans->srcpad : trans->sinkpad;
templ = gst_pad_get_pad_template_caps (other);
ret = gst_caps_intersect (to, templ);
gst_caps_unref (to);
gst_caps_unref (templ);
if (ret && filter) {
GstCaps *intersection;
intersection =
gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (ret);
ret = intersection;
}
return ret;
}
static void

@@ -106,8 +106,7 @@ gst_cv_smooth_type_get_type (void)
#define DEFAULT_PARAM3 0.0
#define DEFAULT_PARAM4 0.0
GST_BOILERPLATE (GstCvSmooth, gst_cv_smooth, GstOpencvVideoFilter,
GST_TYPE_OPENCV_VIDEO_FILTER);
G_DEFINE_TYPE (GstCvSmooth, gst_cv_smooth, GST_TYPE_OPENCV_VIDEO_FILTER);
static void gst_cv_smooth_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
@@ -119,45 +118,19 @@ static GstFlowReturn gst_cv_smooth_transform_ip (GstOpencvVideoFilter *
static GstFlowReturn gst_cv_smooth_transform (GstOpencvVideoFilter * filter,
GstBuffer * buf, IplImage * img, GstBuffer * outbuf, IplImage * outimg);
/* GObject vmethod implementations */
static void
gst_cv_smooth_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
GstCaps *caps;
GstPadTemplate *templ;
gst_element_class_set_details_simple (element_class,
"cvsmooth",
"Transform/Effect/Video",
"Applies cvSmooth OpenCV function to the image",
"Thiago Santos<thiago.sousa.santos@collabora.co.uk>");
/* add sink and source pad templates */
caps = gst_opencv_caps_from_cv_image_type (CV_8UC3);
gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC1));
templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (element_class, templ);
gst_object_unref (templ);
templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps);
gst_element_class_add_pad_template (element_class, templ);
gst_object_unref (templ);
}
/* initialize the cvsmooth's class */
static void
gst_cv_smooth_class_init (GstCvSmoothClass * klass)
{
GObjectClass *gobject_class;
GstOpencvVideoFilterClass *gstopencvbasefilter_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstCaps *caps;
GstPadTemplate *templ;
gobject_class = (GObjectClass *) klass;
gstopencvbasefilter_class = (GstOpencvVideoFilterClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_cv_smooth_set_property;
gobject_class->get_property = gst_cv_smooth_get_property;
@@ -201,6 +174,21 @@ gst_cv_smooth_class_init (GstCvSmoothClass * klass)
"/documentation/image_filtering.html#cvSmooth",
0, G_MAXDOUBLE, DEFAULT_PARAM4,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_details_simple (element_class,
"cvsmooth",
"Transform/Effect/Video",
"Applies cvSmooth OpenCV function to the image",
"Thiago Santos<thiago.sousa.santos@collabora.co.uk>");
/* add sink and source pad templates */
caps = gst_opencv_caps_from_cv_image_type (CV_8UC3);
gst_caps_append (caps, gst_opencv_caps_from_cv_image_type (CV_8UC1));
templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (element_class, templ);
templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps);
gst_element_class_add_pad_template (element_class, templ);
}
/* initialize the new element
@@ -209,7 +197,7 @@ gst_cv_smooth_class_init (GstCvSmoothClass * klass)
* initialize instance structure
*/
static void
gst_cv_smooth_init (GstCvSmooth * filter, GstCvSmoothClass * gclass)
gst_cv_smooth_init (GstCvSmooth * filter)
{
filter->type = DEFAULT_CV_SMOOTH_TYPE;
filter->param1 = DEFAULT_PARAM1;

@@ -56,20 +56,23 @@ GST_DEBUG_CATEGORY_STATIC (gst_cv_sobel_debug);
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_GRAY8)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY8"))
);
#if G_BYTE_ORDER == G_BIG_ENDIAN
#define BYTE_ORDER_STRING "BIG_ENDIAN"
#else
#define BYTE_ORDER_STRING "LITTLE_ENDIAN"
#endif
#if G_BYTE_ORDER == G_BIG_ENDIAN
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_GRAY16 (BYTE_ORDER_STRING))
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY16_BE"))
);
#else
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY16_LE"))
);
#endif
/* Filter signals and args */
enum
@@ -89,8 +92,7 @@ enum
#define DEFAULT_Y_ORDER 0
#define DEFAULT_APERTURE_SIZE 3
GST_BOILERPLATE (GstCvSobel, gst_cv_sobel, GstOpencvVideoFilter,
GST_TYPE_OPENCV_VIDEO_FILTER);
G_DEFINE_TYPE (GstCvSobel, gst_cv_sobel, GST_TYPE_OPENCV_VIDEO_FILTER);
static void gst_cv_sobel_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
@@ -98,30 +100,11 @@ static void gst_cv_sobel_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstCaps *gst_cv_sobel_transform_caps (GstBaseTransform * trans,
GstPadDirection dir, GstCaps * caps);
GstPadDirection dir, GstCaps * caps, GstCaps * filter);
static GstFlowReturn gst_cv_sobel_transform (GstOpencvVideoFilter * filter,
GstBuffer * buf, IplImage * img, GstBuffer * outbuf, IplImage * outimg);
/* GObject vmethod implementations */
static void
gst_cv_sobel_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
gst_element_class_set_details_simple (element_class,
"cvsobel",
"Transform/Effect/Video",
"Applies cvSobel OpenCV function to the image",
"Thiago Santos<thiago.sousa.santos@collabora.co.uk>");
}
/* initialize the cvsobel's class */
static void
gst_cv_sobel_class_init (GstCvSobelClass * klass)
@@ -130,12 +113,11 @@ gst_cv_sobel_class_init (GstCvSobelClass * klass)
GstBaseTransformClass *gstbasetransform_class;
GstOpencvVideoFilterClass *gstopencvbasefilter_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gobject_class = (GObjectClass *) klass;
gstbasetransform_class = (GstBaseTransformClass *) klass;
gstopencvbasefilter_class = (GstOpencvVideoFilterClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_cv_sobel_set_property;
gobject_class->get_property = gst_cv_sobel_get_property;
@@ -155,10 +137,21 @@ gst_cv_sobel_class_init (GstCvSobelClass * klass)
g_param_spec_int ("aperture-size", "aperture size",
"Size of the extended Sobel Kernel (1, 3, 5 or 7)", 1, 7,
DEFAULT_APERTURE_SIZE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
gst_element_class_set_details_simple (element_class,
"cvsobel",
"Transform/Effect/Video",
"Applies cvSobel OpenCV function to the image",
"Thiago Santos<thiago.sousa.santos@collabora.co.uk>");
}
static void
gst_cv_sobel_init (GstCvSobel * filter, GstCvSobelClass * gclass)
gst_cv_sobel_init (GstCvSobel * filter)
{
filter->x_order = DEFAULT_X_ORDER;
filter->y_order = DEFAULT_Y_ORDER;
@@ -169,41 +162,69 @@ gst_cv_sobel_init (GstCvSobel * filter, GstCvSobelClass * gclass)
static GstCaps *
gst_cv_sobel_transform_caps (GstBaseTransform * trans, GstPadDirection dir,
GstCaps * caps)
GstCaps * caps, GstCaps * filter)
{
GstCaps *output = NULL;
GstCaps *to, *ret;
GstCaps *templ;
GstStructure *structure;
GstPad *other;
gint i;
output = gst_caps_copy (caps);
to = gst_caps_new_empty ();
/* we accept anything from the template caps for either side */
switch (dir) {
case GST_PAD_SINK:
for (i = 0; i < gst_caps_get_size (output); i++) {
structure = gst_caps_get_structure (output, i);
gst_structure_set (structure,
"depth", G_TYPE_INT, 16,
"bpp", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
}
break;
case GST_PAD_SRC:
for (i = 0; i < gst_caps_get_size (output); i++) {
structure = gst_caps_get_structure (output, i);
gst_structure_set (structure,
"depth", G_TYPE_INT, 8, "bpp", G_TYPE_INT, 8, NULL);
gst_structure_remove_field (structure, "endianness");
}
break;
default:
gst_caps_unref (output);
output = NULL;
g_assert_not_reached ();
break;
for (i = 0; i < gst_caps_get_size (caps); i++) {
const GValue *v;
GValue list = { 0, };
GValue val = { 0, };
structure = gst_structure_copy (gst_caps_get_structure (caps, i));
g_value_init (&list, GST_TYPE_LIST);
g_value_init (&val, G_TYPE_STRING);
g_value_set_string (&val, "GRAY8");
gst_value_list_append_value (&list, &val);
g_value_unset (&val);
g_value_init (&val, G_TYPE_STRING);
#if G_BYTE_ORDER == G_BIG_ENDIAN
g_value_set_string (&val, "GRAY16_BE");
#else
g_value_set_string (&val, "GRAY16_LE");
#endif
gst_value_list_append_value (&list, &val);
g_value_unset (&val);
v = gst_structure_get_value (structure, "format");
gst_value_list_merge (&val, v, &list);
gst_structure_set_value (structure, "format", &val);
g_value_unset (&val);
g_value_unset (&list);
gst_structure_remove_field (structure, "colorimetry");
gst_structure_remove_field (structure, "chroma-site");
gst_caps_append_structure (to, structure);
}
/* filter against set allowed caps on the pad */
other = (dir == GST_PAD_SINK) ? trans->srcpad : trans->sinkpad;
templ = gst_pad_get_pad_template_caps (other);
ret = gst_caps_intersect (to, templ);
gst_caps_unref (to);
gst_caps_unref (templ);
if (ret && filter) {
GstCaps *intersection;
intersection =
gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (ret);
ret = intersection;
}
return output;
return ret;
}
static void

@@ -91,24 +91,26 @@ enum
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
GST_BOILERPLATE (GstEdgeDetect, gst_edge_detect, GstElement, GST_TYPE_ELEMENT);
G_DEFINE_TYPE (GstEdgeDetect, gst_edge_detect, GST_TYPE_ELEMENT);
static void gst_edge_detect_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_edge_detect_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_edge_detect_set_caps (GstPad * pad, GstCaps * caps);
static GstFlowReturn gst_edge_detect_chain (GstPad * pad, GstBuffer * buf);
static gboolean gst_edge_detect_handle_sink_event (GstPad * pad,
GstObject * parent, GstEvent * event);
static GstFlowReturn gst_edge_detect_chain (GstPad * pad, GstObject * parent,
GstBuffer * buf);
/* Clean up */
static void
@@ -123,25 +125,7 @@ gst_edge_detect_finalize (GObject * obj)
cvReleaseImage (&filter->cvEdge);
}
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
/* GObject vmethod implementations */
static void
gst_edge_detect_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"edgedetect",
"Filter/Effect/Video",
"Performs canny edge detection on videos and images.",
"Michael Sheldon <mike@mikeasoft.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
G_OBJECT_CLASS (gst_edge_detect_parent_class)->finalize (obj);
}
/* initialize the edgedetect's class */
@@ -150,6 +134,7 @@ gst_edge_detect_class_init (GstEdgeDetectClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gobject_class = (GObjectClass *) klass;
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_edge_detect_finalize);
@@ -172,6 +157,17 @@ gst_edge_detect_class_init (GstEdgeDetectClass * klass)
g_param_spec_int ("aperture", "Aperture",
"Aperture size for Sobel operator (Must be either 3, 5 or 7", 3, 7, 3,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_details_simple (element_class,
"edgedetect",
"Filter/Effect/Video",
"Performs canny edge detection on videos and images.",
"Michael Sheldon <mike@mikeasoft.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
}
/* initialize the new element
@@ -180,19 +176,17 @@ gst_edge_detect_class_init (GstEdgeDetectClass * klass)
* initialize instance structure
*/
static void
gst_edge_detect_init (GstEdgeDetect * filter, GstEdgeDetectClass * gclass)
gst_edge_detect_init (GstEdgeDetect * filter)
{
filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_setcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_edge_detect_set_caps));
gst_pad_set_getcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
GST_PAD_SET_PROXY_CAPS (filter->sinkpad);
gst_pad_set_event_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_edge_detect_handle_sink_event));
gst_pad_set_chain_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_edge_detect_chain));
filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
gst_pad_set_getcaps_function (filter->srcpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
GST_PAD_SET_PROXY_CAPS (filter->srcpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
@@ -256,41 +250,58 @@ gst_edge_detect_get_property (GObject * object, guint prop_id,
/* this function handles the link with other elements */
static gboolean
gst_edge_detect_set_caps (GstPad * pad, GstCaps * caps)
gst_edge_detect_handle_sink_event (GstPad * pad, GstObject * parent,
GstEvent * event)
{
GstEdgeDetect *filter;
GstPad *otherpad;
gint width, height;
GstStructure *structure;
gboolean res = TRUE;
filter = GST_EDGE_DETECT (gst_pad_get_parent (pad));
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "width", &width);
gst_structure_get_int (structure, "height", &height);
filter = GST_EDGE_DETECT (parent);
filter->cvImage = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 3);
filter->cvCEdge = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 3);
filter->cvGray = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 1);
filter->cvEdge = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 1);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
otherpad = (pad == filter->srcpad) ? filter->sinkpad : filter->srcpad;
gst_object_unref (filter);
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "width", &width);
gst_structure_get_int (structure, "height", &height);
filter->cvImage = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 3);
filter->cvCEdge = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 3);
filter->cvGray = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 1);
filter->cvEdge = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 1);
break;
}
default:
break;
}
res = gst_pad_event_default (pad, parent, event);
return res;
return gst_pad_set_caps (otherpad, caps);
}
/* chain function
* this function does the actual processing
*/
static GstFlowReturn
gst_edge_detect_chain (GstPad * pad, GstBuffer * buf)
gst_edge_detect_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstEdgeDetect *filter;
GstBuffer *outbuf;
GstMapInfo in_info;
GstMapInfo out_info;
filter = GST_EDGE_DETECT (GST_OBJECT_PARENT (pad));
filter = GST_EDGE_DETECT (parent);
filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
buf = gst_buffer_make_writable (buf);
gst_buffer_map (buf, &in_info, GST_MAP_WRITE);
filter->cvImage->imageData = (char *) in_info.data;
cvCvtColor (filter->cvImage, filter->cvGray, CV_RGB2GRAY);
cvSmooth (filter->cvGray, filter->cvEdge, CV_BLUR, 3, 3, 0, 0);
@@ -306,10 +317,14 @@ gst_edge_detect_chain (GstPad * pad, GstBuffer * buf)
}
outbuf = gst_buffer_new_and_alloc (filter->cvCEdge->imageSize);
gst_buffer_copy_metadata (outbuf, buf, GST_BUFFER_COPY_ALL);
memcpy (GST_BUFFER_DATA (outbuf), filter->cvCEdge->imageData,
GST_BUFFER_SIZE (outbuf));
gst_buffer_copy_into (outbuf, buf, GST_BUFFER_COPY_METADATA, 0, -1);
gst_buffer_map (outbuf, &out_info, GST_MAP_WRITE);
memcpy (out_info.data, filter->cvCEdge->imageData,
gst_buffer_get_size (outbuf));
gst_buffer_unmap (buf, &in_info);
gst_buffer_unmap (outbuf, &out_info);
gst_buffer_unref (buf);
return gst_pad_push (filter->srcpad, outbuf);
}

@@ -89,24 +89,26 @@ enum
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
GST_BOILERPLATE (GstFaceBlur, gst_face_blur, GstElement, GST_TYPE_ELEMENT);
G_DEFINE_TYPE (GstFaceBlur, gst_face_blur, GST_TYPE_ELEMENT);
static void gst_face_blur_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_face_blur_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_face_blur_set_caps (GstPad * pad, GstCaps * caps);
static GstFlowReturn gst_face_blur_chain (GstPad * pad, GstBuffer * buf);
static gboolean gst_face_blur_handle_sink_event (GstPad * pad,
GstObject * parent, GstEvent * event);
static GstFlowReturn gst_face_blur_chain (GstPad * pad, GstObject * parent,
GstBuffer * buf);
static void gst_face_blur_load_profile (GstFaceBlur * filter);
@@ -123,15 +125,27 @@ gst_face_blur_finalize (GObject * obj)
g_free (filter->profile);
G_OBJECT_CLASS (parent_class)->finalize (obj);
G_OBJECT_CLASS (gst_face_blur_parent_class)->finalize (obj);
}
/* GObject vmethod implementations */
/* initialize the faceblur's class */
static void
gst_face_blur_base_init (gpointer gclass)
gst_face_blur_class_init (GstFaceBlurClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
GObjectClass *gobject_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gobject_class = (GObjectClass *) klass;
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_face_blur_finalize);
gobject_class->set_property = gst_face_blur_set_property;
gobject_class->get_property = gst_face_blur_get_property;
g_object_class_install_property (gobject_class, PROP_PROFILE,
g_param_spec_string ("profile", "Profile",
"Location of Haar cascade file to use for face blurion",
DEFAULT_PROFILE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_details_simple (element_class,
"faceblur",
@@ -145,44 +159,23 @@ gst_face_blur_base_init (gpointer gclass)
gst_static_pad_template_get (&sink_factory));
}
/* initialize the faceblur's class */
static void
gst_face_blur_class_init (GstFaceBlurClass * klass)
{
GObjectClass *gobject_class;
gobject_class = (GObjectClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_face_blur_finalize);
gobject_class->set_property = gst_face_blur_set_property;
gobject_class->get_property = gst_face_blur_get_property;
g_object_class_install_property (gobject_class, PROP_PROFILE,
g_param_spec_string ("profile", "Profile",
"Location of Haar cascade file to use for face blurion",
DEFAULT_PROFILE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* initialize instance structure
*/
static void
gst_face_blur_init (GstFaceBlur * filter, GstFaceBlurClass * gclass)
gst_face_blur_init (GstFaceBlur * filter)
{
filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_setcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_face_blur_set_caps));
gst_pad_set_getcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
GST_PAD_SET_PROXY_CAPS (filter->sinkpad);
gst_pad_set_event_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_face_blur_handle_sink_event));
gst_pad_set_chain_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_face_blur_chain));
filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
gst_pad_set_getcaps_function (filter->srcpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
GST_PAD_SET_PROXY_CAPS (filter->srcpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
@@ -228,41 +221,56 @@ gst_face_blur_get_property (GObject * object, guint prop_id,
/* this function handles the link with other elements */
static gboolean
gst_face_blur_set_caps (GstPad * pad, GstCaps * caps)
gst_face_blur_handle_sink_event (GstPad * pad, GstObject * parent,
GstEvent * event)
{
GstFaceBlur *filter;
GstPad *otherpad;
gint width, height;
GstStructure *structure;
gboolean res = TRUE;
filter = GST_FACE_BLUR (gst_pad_get_parent (pad));
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "width", &width);
gst_structure_get_int (structure, "height", &height);
filter = GST_FACE_BLUR (parent);
filter->cvImage = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 3);
filter->cvGray = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 1);
filter->cvStorage = cvCreateMemStorage (0);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
otherpad = (pad == filter->srcpad) ? filter->sinkpad : filter->srcpad;
gst_object_unref (filter);
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "width", &width);
gst_structure_get_int (structure, "height", &height);
return gst_pad_set_caps (otherpad, caps);
filter->cvImage = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 3);
filter->cvGray = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 1);
filter->cvStorage = cvCreateMemStorage (0);
break;
}
default:
break;
}
res = gst_pad_event_default (pad, parent, event);
return res;
}
/* chain function
* this function does the actual processing
*/
static GstFlowReturn
gst_face_blur_chain (GstPad * pad, GstBuffer * buf)
gst_face_blur_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstFaceBlur *filter;
CvSeq *faces;
GstMapInfo info;
int i;
filter = GST_FACE_BLUR (GST_OBJECT_PARENT (pad));
filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
buf = gst_buffer_make_writable (buf);
gst_buffer_map (buf, &info, GST_MAP_READWRITE);
filter->cvImage->imageData = (char *) info.data;
cvCvtColor (filter->cvImage, filter->cvGray, CV_RGB2GRAY);
cvClearMemStorage (filter->cvStorage);

@@ -154,17 +154,16 @@ gst_opencv_face_detect_flags_get_type (void)
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
GST_BOILERPLATE (GstFaceDetect, gst_face_detect, GstOpencvVideoFilter,
GST_TYPE_OPENCV_VIDEO_FILTER);
G_DEFINE_TYPE (GstFaceDetect, gst_face_detect, GST_TYPE_OPENCV_VIDEO_FILTER);
static void gst_face_detect_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
@@ -205,26 +204,7 @@ gst_face_detect_finalize (GObject * obj)
if (filter->cvEyesDetect)
cvReleaseHaarClassifierCascade (&filter->cvEyesDetect);
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
/* GObject vmethod implementations */
static void
gst_face_detect_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"facedetect",
"Filter/Effect/Video",
"Performs face detection on videos and images, providing detected positions via bus messages",
"Michael Sheldon <mike@mikeasoft.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
G_OBJECT_CLASS (gst_face_detect_parent_class)->finalize (obj);
}
/* initialize the facedetect's class */
@@ -234,6 +214,7 @@ gst_face_detect_class_init (GstFaceDetectClass * klass)
GObjectClass *gobject_class;
GstOpencvVideoFilterClass *gstopencvbasefilter_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gobject_class = (GObjectClass *) klass;
gstopencvbasefilter_class = (GstOpencvVideoFilterClass *) klass;
@@ -288,13 +269,24 @@ gst_face_detect_class_init (GstFaceDetectClass * klass)
g_param_spec_int ("min-size-height", "Minimum face height",
"Minimum area height to be recognized as a face", 0, G_MAXINT,
DEFAULT_MIN_SIZE_HEIGHT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_details_simple (element_class,
"facedetect",
"Filter/Effect/Video",
"Performs face detection on videos and images, providing detected positions via bus messages",
"Michael Sheldon <mike@mikeasoft.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
}
/* initialize the new element
* initialize instance structure
*/
static void
gst_face_detect_init (GstFaceDetect * filter, GstFaceDetectClass * gclass)
gst_face_detect_init (GstFaceDetect * filter)
{
filter->face_profile = g_strdup (DEFAULT_FACE_PROFILE);
filter->nose_profile = g_strdup (DEFAULT_NOSE_PROFILE);
@@ -679,7 +671,8 @@ gst_face_detect_transform_ip (GstOpencvVideoFilter * base, GstBuffer * buf,
}
}
gst_structure_set_value (msg->structure, "faces", &facelist);
gst_structure_set_value ((GstStructure *) gst_message_get_structure (msg),
"faces", &facelist);
g_value_unset (&facelist);
gst_element_post_message (GST_ELEMENT (filter), msg);
}

@@ -143,23 +143,24 @@ enum
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB));
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB")));
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB));
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB")));
GST_BOILERPLATE (GstMotioncells, gst_motion_cells, GstElement,
GST_TYPE_ELEMENT);
G_DEFINE_TYPE (GstMotioncells, gst_motion_cells, GST_TYPE_ELEMENT);
static void gst_motion_cells_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_motion_cells_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_motion_cells_set_caps (GstPad * pad, GstCaps * caps);
static GstFlowReturn gst_motion_cells_chain (GstPad * pad, GstBuffer * buf);
static gboolean gst_motion_cells_handle_sink_event (GstPad * pad,
GstObject * parent, GstEvent * event);
static GstFlowReturn gst_motion_cells_chain (GstPad * pad, GstObject * parent,
GstBuffer * buf);
static void gst_motioncells_update_motion_cells (GstMotioncells * filter);
static void gst_motioncells_update_motion_masks (GstMotioncells * filter);
@@ -194,25 +195,7 @@ gst_motion_cells_finalize (GObject * obj)
GFREE (filter->basename_datafile);
GFREE (filter->datafile_extension);
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
/* GObject vmethod implementations */
static void
gst_motion_cells_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"motioncells",
"Filter/Effect/Video",
"Performs motion detection on videos and images, providing detected motion cells index via bus messages",
"Robert Jobbagy <jobbagy dot robert at gmail dot com>, Nicola Murino <nicola dot murino at gmail.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
G_OBJECT_CLASS (gst_motion_cells_parent_class)->finalize (obj);
}
/* initialize the motioncells's class */
@@ -221,8 +204,8 @@ gst_motion_cells_class_init (GstMotioncellsClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gobject_class = (GObjectClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_motion_cells_finalize);
gobject_class->set_property = gst_motion_cells_set_property;
@@ -316,6 +299,17 @@ gst_motion_cells_class_init (GstMotioncellsClass * klass)
"Motion Cell Border Thickness, if it's -1 then motion cell will be fill",
THICKNESS_MIN, THICKNESS_MAX, THICKNESS_DEF,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_details_simple (element_class,
"motioncells",
"Filter/Effect/Video",
"Performs motion detection on videos and images, providing detected motion cells index via bus messages",
"Robert Jobbagy <jobbagy dot robert at gmail dot com>, Nicola Murino <nicola dot murino at gmail.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
}
/* initialize the new element
@@ -324,19 +318,18 @@ gst_motion_cells_class_init (GstMotioncellsClass * klass)
* initialize instance structure
*/
static void
gst_motion_cells_init (GstMotioncells * filter, GstMotioncellsClass * gclass)
gst_motion_cells_init (GstMotioncells * filter)
{
filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_setcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_motion_cells_set_caps));
gst_pad_set_getcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
GST_PAD_SET_PROXY_CAPS (filter->sinkpad);
gst_pad_set_event_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_motion_cells_handle_sink_event));
gst_pad_set_chain_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_motion_cells_chain));
filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
gst_pad_set_getcaps_function (filter->srcpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
GST_PAD_SET_PROXY_CAPS (filter->srcpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
@@ -817,38 +810,53 @@ gst_motioncells_update_motion_masks (GstMotioncells * filter)
/* this function handles the link with other elements */
static gboolean
gst_motion_cells_set_caps (GstPad * pad, GstCaps * caps)
gst_motion_cells_handle_sink_event (GstPad * pad, GstObject * parent,
GstEvent * event)
{
GstMotioncells *filter;
GstPad *otherpad;
GstStructure *structure;
int numerator, denominator;
GstVideoInfo info;
gboolean res = TRUE;
filter = gst_motion_cells (gst_pad_get_parent (pad));
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "width", &filter->width);
gst_structure_get_int (structure, "height", &filter->height);
gst_structure_get_fraction (structure, "framerate", &numerator, &denominator);
filter->framerate = (double) numerator / (double) denominator;
if (filter->cvImage)
cvReleaseImage (&filter->cvImage);
filter->cvImage =
cvCreateImage (cvSize (filter->width, filter->height), IPL_DEPTH_8U, 3);
filter = gst_motion_cells (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
gst_video_info_from_caps (&info, caps);
filter->width = info.width;
filter->height = info.height;
filter->framerate = (double) info.fps_n / (double) info.fps_d;
if (filter->cvImage)
cvReleaseImage (&filter->cvImage);
filter->cvImage =
cvCreateImage (cvSize (filter->width, filter->height), IPL_DEPTH_8U,
3);
break;
}
default:
break;
}
res = gst_pad_event_default (pad, parent, event);
return res;
otherpad = (pad == filter->srcpad) ? filter->sinkpad : filter->srcpad;
gst_object_unref (filter);
return gst_pad_set_caps (otherpad, caps);
}
/* chain function
* this function does the actual processing
*/
static GstFlowReturn
gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
gst_motion_cells_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstMotioncells *filter;
filter = gst_motion_cells (GST_OBJECT_PARENT (pad));
GstMapInfo info;
filter = gst_motion_cells (parent);
GST_OBJECT_LOCK (filter);
if (filter->calculate_motion) {
double sensitivity;
@@ -865,8 +873,10 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
motioncellidx *motionmaskcellsidx;
cellscolor motioncellscolor;
motioncellidx *motioncellsidx;
buf = gst_buffer_make_writable (buf);
filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
gst_buffer_map (buf, &info, GST_MAP_WRITE);
filter->cvImage->imageData = (char *) info.data;
if (filter->firstframe) {
setPrevFrame (filter->cvImage, filter->id);
filter->firstframe = FALSE;

@@ -29,30 +29,33 @@ static gboolean
gst_opencv_get_ipl_depth_and_channels (GstStructure * structure,
gint * ipldepth, gint * channels, GError ** err)
{
gint depth, bpp;
GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
const GstVideoFormatInfo *info;
gint depth = 0, i;
const gchar *s;
if (!gst_structure_get_int (structure, "depth", &depth) ||
!gst_structure_get_int (structure, "bpp", &bpp)) {
if (gst_structure_has_name (structure, "video/x-raw")) {
if (!(s = gst_structure_get_string (structure, "format")))
return FALSE;
format = gst_video_format_from_string (s);
if (format == GST_VIDEO_FORMAT_UNKNOWN)
return FALSE;
}
info = gst_video_format_get_info (format);
if (GST_VIDEO_FORMAT_INFO_IS_RGB (info))
*channels=3;
else if (GST_VIDEO_FORMAT_INFO_IS_GRAY (info))
*channels=1;
else {
g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
"No depth/bpp in caps");
"Unsupported structure %s", gst_structure_get_name (structure));
return FALSE;
}
if (depth != bpp) {
g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
"Depth and bpp should be equal");
return FALSE;
}
if (gst_structure_has_name (structure, "video/x-raw-rgb")) {
*channels = 3;
} else if (gst_structure_has_name (structure, "video/x-raw-gray")) {
*channels = 1;
} else {
g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
"Unsupported caps %s", gst_structure_get_name (structure));
return FALSE;
}
for (i = 0; i < GST_VIDEO_FORMAT_INFO_N_COMPONENTS (info); i++)
depth += GST_VIDEO_FORMAT_INFO_DEPTH (info, i);
if (depth / *channels == 8) {
/* TODO signdness? */
@@ -92,9 +95,42 @@ gboolean
gst_opencv_parse_iplimage_params_from_caps (GstCaps * caps, gint * width,
gint * height, gint * ipldepth, gint * channels, GError ** err)
{
return
gst_opencv_parse_iplimage_params_from_structure (gst_caps_get_structure
(caps, 0), width, height, ipldepth, channels, err);
GstVideoInfo info;
gint i, depth = 0;
if (!gst_video_info_from_caps (&info, caps)) {
GST_ERROR ("Failed to get the videoinfo from caps");
g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
"No width/heighti/depth/channels in caps");
return FALSE;
}
*width = GST_VIDEO_INFO_WIDTH (&info);
*height = GST_VIDEO_INFO_HEIGHT (&info);
if (GST_VIDEO_INFO_IS_RGB (&info))
*channels = 3;
else if (GST_VIDEO_INFO_IS_GRAY (&info))
*channels = 1;
else {
g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
"Unsupported caps %s", gst_caps_to_string(caps));
return FALSE;
}
for (i = 0; i < GST_VIDEO_INFO_N_COMPONENTS (&info); i++)
depth += GST_VIDEO_INFO_COMP_DEPTH (&info, i);
if (depth / *channels == 8) {
/* TODO signdness? */
*ipldepth = IPL_DEPTH_8U;
} else if (depth / *channels == 16) {
*ipldepth = IPL_DEPTH_16U;
} else {
g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
"Unsupported depth/channels %d/%d", depth, *channels);
return FALSE;
}
return TRUE;
}
GstCaps *

@@ -73,7 +73,6 @@ static void gst_opencv_video_filter_class_init (GstOpencvVideoFilterClass *
klass);
static void gst_opencv_video_filter_init (GstOpencvVideoFilter * trans,
GstOpencvVideoFilterClass * klass);
static void gst_opencv_video_filter_base_init (gpointer gclass);
static gboolean gst_opencv_video_filter_set_caps (GstBaseTransform * trans,
GstCaps * incaps, GstCaps * outcaps);
@@ -96,7 +95,7 @@ gst_opencv_video_filter_get_type (void)
GType _type;
static const GTypeInfo opencv_base_transform_info = {
sizeof (GstOpencvVideoFilterClass),
(GBaseInitFunc) gst_opencv_video_filter_base_init,
NULL,
NULL,
(GClassInitFunc) gst_opencv_video_filter_class_init,
NULL,
@@ -128,12 +127,6 @@ gst_opencv_video_filter_finalize (GObject * obj)
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
/* GObject vmethod implementations */
static void
gst_opencv_video_filter_base_init (gpointer gclass)
{
}
static void
gst_opencv_video_filter_class_init (GstOpencvVideoFilterClass * klass)
{
@@ -159,7 +152,7 @@ gst_opencv_video_filter_class_init (GstOpencvVideoFilterClass * klass)
static void
gst_opencv_video_filter_init (GstOpencvVideoFilter * transform,
GstOpencvVideoFilterClass * bclass)
GstOpencvVideoFilterClass * klass)
{
}
@@ -169,6 +162,8 @@ gst_opencv_video_filter_transform (GstBaseTransform * trans,
{
GstOpencvVideoFilter *transform;
GstOpencvVideoFilterClass *fclass;
GstMapInfo in_info;
GstMapInfo out_info;
GstFlowReturn ret;
transform = GST_OPENCV_VIDEO_FILTER (trans);
@@ -178,16 +173,17 @@ gst_opencv_video_filter_transform (GstBaseTransform * trans,
g_return_val_if_fail (transform->cvImage != NULL, GST_FLOW_ERROR);
g_return_val_if_fail (transform->out_cvImage != NULL, GST_FLOW_ERROR);
transform->cvImage->imageData = (char *) gst_buffer_map (inbuf,
NULL, NULL, GST_MAP_READ);
transform->out_cvImage->imageData = (char *) gst_buffer_map (outbuf,
NULL, NULL, GST_MAP_WRITE);
gst_buffer_map (inbuf, &in_info, GST_MAP_READ);
transform->cvImage->imageData = (char *) in_info.data;
gst_buffer_map (outbuf, &out_info, GST_MAP_WRITE);
transform->out_cvImage->imageData = (char *) out_info.data;
ret = fclass->cv_trans_func (transform, inbuf, transform->cvImage, outbuf,
transform->out_cvImage);
gst_buffer_unmap (inbuf, transform->cvImage->imageData, -1);
gst_buffer_unmap (outbuf, transform->out_cvImage->imageData, -1);
gst_buffer_unmap (inbuf, &in_info);
gst_buffer_unmap (outbuf, &out_info);
return ret;
}
@@ -198,6 +194,7 @@ gst_opencv_video_filter_transform_ip (GstBaseTransform * trans,
{
GstOpencvVideoFilter *transform;
GstOpencvVideoFilterClass *fclass;
GstMapInfo info;
GstFlowReturn ret;
transform = GST_OPENCV_VIDEO_FILTER (trans);
@@ -210,13 +207,13 @@ gst_opencv_video_filter_transform_ip (GstBaseTransform * trans,
* level */
buffer = gst_buffer_make_writable (buffer);
transform->cvImage->imageData = (char *) gst_buffer_map (buffer,
NULL, NULL, GST_MAP_READWRITE);
gst_buffer_map (buffer, &info, GST_MAP_READWRITE);
transform->cvImage->imageData = (char *) info.data;
/* FIXME how to release buffer? */
ret = fclass->cv_trans_ip_func (transform, buffer, transform->cvImage);
gst_buffer_unmap (buffer, transform->cvImage->imageData, -1);
gst_buffer_unmap (buffer, &info);
return ret;
}

@@ -93,25 +93,26 @@ enum
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
GST_BOILERPLATE (GstPyramidSegment, gst_pyramid_segment, GstElement,
GST_TYPE_ELEMENT);
G_DEFINE_TYPE (GstPyramidSegment, gst_pyramid_segment, GST_TYPE_ELEMENT);
static void gst_pyramid_segment_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_pyramid_segment_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_pyramid_segment_set_caps (GstPad * pad, GstCaps * caps);
static GstFlowReturn gst_pyramid_segment_chain (GstPad * pad, GstBuffer * buf);
static gboolean gst_pyramid_segment_handle_sink_event (GstPad * pad,
GstObject * parent, GstEvent * event);
static GstFlowReturn gst_pyramid_segment_chain (GstPad * pad,
GstObject * parent, GstBuffer * buf);
/* Clean up */
static void
@@ -124,26 +125,10 @@ gst_pyramid_segment_finalize (GObject * obj)
cvReleaseImage (&filter->cvSegmentedImage);
}
G_OBJECT_CLASS (parent_class)->finalize (obj);
G_OBJECT_CLASS (gst_pyramid_segment_parent_class)->finalize (obj);
}
/* GObject vmethod implementations */
static void
gst_pyramid_segment_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"pyramidsegment",
"Filter/Effect/Video",
"Applies pyramid segmentation to a video or image.",
"Michael Sheldon <mike@mikeasoft.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
}
/* initialize the pyramidsegment's class */
static void
@@ -151,8 +136,8 @@ gst_pyramid_segment_class_init (GstPyramidSegmentClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gobject_class = (GObjectClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_pyramid_segment_finalize);
gobject_class->set_property = gst_pyramid_segment_set_property;
@ -176,6 +161,17 @@ gst_pyramid_segment_class_init (GstPyramidSegmentClass * klass)
g_param_spec_int ("level", "Level",
"Maximum level of the pyramid segmentation", 0, 4, 4,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_details_simple (element_class,
"pyramidsegment",
"Filter/Effect/Video",
"Applies pyramid segmentation to a video or image.",
"Michael Sheldon <mike@mikeasoft.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
}
/* initialize the new element
@ -184,20 +180,18 @@ gst_pyramid_segment_class_init (GstPyramidSegmentClass * klass)
* initialize instance structure
*/
static void
gst_pyramid_segment_init (GstPyramidSegment * filter,
GstPyramidSegmentClass * gclass)
gst_pyramid_segment_init (GstPyramidSegment * filter)
{
filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_setcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_pyramid_segment_set_caps));
gst_pad_set_getcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
GST_PAD_SET_PROXY_CAPS (filter->sinkpad);
gst_pad_set_event_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_pyramid_segment_handle_sink_event));
gst_pad_set_chain_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_pyramid_segment_chain));
filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
gst_pad_set_getcaps_function (filter->srcpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
GST_PAD_SET_PROXY_CAPS (filter->srcpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
@ -264,39 +258,50 @@ gst_pyramid_segment_get_property (GObject * object, guint prop_id,
/* this function handles the link with other elements */
static gboolean
gst_pyramid_segment_set_caps (GstPad * pad, GstCaps * caps)
gst_pyramid_segment_handle_sink_event (GstPad * pad, GstObject * parent,
GstEvent * event)
{
GstPyramidSegment *filter;
GstPad *otherpad;
GstStructure *structure;
gint width, height;
GstVideoInfo info;
gboolean res = TRUE;
filter = GST_PYRAMID_SEGMENT (parent);
filter = GST_PYRAMID_SEGMENT (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
gst_video_info_from_caps (&info, caps);
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "width", &width);
gst_structure_get_int (structure, "height", &height);
filter->cvImage =
cvCreateImage (cvSize (info.width, info.height), IPL_DEPTH_8U, 3);
break;
}
default:
break;
}
filter->cvImage = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 3);
res = gst_pad_event_default (pad, parent, event);
otherpad = (pad == filter->srcpad) ? filter->sinkpad : filter->srcpad;
gst_object_unref (filter);
return gst_pad_set_caps (otherpad, caps);
return res;
}
/* chain function
* this function does the actual processing
*/
static GstFlowReturn
gst_pyramid_segment_chain (GstPad * pad, GstBuffer * buf)
gst_pyramid_segment_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstPyramidSegment *filter;
GstBuffer *outbuf;
GstMapInfo info;
GstMapInfo outinfo;
filter = GST_PYRAMID_SEGMENT (GST_OBJECT_PARENT (pad));
filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
buf = gst_buffer_make_writable (buf);
gst_buffer_map (buf, &info, GST_MAP_READWRITE);
filter->cvImage->imageData = (char *) info.data;
filter->cvSegmentedImage = cvCloneImage (filter->cvImage);
cvPyrSegmentation (filter->cvImage, filter->cvSegmentedImage, filter->storage,
@ -306,14 +311,17 @@ gst_pyramid_segment_chain (GstPad * pad, GstBuffer * buf)
* delete only the struct headers. Would avoid a memcpy here */
outbuf = gst_buffer_new_and_alloc (filter->cvSegmentedImage->imageSize);
gst_buffer_copy_metadata (outbuf, buf, GST_BUFFER_COPY_ALL);
memcpy (GST_BUFFER_DATA (outbuf), filter->cvSegmentedImage->imageData,
GST_BUFFER_SIZE (outbuf));
gst_buffer_copy_into (outbuf, buf, GST_BUFFER_COPY_METADATA, 0, -1);
gst_buffer_map (outbuf, &outinfo, GST_MAP_WRITE);
memcpy (outinfo.data, filter->cvSegmentedImage->imageData,
gst_buffer_get_size (outbuf));
gst_buffer_unmap (buf, &info);
gst_buffer_unref (buf);
cvReleaseImage (&filter->cvSegmentedImage);
g_assert (filter->cvSegmentedImage == NULL);
gst_buffer_unmap (outbuf, &outinfo);
return gst_pad_push (filter->srcpad, outbuf);
}
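
The removal of the setcaps vfunc above follows the general 1.0 negotiation model: caps arrive as a GST_EVENT_CAPS event on the sink pad and are parsed into a GstVideoInfo. In isolation that pattern looks roughly like the sketch below (function name is a placeholder, not from this patch):

#include <gst/gst.h>
#include <gst/video/video.h>

static gboolean
example_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:{
      GstCaps *caps;
      GstVideoInfo info;

      gst_event_parse_caps (event, &caps);
      if (gst_video_info_from_caps (&info, caps)) {
        /* width/height come from GstVideoInfo instead of reading the
         * caps structure fields by hand */
        GST_INFO_OBJECT (parent, "negotiated %dx%d", info.width, info.height);
      }
      break;
    }
    default:
      break;
  }

  /* handled or not, events are forwarded via the default handler */
  return gst_pad_event_default (pad, parent, event);
}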


@ -91,17 +91,16 @@ enum
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
GST_BOILERPLATE (GstTemplateMatch, gst_template_match, GstElement,
GST_TYPE_ELEMENT);
G_DEFINE_TYPE (GstTemplateMatch, gst_template_match, GST_TYPE_ELEMENT);
static void gst_template_match_finalize (GObject * object);
static void gst_template_match_set_property (GObject * object, guint prop_id,
@ -109,37 +108,23 @@ static void gst_template_match_set_property (GObject * object, guint prop_id,
static void gst_template_match_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_template_match_set_caps (GstPad * pad, GstCaps * caps);
static GstFlowReturn gst_template_match_chain (GstPad * pad, GstBuffer * buf);
static gboolean gst_template_match_handle_sink_event (GstPad * pad,
GstObject * parent, GstEvent * event);
static GstFlowReturn gst_template_match_chain (GstPad * pad, GstObject * parent,
GstBuffer * buf);
static void gst_template_match_load_template (GstTemplateMatch * filter);
static void gst_template_match_match (IplImage * input, IplImage * template,
IplImage * dist_image, double *best_res, CvPoint * best_pos, int method);
/* GObject vmethod implementations */
static void
gst_template_match_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"templatematch",
"Filter/Effect/Video",
"Performs template matching on videos and images, providing detected positions via bus messages.",
"Noam Lewis <jones.noamle@gmail.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
}
/* initialize the templatematch's class */
static void
gst_template_match_class_init (GstTemplateMatchClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gobject_class = (GObjectClass *) klass;
@ -158,6 +143,17 @@ gst_template_match_class_init (GstTemplateMatchClass * klass)
g_param_spec_boolean ("display", "Display",
"Sets whether the detected template should be highlighted in the output",
TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_details_simple (element_class,
"templatematch",
"Filter/Effect/Video",
"Performs template matching on videos and images, providing detected positions via bus messages.",
"Noam Lewis <jones.noamle@gmail.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
}
/* initialize the new element
@ -166,20 +162,15 @@ gst_template_match_class_init (GstTemplateMatchClass * klass)
* initialize instance structure
*/
static void
gst_template_match_init (GstTemplateMatch * filter,
GstTemplateMatchClass * gclass)
gst_template_match_init (GstTemplateMatch * filter)
{
filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_setcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_template_match_set_caps));
gst_pad_set_getcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
gst_pad_set_event_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_template_match_handle_sink_event));
gst_pad_set_chain_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_template_match_chain));
filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
gst_pad_set_getcaps_function (filter->srcpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
@ -260,25 +251,35 @@ gst_template_match_get_property (GObject * object, guint prop_id,
/* this function handles the link with other elements */
static gboolean
gst_template_match_set_caps (GstPad * pad, GstCaps * caps)
gst_template_match_handle_sink_event (GstPad * pad, GstObject * parent,
GstEvent * event)
{
GstTemplateMatch *filter;
GstPad *otherpad;
gint width, height;
GstStructure *structure;
GstVideoInfo info;
gboolean res = TRUE;
filter = GST_TEMPLATE_MATCH (gst_pad_get_parent (pad));
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "width", &width);
gst_structure_get_int (structure, "height", &height);
filter = GST_TEMPLATE_MATCH (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
gst_video_info_from_caps (&info, caps);
filter->cvImage =
cvCreateImageHeader (cvSize (width, height), IPL_DEPTH_8U, 3);
filter->cvImage =
cvCreateImageHeader (cvSize (info.width, info.height), IPL_DEPTH_8U,
3);
break;
}
default:
break;
}
res = gst_pad_event_default (pad, parent, event);
return res;
otherpad = (pad == filter->srcpad) ? filter->sinkpad : filter->srcpad;
gst_object_unref (filter);
return gst_pad_set_caps (otherpad, caps);
}
static void
@ -297,29 +298,32 @@ gst_template_match_finalize (GObject * object)
cvReleaseImage (&filter->cvTemplateImage);
}
GST_CALL_PARENT (G_OBJECT_CLASS, finalize, (object));
G_OBJECT_CLASS (gst_template_match_parent_class)->finalize (object);
}
/* chain function
* this function does the actual processing
*/
static GstFlowReturn
gst_template_match_chain (GstPad * pad, GstBuffer * buf)
gst_template_match_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
GstTemplateMatch *filter;
CvPoint best_pos;
double best_res;
GstMapInfo info;
filter = GST_TEMPLATE_MATCH (GST_OBJECT_PARENT (pad));
filter = GST_TEMPLATE_MATCH (parent);
/* FIXME Why does template == NULL return OK?
* shouldn't it be a passthrough instead? */
if ((!filter) || (!buf) || filter->template == NULL) {
return GST_FLOW_OK;
}
GST_DEBUG_OBJECT (filter, "Buffer size %u ", GST_BUFFER_SIZE (buf));
GST_DEBUG_OBJECT (filter, "Buffer size %u ", gst_buffer_get_size (buf));
filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
buf = gst_buffer_make_writable (buf);
gst_buffer_map (buf, &info, GST_MAP_READWRITE);
filter->cvImage->imageData = (char *) info.data;
if (!filter->cvDistImage) {
if (filter->cvTemplateImage->width > filter->cvImage->width) {
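
Across all the ported elements the GST_BOILERPLATE → G_DEFINE_TYPE switch has the same shape: base_init disappears, the element metadata and pad templates move into class_init, parent_class becomes the generated gst_*_parent_class, and the instance init loses its class argument. A stripped-down sketch of that shape (GstFoo and the template below are placeholder names, not part of this patch):

#include <gst/gst.h>

typedef struct _GstFoo
{
  GstElement element;
} GstFoo;

typedef struct _GstFooClass
{
  GstElementClass parent_class;
} GstFooClass;

G_DEFINE_TYPE (GstFoo, gst_foo, GST_TYPE_ELEMENT);

static GstStaticPadTemplate foo_src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS_ANY);

static void
gst_foo_class_init (GstFooClass * klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  /* previously done in base_init under 0.10 */
  gst_element_class_set_details_simple (element_class, "foo",
      "Filter/Effect/Video", "Placeholder element",
      "Nobody <nobody@example.invalid>");
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&foo_src_template));
}

static void
gst_foo_init (GstFoo * foo)
{
  /* no class argument any more; parent_class is gst_foo_parent_class */
}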


@ -51,7 +51,7 @@
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch-0.10 videotestsrc ! ffmpegcolorspace ! opencvtextoverlay text="Opencv Text Overlay " ! ffmpegcolorspace ! xvimagesink
* gst-launch-1.0 videotestsrc ! videoconvert ! opencvtextoverlay text="Opencv Text Overlay " ! videoconvert ! xvimagesink
* ]|
* </refsect2>
*/
@ -68,7 +68,7 @@
GST_DEBUG_CATEGORY_STATIC (gst_opencv_text_overlay_debug);
#define GST_CAT_DEFAULT gst_opencv_opencv_text_overlay_debug
#define DEFAULT_PROP_TEXT ""
#define DEFAULT_PROP_TEXT "Opencv Text Overlay"
#define DEFAULT_PROP_WIDTH 1
#define DEFAULT_PROP_HEIGHT 1
#define DEFAULT_PROP_XPOS 50
@ -106,28 +106,26 @@ enum
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_RGB)
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
);
GST_BOILERPLATE (GstOpencvTextOverlay, gst_opencv_text_overlay, GstElement,
GST_TYPE_ELEMENT);
G_DEFINE_TYPE (GstOpencvTextOverlay, gst_opencv_text_overlay, GST_TYPE_ELEMENT);
static void gst_opencv_text_overlay_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_opencv_text_overlay_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static gboolean gst_opencv_text_overlay_set_caps (GstPad * pad, GstCaps * caps);
static gboolean gst_opencv_text_overlay_handle_sink_event (GstPad * pad,
GstObject * parent, GstEvent * event);
static GstFlowReturn gst_opencv_text_overlay_chain (GstPad * pad,
GstBuffer * buf);
GstObject * parent, GstBuffer * buf);
/* Clean up */
static void
@ -139,27 +137,7 @@ gst_opencv_text_overlay_finalize (GObject * obj)
cvReleaseImage (&filter->cvImage);
}
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
/* GObject vmethod implementations */
static void
gst_opencv_text_overlay_base_init (gpointer gclass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
gst_element_class_set_details_simple (element_class,
"opencvtextoverlay",
"Filter/Effect/Video",
"Write text on the top of video", "sreerenj<bsreerenj@gmail.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
G_OBJECT_CLASS (gst_opencv_text_overlay_parent_class)->finalize (obj);
}
/* initialize the opencvtextoverlay's class */
@ -168,16 +146,15 @@ gst_opencv_text_overlay_class_init (GstOpencvTextOverlayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gobject_class = (GObjectClass *) klass;
parent_class = g_type_class_peek_parent (klass);
gobject_class->finalize =
GST_DEBUG_FUNCPTR (gst_opencv_text_overlay_finalize);
gobject_class->set_property = gst_opencv_text_overlay_set_property;
gobject_class->get_property = gst_opencv_text_overlay_get_property;
g_object_class_install_property (gobject_class, PROP_TEXT,
g_param_spec_string ("text", "text",
"Text to be display.", DEFAULT_PROP_TEXT,
@ -223,6 +200,16 @@ gst_opencv_text_overlay_class_init (GstOpencvTextOverlayClass * klass)
"Sets the width of fonts", 1.0, 5.0,
DEFAULT_WIDTH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_set_details_simple (element_class,
"opencvtextoverlay",
"Filter/Effect/Video",
"Write text on the top of video", "sreerenj<bsreerenj@gmail.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
}
/* initialize the new element
@ -231,23 +218,22 @@ gst_opencv_text_overlay_class_init (GstOpencvTextOverlayClass * klass)
* initialize instance structure
*/
static void
gst_opencv_text_overlay_init (GstOpencvTextOverlay * filter,
GstOpencvTextOverlayClass * gclass)
gst_opencv_text_overlay_init (GstOpencvTextOverlay * filter)
{
filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_setcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_opencv_text_overlay_set_caps));
gst_pad_set_getcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
GST_PAD_SET_PROXY_CAPS (filter->sinkpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
gst_pad_set_event_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_opencv_text_overlay_handle_sink_event));
gst_pad_set_chain_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_opencv_text_overlay_chain));
filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
gst_pad_set_getcaps_function (filter->srcpad,
GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
GST_PAD_SET_PROXY_CAPS (filter->srcpad);
gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
filter->textbuf = g_strdup (DEFAULT_PROP_TEXT);
filter->width = DEFAULT_PROP_WIDTH;
filter->height = DEFAULT_PROP_HEIGHT;
@ -343,44 +329,59 @@ gst_opencv_text_overlay_get_property (GObject * object, guint prop_id,
}
}
/* GstElement vmethod implementations */
/* this function handles the link with other elements */
static gboolean
gst_opencv_text_overlay_set_caps (GstPad * pad, GstCaps * caps)
gst_opencv_text_overlay_handle_sink_event (GstPad * pad, GstObject * parent,
GstEvent * event)
{
GstOpencvTextOverlay *filter;
GstPad *otherpad;
gint width, height;
GstStructure *structure;
gboolean res = TRUE;
filter = GST_OPENCV_TEXT_OVERLAY (gst_pad_get_parent (pad));
filter = GST_OPENCV_TEXT_OVERLAY (parent);
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "width", &width);
gst_structure_get_int (structure, "height", &height);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
filter->cvImage = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 3);
filter->cvStorage = cvCreateMemStorage (0);
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_int (structure, "width", &width);
gst_structure_get_int (structure, "height", &height);
otherpad = (pad == filter->srcpad) ? filter->sinkpad : filter->srcpad;
gst_object_unref (filter);
if (!filter->cvImage) {
filter->cvImage = cvCreateImage (cvSize (width, height), IPL_DEPTH_8U, 3);
filter->cvStorage = cvCreateMemStorage (0);
}
break;
}
default:
break;
}
return gst_pad_set_caps (otherpad, caps);
res = gst_pad_event_default (pad, parent, event);
return res;
}
/* chain function
* this function does the actual processing
*/
static GstFlowReturn
gst_opencv_text_overlay_chain (GstPad * pad, GstBuffer * buf)
gst_opencv_text_overlay_chain (GstPad * pad, GstObject * parent,
GstBuffer * buf)
{
GstOpencvTextOverlay *filter;
GstMapInfo map_info;
guint8 *data;
filter = GST_OPENCV_TEXT_OVERLAY (GST_OBJECT_PARENT (pad));
filter = GST_OPENCV_TEXT_OVERLAY (parent);
filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
gst_buffer_map (buf, &map_info, GST_MAP_READ);
data = map_info.data;
filter->cvImage->imageData = (char *) data;
cvInitFont (&(filter->font), CV_FONT_VECTOR0, filter->width, filter->height,
0, filter->thickness, 0);
@ -390,6 +391,7 @@ gst_opencv_text_overlay_chain (GstPad * pad, GstBuffer * buf)
filter->ypos), &(filter->font), cvScalar (filter->colorR,
filter->colorG, filter->colorB, 0));
gst_buffer_unmap (buf, &map_info);
return gst_pad_push (filter->srcpad, buf);
}
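
The pad setup in the _init functions above also changes uniformly: caps proxying becomes the GST_PAD_SET_PROXY_CAPS flag, the setcaps callback is gone, and the event and chain functions take the new parent argument. Roughly, under the same placeholder-name assumption as the earlier sketches:

#include <gst/gst.h>

static GstStaticPadTemplate example_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
    GST_STATIC_CAPS_ANY);

static gboolean
example_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  /* caps events and everything else end up here; see the caps sketch above */
  return gst_pad_event_default (pad, parent, event);
}

static GstFlowReturn
example_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  /* a real element would map, process and push the buffer here */
  gst_buffer_unref (buf);
  return GST_FLOW_OK;
}

static void
example_setup_sink_pad (GstElement * element)
{
  GstPad *sinkpad = gst_pad_new_from_static_template (&example_sink_template,
      "sink");

  /* replaces gst_pad_set_getcaps_function (pad, gst_pad_proxy_getcaps)
   * and the gst_pad_set_setcaps_function () call from 0.10 */
  GST_PAD_SET_PROXY_CAPS (sinkpad);

  gst_pad_set_event_function (sinkpad, GST_DEBUG_FUNCPTR (example_event));
  gst_pad_set_chain_function (sinkpad, GST_DEBUG_FUNCPTR (example_chain));

  gst_element_add_pad (element, sinkpad);
}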


@ -47,7 +47,7 @@
#define __GST_OPENCV_TEXT_OVERLAY_H__
#include <gst/gst.h>
#include "gstopencvutils.h"
G_BEGIN_DECLS
/* #defines don't like whitespacey bits */