update for videofilter changes.

Wim Taymans 2011-12-21 23:51:03 +01:00
parent 4b8975f867
commit 2214657113
36 changed files with 394 additions and 1006 deletions
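The commit ports the gst-plugins-good video filters (alpha, alphacolor, navigationtest and the effectv elements) from open-coded GstBaseTransform vfuncs (set_caps, get_unit_size, transform/transform_ip with manual gst_video_frame_map()/unmap()) to the GstVideoFilter vfuncs (set_info, transform_frame/transform_frame_ip), which receive already-parsed GstVideoInfo and pre-mapped GstVideoFrames. A minimal sketch of the new per-element shape follows; the GstFooFilter type and the gst_foo_filter_* names are hypothetical and only illustrate the pattern, they are not part of this commit.

/* Sketch of the GstVideoFilter pattern this commit switches the elements to:
 * set_info replaces set_caps/get_unit_size, transform_frame replaces
 * transform plus manual frame mapping.  GstFooFilter is hypothetical. */
#include <gst/video/gstvideofilter.h>

static gboolean
gst_foo_filter_set_info (GstVideoFilter * filter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  /* caps are already parsed into GstVideoInfo; cache whatever is needed */
  return GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_INFO_FORMAT (out_info);
}

static GstFlowReturn
gst_foo_filter_transform_frame (GstVideoFilter * filter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  /* frames arrive mapped; no gst_video_frame_map()/unmap() needed here */
  gst_video_frame_copy (out_frame, in_frame);
  return GST_FLOW_OK;
}

static void
gst_foo_filter_class_init (GstFooFilterClass * klass)
{
  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;

  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_foo_filter_set_info);
  vfilter_class->transform_frame =
      GST_DEBUG_FUNCPTR (gst_foo_filter_transform_frame);
}

In-place elements (alphacolor) use transform_frame_ip instead of transform_frame, and the per-element GstVideoInfo copies are dropped in favour of the in_info/out_info kept by the GstVideoFilter base class, as the hunks below show.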


@@ -156,17 +156,17 @@ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, ARGB, BGRA, ABGR, RGBA }"));
   g_static_mutex_unlock (&alpha->lock); \
 } G_STMT_END

-static gboolean gst_alpha_get_unit_size (GstBaseTransform * btrans,
-    GstCaps * caps, gsize * size);
 static GstCaps *gst_alpha_transform_caps (GstBaseTransform * btrans,
     GstPadDirection direction, GstCaps * caps, GstCaps * filter);
-static gboolean gst_alpha_set_caps (GstBaseTransform * btrans,
-    GstCaps * incaps, GstCaps * outcaps);
-static GstFlowReturn gst_alpha_transform (GstBaseTransform * btrans,
-    GstBuffer * in, GstBuffer * out);
 static void gst_alpha_before_transform (GstBaseTransform * btrans,
     GstBuffer * buf);
+static gboolean gst_alpha_set_info (GstVideoFilter * filter,
+    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+    GstVideoInfo * out_info);
+static GstFlowReturn gst_alpha_transform_frame (GstVideoFilter * filter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame);

 static void gst_alpha_init_params (GstAlpha * alpha);
 static gboolean gst_alpha_set_process_function (GstAlpha * alpha);
@@ -204,6 +204,7 @@ gst_alpha_class_init (GstAlphaClass * klass)
   GObjectClass *gobject_class = (GObjectClass *) klass;
   GstElementClass *gstelement_class = (GstElementClass *) klass;
   GstBaseTransformClass *btrans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;

   GST_DEBUG_CATEGORY_INIT (gst_alpha_debug, "alpha", 0,
       "alpha - Element for adding alpha channel to streams");
@@ -269,12 +270,13 @@ gst_alpha_class_init (GstAlphaClass * klass)
   gst_element_class_add_pad_template (gstelement_class,
       gst_static_pad_template_get (&gst_alpha_src_template));

-  btrans_class->transform = GST_DEBUG_FUNCPTR (gst_alpha_transform);
   btrans_class->before_transform =
       GST_DEBUG_FUNCPTR (gst_alpha_before_transform);
-  btrans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_alpha_get_unit_size);
   btrans_class->transform_caps = GST_DEBUG_FUNCPTR (gst_alpha_transform_caps);
-  btrans_class->set_caps = GST_DEBUG_FUNCPTR (gst_alpha_set_caps);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_alpha_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_alpha_transform_frame);
 }

 static void
@@ -434,25 +436,6 @@ gst_alpha_get_property (GObject * object, guint prop_id, GValue * value,
   }
 }

-static gboolean
-gst_alpha_get_unit_size (GstBaseTransform * btrans,
-    GstCaps * caps, gsize * size)
-{
-  GstVideoInfo info;
-
-  if (!gst_video_info_from_caps (&info, caps))
-    return FALSE;
-
-  *size = info.size;
-
-  GST_DEBUG_OBJECT (btrans,
-      "unit size = %" G_GSIZE_FORMAT " for format %s w %d height %d", *size,
-      GST_VIDEO_INFO_NAME (&info), GST_VIDEO_INFO_WIDTH (&info),
-      GST_VIDEO_INFO_HEIGHT (&info));
-
-  return TRUE;
-}
-
 static GstCaps *
 gst_alpha_transform_caps (GstBaseTransform * btrans,
     GstPadDirection direction, GstCaps * caps, GstCaps * filter)
@@ -518,34 +501,28 @@ gst_alpha_transform_caps (GstBaseTransform * btrans,
 }

 static gboolean
-gst_alpha_set_caps (GstBaseTransform * btrans,
-    GstCaps * incaps, GstCaps * outcaps)
+gst_alpha_set_info (GstVideoFilter * filter,
+    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+    GstVideoInfo * out_info)
 {
-  GstAlpha *alpha = GST_ALPHA (btrans);
-  GstVideoInfo in_info, out_info;
+  GstAlpha *alpha = GST_ALPHA (filter);
   gboolean passthrough;

-  if (!gst_video_info_from_caps (&in_info, incaps) ||
-      !gst_video_info_from_caps (&out_info, outcaps))
-    goto invalid_format;
-
   GST_ALPHA_LOCK (alpha);

-  alpha->in_sdtv = in_info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
-  alpha->out_sdtv = out_info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+  alpha->in_sdtv = in_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+  alpha->out_sdtv = out_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;

   passthrough = alpha->prefer_passthrough &&
-      GST_VIDEO_INFO_FORMAT (&in_info) == GST_VIDEO_INFO_FORMAT (&out_info)
+      GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_INFO_FORMAT (out_info)
       && alpha->in_sdtv == alpha->out_sdtv && alpha->method == ALPHA_METHOD_SET
       && alpha->alpha == 1.0;

   GST_DEBUG_OBJECT (alpha,
       "Setting caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT
       " (passthrough: %d)", incaps, outcaps, passthrough);

-  gst_base_transform_set_passthrough (btrans, passthrough);
-
-  alpha->in_info = in_info;
-  alpha->out_info = out_info;
+  gst_base_transform_set_passthrough (GST_BASE_TRANSFORM_CAST (filter),
+      passthrough);

   if (!gst_alpha_set_process_function (alpha) && !passthrough)
     goto no_process;
@@ -557,13 +534,6 @@ gst_alpha_set_caps (GstBaseTransform * btrans,
   return TRUE;

   /* ERRORS */
-invalid_format:
-  {
-    GST_WARNING_OBJECT (alpha,
-        "Failed to parse caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, incaps,
-        outcaps);
-    return FALSE;
-  }
 no_process:
   {
     GST_WARNING_OBJECT (alpha,
@@ -2325,8 +2295,8 @@ gst_alpha_init_params (GstAlpha * alpha)
   const GstVideoFormatInfo *in_info, *out_info;
   const gint *matrix;

-  in_info = alpha->in_info.finfo;
-  out_info = alpha->out_info.finfo;
+  in_info = GST_VIDEO_FILTER (alpha)->in_info.finfo;
+  out_info = GST_VIDEO_FILTER (alpha)->out_info.finfo;

   /* RGB->RGB: convert to SDTV YUV, chroma keying, convert back
    * YUV->RGB: chroma keying, convert to RGB
@@ -2390,13 +2360,18 @@ gst_alpha_init_params (GstAlpha * alpha)
 static gboolean
 gst_alpha_set_process_function (GstAlpha * alpha)
 {
+  GstVideoInfo *in_info, *out_info;
+
   alpha->process = NULL;

+  in_info = &GST_VIDEO_FILTER_CAST (alpha)->in_info;
+  out_info = &GST_VIDEO_FILTER_CAST (alpha)->out_info;
+
   switch (alpha->method) {
     case ALPHA_METHOD_SET:
-      switch (GST_VIDEO_INFO_FORMAT (&alpha->out_info)) {
+      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
         case GST_VIDEO_FORMAT_AYUV:
-          switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
+          switch (GST_VIDEO_INFO_FORMAT (in_info)) {
            case GST_VIDEO_FORMAT_AYUV:
              alpha->process = gst_alpha_set_ayuv_ayuv;
              break;
@@ -2434,7 +2409,7 @@ gst_alpha_set_process_function (GstAlpha * alpha)
        case GST_VIDEO_FORMAT_ABGR:
        case GST_VIDEO_FORMAT_RGBA:
        case GST_VIDEO_FORMAT_BGRA:
-          switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
+          switch (GST_VIDEO_INFO_FORMAT (in_info)) {
            case GST_VIDEO_FORMAT_AYUV:
              alpha->process = gst_alpha_set_ayuv_argb;
              break;
@@ -2476,9 +2451,9 @@ gst_alpha_set_process_function (GstAlpha * alpha)
    case ALPHA_METHOD_GREEN:
    case ALPHA_METHOD_BLUE:
    case ALPHA_METHOD_CUSTOM:
-      switch (GST_VIDEO_INFO_FORMAT (&alpha->out_info)) {
+      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
        case GST_VIDEO_FORMAT_AYUV:
-          switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
+          switch (GST_VIDEO_INFO_FORMAT (in_info)) {
            case GST_VIDEO_FORMAT_AYUV:
              alpha->process = gst_alpha_chroma_key_ayuv_ayuv;
              break;
@@ -2516,7 +2491,7 @@ gst_alpha_set_process_function (GstAlpha * alpha)
        case GST_VIDEO_FORMAT_ABGR:
        case GST_VIDEO_FORMAT_RGBA:
        case GST_VIDEO_FORMAT_BGRA:
-          switch (GST_VIDEO_INFO_FORMAT (&alpha->in_info)) {
+          switch (GST_VIDEO_INFO_FORMAT (in_info)) {
            case GST_VIDEO_FORMAT_AYUV:
              alpha->process = gst_alpha_chroma_key_ayuv_argb;
              break;
@@ -2575,26 +2550,17 @@ gst_alpha_before_transform (GstBaseTransform * btrans, GstBuffer * buf)
 }

 static GstFlowReturn
-gst_alpha_transform (GstBaseTransform * btrans, GstBuffer * in, GstBuffer * out)
+gst_alpha_transform_frame (GstVideoFilter * filter, GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame)
 {
-  GstAlpha *alpha = GST_ALPHA (btrans);
-  GstVideoFrame in_frame, out_frame;
+  GstAlpha *alpha = GST_ALPHA (filter);

   GST_ALPHA_LOCK (alpha);

   if (G_UNLIKELY (!alpha->process))
     goto not_negotiated;

-  if (!gst_video_frame_map (&in_frame, &alpha->in_info, in, GST_MAP_READ))
-    goto invalid_in;
-  if (!gst_video_frame_map (&out_frame, &alpha->out_info, out, GST_MAP_WRITE))
-    goto invalid_out;
-
-  alpha->process (&in_frame, &out_frame, alpha);
-
-  gst_video_frame_unmap (&out_frame);
-  gst_video_frame_unmap (&in_frame);
+  alpha->process (in_frame, out_frame, alpha);

   GST_ALPHA_UNLOCK (alpha);
@@ -2607,19 +2573,6 @@ not_negotiated:
     GST_ALPHA_UNLOCK (alpha);
     return GST_FLOW_NOT_NEGOTIATED;
   }
-invalid_in:
-  {
-    GST_ERROR_OBJECT (alpha, "Invalid input frame");
-    GST_ALPHA_UNLOCK (alpha);
-    return GST_FLOW_OK;
-  }
-invalid_out:
-  {
-    GST_ERROR_OBJECT (alpha, "Invalid output frame");
-    gst_video_frame_unmap (&in_frame);
-    GST_ALPHA_UNLOCK (alpha);
-    return GST_FLOW_OK;
-  }
 }

 static gboolean


@@ -71,7 +71,6 @@ struct _GstAlpha
   /* caps */
   GStaticMutex lock;

-  GstVideoInfo in_info, out_info;
   gboolean in_sdtv, out_sdtv;

   /* properties */


@@ -62,10 +62,12 @@ G_DEFINE_TYPE (GstAlphaColor, gst_alpha_color, GST_TYPE_VIDEO_FILTER);
 static GstCaps *gst_alpha_color_transform_caps (GstBaseTransform * btrans,
     GstPadDirection direction, GstCaps * caps, GstCaps * filter);
-static gboolean gst_alpha_color_set_caps (GstBaseTransform * btrans,
-    GstCaps * incaps, GstCaps * outcaps);
-static GstFlowReturn gst_alpha_color_transform_ip (GstBaseTransform * btrans,
-    GstBuffer * inbuf);
+static gboolean gst_alpha_color_set_info (GstVideoFilter * filter,
+    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+    GstVideoInfo * out_info);
+static GstFlowReturn gst_alpha_color_transform_frame_ip (GstVideoFilter *
+    filter, GstVideoFrame * frame);

 static void
 gst_alpha_color_class_init (GstAlphaColorClass * klass)
@@ -73,6 +75,7 @@ gst_alpha_color_class_init (GstAlphaColorClass * klass)
   GstElementClass *gstelement_class = (GstElementClass *) klass;
   GstBaseTransformClass *gstbasetransform_class =
       (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *gstvideofilter_class = (GstVideoFilterClass *) klass;

   GST_DEBUG_CATEGORY_INIT (alpha_color_debug, "alphacolor", 0,
       "ARGB<->AYUV colorspace conversion preserving the alpha channels");
@@ -89,10 +92,10 @@ gst_alpha_color_class_init (GstAlphaColorClass * klass)
   gstbasetransform_class->transform_caps =
       GST_DEBUG_FUNCPTR (gst_alpha_color_transform_caps);
-  gstbasetransform_class->set_caps =
-      GST_DEBUG_FUNCPTR (gst_alpha_color_set_caps);
-  gstbasetransform_class->transform_ip =
-      GST_DEBUG_FUNCPTR (gst_alpha_color_transform_ip);
+
+  gstvideofilter_class->set_info = GST_DEBUG_FUNCPTR (gst_alpha_color_set_info);
+  gstvideofilter_class->transform_frame_ip =
+      GST_DEBUG_FUNCPTR (gst_alpha_color_transform_frame_ip);
 }

 static void
@@ -471,32 +474,25 @@ transform_rgba_argb (GstVideoFrame * frame, const gint * matrix)
 #define transform_bgra_abgr transform_rgba_argb

 static gboolean
-gst_alpha_color_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
-    GstCaps * outcaps)
+gst_alpha_color_set_info (GstVideoFilter * filter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
 {
-  GstAlphaColor *alpha = GST_ALPHA_COLOR (btrans);
-  gboolean ret;
-  GstVideoInfo in_info, out_info;
+  GstAlphaColor *alpha = GST_ALPHA_COLOR (filter);
   gboolean in_sdtv, out_sdtv;

   alpha->process = NULL;
   alpha->matrix = NULL;

-  ret = gst_video_info_from_caps (&in_info, incaps);
-  ret &= gst_video_info_from_caps (&out_info, outcaps);
-
-  if (!ret)
-    goto invalid_caps;
-
-  if (GST_VIDEO_INFO_WIDTH (&in_info) != GST_VIDEO_INFO_WIDTH (&out_info) ||
-      GST_VIDEO_INFO_HEIGHT (&in_info) != GST_VIDEO_INFO_HEIGHT (&out_info))
+  if (GST_VIDEO_INFO_WIDTH (in_info) != GST_VIDEO_INFO_WIDTH (out_info) ||
+      GST_VIDEO_INFO_HEIGHT (in_info) != GST_VIDEO_INFO_HEIGHT (out_info))
     goto invalid_caps;

-  in_sdtv = in_info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
-  out_sdtv = out_info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+  in_sdtv = in_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+  out_sdtv = out_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;

-  switch (GST_VIDEO_INFO_FORMAT (&in_info)) {
+  switch (GST_VIDEO_INFO_FORMAT (in_info)) {
     case GST_VIDEO_FORMAT_ARGB:
-      switch (GST_VIDEO_INFO_FORMAT (&out_info)) {
+      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
        case GST_VIDEO_FORMAT_ARGB:
          alpha->process = NULL;
          alpha->matrix = NULL;
@@ -526,7 +522,7 @@ gst_alpha_color_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
       }
       break;
     case GST_VIDEO_FORMAT_BGRA:
-      switch (GST_VIDEO_INFO_FORMAT (&out_info)) {
+      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
        case GST_VIDEO_FORMAT_BGRA:
          alpha->process = NULL;
          alpha->matrix = NULL;
@@ -556,7 +552,7 @@ gst_alpha_color_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
       }
       break;
     case GST_VIDEO_FORMAT_ABGR:
-      switch (GST_VIDEO_INFO_FORMAT (&out_info)) {
+      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
        case GST_VIDEO_FORMAT_ABGR:
          alpha->process = NULL;
          alpha->matrix = NULL;
@@ -586,7 +582,7 @@ gst_alpha_color_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
       }
       break;
     case GST_VIDEO_FORMAT_RGBA:
-      switch (GST_VIDEO_INFO_FORMAT (&out_info)) {
+      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
        case GST_VIDEO_FORMAT_RGBA:
          alpha->process = NULL;
          alpha->matrix = NULL;
@@ -616,7 +612,7 @@ gst_alpha_color_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
       }
       break;
     case GST_VIDEO_FORMAT_AYUV:
-      switch (GST_VIDEO_INFO_FORMAT (&out_info)) {
+      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
        case GST_VIDEO_FORMAT_AYUV:
          if (in_sdtv == out_sdtv) {
            alpha->process = transform_ayuv_ayuv;
@@ -664,9 +660,9 @@ gst_alpha_color_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
       break;
   }

-  if (GST_VIDEO_INFO_FORMAT (&in_info) == GST_VIDEO_INFO_FORMAT (&out_info)
+  if (GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_INFO_FORMAT (out_info)
       && in_sdtv == out_sdtv)
-    gst_base_transform_set_passthrough (btrans, TRUE);
+    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
   else if (!alpha->process)
     goto no_process;
@@ -686,24 +682,19 @@ no_process:
 }

 static GstFlowReturn
-gst_alpha_color_transform_ip (GstBaseTransform * btrans, GstBuffer * inbuf)
+gst_alpha_color_transform_frame_ip (GstVideoFilter * filter,
+    GstVideoFrame * frame)
 {
-  GstAlphaColor *alpha = GST_ALPHA_COLOR (btrans);
-  GstVideoFrame frame;
+  GstAlphaColor *alpha = GST_ALPHA_COLOR (filter);

-  if (gst_base_transform_is_passthrough (btrans))
+  if (gst_base_transform_is_passthrough (GST_BASE_TRANSFORM (filter)))
     return GST_FLOW_OK;

   if (G_UNLIKELY (!alpha->process))
     goto not_negotiated;

-  if (!gst_video_frame_map (&frame, &alpha->in_info, inbuf, GST_MAP_READWRITE))
-    goto invalid_buffer;
-
   /* Transform in place */
-  alpha->process (&frame, alpha->matrix);
-
-  gst_video_frame_unmap (&frame);
+  alpha->process (frame, alpha->matrix);

   return GST_FLOW_OK;
@@ -713,11 +704,6 @@ not_negotiated:
     GST_ERROR_OBJECT (alpha, "Not negotiated yet");
     return GST_FLOW_NOT_NEGOTIATED;
   }
-invalid_buffer:
-  {
-    GST_ERROR_OBJECT (alpha, "Invalid buffer received");
-    return GST_FLOW_ERROR;
-  }
 }

 static gboolean


@@ -42,10 +42,8 @@ struct _GstAlphaColor
   GstVideoFilter parent;

   /*< private >*/
-  /* caps */
-  GstVideoInfo in_info, out_info;
-
   void (*process) (GstVideoFrame * frame, const gint * matrix);
   const gint *matrix;
 };


@@ -55,19 +55,22 @@ G_DEFINE_TYPE (GstNavigationtest, gst_navigationtest, GST_TYPE_VIDEO_FILTER);
 static gboolean
 gst_navigationtest_src_event (GstBaseTransform * trans, GstEvent * event)
 {
+  GstVideoInfo *info;
   GstNavigationtest *navtest;
   const gchar *type;

   navtest = GST_NAVIGATIONTEST (trans);

+  info = &GST_VIDEO_FILTER (trans)->in_info;
+
   switch (GST_EVENT_TYPE (event)) {
     case GST_EVENT_NAVIGATION:
     {
       const GstStructure *s = gst_event_get_structure (event);
       gint fps_n, fps_d;

-      fps_n = GST_VIDEO_INFO_FPS_N (&navtest->info);
-      fps_d = GST_VIDEO_INFO_FPS_D (&navtest->info);
+      fps_n = GST_VIDEO_INFO_FPS_N (info);
+      fps_d = GST_VIDEO_INFO_FPS_D (info);

       type = gst_structure_get_string (s, "event");
       if (g_str_equal (type, "mouse-move")) {
@@ -115,53 +118,6 @@ gst_navigationtest_src_event (GstBaseTransform * trans, GstEvent * event)
 #define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))

-static gboolean
-gst_navigationtest_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
-    gsize * size)
-{
-  GstNavigationtest *navtest;
-  GstStructure *structure;
-  gboolean ret = FALSE;
-  gint width, height;
-
-  navtest = GST_NAVIGATIONTEST (btrans);
-
-  structure = gst_caps_get_structure (caps, 0);
-
-  if (gst_structure_get_int (structure, "width", &width) &&
-      gst_structure_get_int (structure, "height", &height)) {
-    *size = GST_VIDEO_I420_SIZE (width, height);
-    ret = TRUE;
-    GST_DEBUG_OBJECT (navtest,
-        "our frame size is %" G_GSIZE_FORMAT " bytes (%dx%d)", *size, width,
-        height);
-  }
-
-  return ret;
-}
-
-static gboolean
-gst_navigationtest_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
-    GstCaps * outcaps)
-{
-  GstNavigationtest *navtest = GST_NAVIGATIONTEST (btrans);
-  GstVideoInfo info;
-
-  if (!gst_video_info_from_caps (&info, incaps))
-    goto invalid_caps;
-
-  navtest->info = info;
-
-  return TRUE;
-
-  /* ERRORS */
-invalid_caps:
-  {
-    GST_ERROR_OBJECT (navtest, "invalid caps");
-    return FALSE;
-  }
-}
-
 static void
 draw_box_planar411 (GstVideoFrame * frame, int x, int y,
     guint8 colory, guint8 coloru, guint8 colorv)
@@ -215,53 +171,30 @@ draw_box_planar411 (GstVideoFrame * frame, int x, int y,
 }

 static GstFlowReturn
-gst_navigationtest_transform (GstBaseTransform * trans, GstBuffer * in,
-    GstBuffer * out)
+gst_navigationtest_transform_frame (GstVideoFilter * filter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
 {
-  GstNavigationtest *navtest = GST_NAVIGATIONTEST (trans);
+  GstNavigationtest *navtest = GST_NAVIGATIONTEST (filter);
   GSList *walk;
-  GstVideoFrame in_frame, out_frame;

-  if (!gst_video_frame_map (&in_frame, &navtest->info, in, GST_MAP_READ))
-    goto invalid_in;
-  if (!gst_video_frame_map (&out_frame, &navtest->info, out, GST_MAP_WRITE))
-    goto invalid_out;
-
-  gst_video_frame_copy (&out_frame, &in_frame);
+  gst_video_frame_copy (out_frame, in_frame);

   walk = navtest->clicks;
   while (walk) {
     ButtonClick *click = walk->data;

     walk = g_slist_next (walk);
-    draw_box_planar411 (&out_frame,
+    draw_box_planar411 (out_frame,
         rint (click->x), rint (click->y), click->cy, click->cu, click->cv);
     if (--click->images_left < 1) {
       navtest->clicks = g_slist_remove (navtest->clicks, click);
       g_free (click);
     }
   }
-  draw_box_planar411 (&out_frame,
+  draw_box_planar411 (out_frame,
       rint (navtest->x), rint (navtest->y), 0, 128, 128);

-  gst_video_frame_unmap (&out_frame);
-  gst_video_frame_unmap (&in_frame);
-
   return GST_FLOW_OK;
-
-  /* ERRORS */
-invalid_in:
-  {
-    GST_ERROR_OBJECT (navtest, "received invalid input buffer");
-    return GST_FLOW_OK;
-  }
-invalid_out:
-  {
-    GST_ERROR_OBJECT (navtest, "received invalid output buffer");
-    gst_video_frame_unmap (&in_frame);
-    return GST_FLOW_OK;
-  }
 }

 static GstStateChangeReturn
@@ -295,11 +228,11 @@ gst_navigationtest_class_init (GstNavigationtestClass * klass)
 {
   GstElementClass *element_class;
   GstBaseTransformClass *trans_class;
+  GstVideoFilterClass *vfilter_class;

   element_class = (GstElementClass *) klass;
   trans_class = (GstBaseTransformClass *) klass;
+  vfilter_class = (GstVideoFilterClass *) klass;

-  parent_class = g_type_class_peek_parent (klass);
-
   element_class->change_state =
       GST_DEBUG_FUNCPTR (gst_navigationtest_change_state);
@@ -314,11 +247,10 @@ gst_navigationtest_class_init (GstNavigationtestClass * klass)
   gst_element_class_add_pad_template (element_class,
       gst_static_pad_template_get (&gst_navigationtest_src_template));

-  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_navigationtest_set_caps);
-  trans_class->get_unit_size =
-      GST_DEBUG_FUNCPTR (gst_navigationtest_get_unit_size);
-  trans_class->transform = GST_DEBUG_FUNCPTR (gst_navigationtest_transform);
   trans_class->src_event = GST_DEBUG_FUNCPTR (gst_navigationtest_src_event);
+
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_navigationtest_transform_frame);
 }

 static void


@@ -50,8 +50,6 @@ struct _GstNavigationtest
 {
   GstVideoFilter videofilter;

-  GstVideoInfo info;
-
   gdouble x, y;
   GSList *clicks;
 };


@@ -87,28 +87,6 @@ GST_STATIC_PAD_TEMPLATE ("sink",

 G_DEFINE_TYPE (GstAgingTV, gst_agingtv, GST_TYPE_VIDEO_FILTER);

-static gboolean
-gst_agingtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
-    GstCaps * outcaps)
-{
-  GstAgingTV *filter = GST_AGINGTV (btrans);
-  GstVideoInfo info;
-
-  if (!gst_video_info_from_caps (&info, incaps))
-    goto invalid_caps;
-
-  filter->info = info;
-
-  return TRUE;
-
-  /* ERRORS */
-invalid_caps:
-  {
-    GST_ERROR_OBJECT (filter, "could not parse caps");
-    return GST_FLOW_ERROR;
-  }
-}
-
 static void
 coloraging (guint32 * src, guint32 * dest, gint video_area, gint * c)
 {
@@ -321,19 +299,19 @@ gst_agingtv_start (GstBaseTransform * trans)
 }

 static GstFlowReturn
-gst_agingtv_transform (GstBaseTransform * trans, GstBuffer * in,
-    GstBuffer * out)
+gst_agingtv_transform_frame (GstVideoFilter * filter, GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame)
 {
-  GstAgingTV *agingtv = GST_AGINGTV (trans);
-  GstVideoFrame in_frame, out_frame;
+  GstAgingTV *agingtv = GST_AGINGTV (filter);
   gint area_scale;
   GstClockTime timestamp, stream_time;
   gint width, height, stride, video_size;
   guint32 *src, *dest;

-  timestamp = GST_BUFFER_TIMESTAMP (in);
+  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
   stream_time =
-      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
+      GST_FORMAT_TIME, timestamp);

   GST_DEBUG_OBJECT (agingtv, "sync to %" GST_TIME_FORMAT,
       GST_TIME_ARGS (timestamp));
@@ -341,20 +319,13 @@ gst_agingtv_transform (GstBaseTransform * trans, GstBuffer * in,
   if (GST_CLOCK_TIME_IS_VALID (stream_time))
     gst_object_sync_values (GST_OBJECT (agingtv), stream_time);

-  if (!gst_video_frame_map (&in_frame, &agingtv->info, in, GST_MAP_READ))
-    goto invalid_in;
-
-  if (!gst_video_frame_map (&out_frame, &agingtv->info, out, GST_MAP_WRITE))
-    goto invalid_out;
-
-  width = GST_VIDEO_FRAME_WIDTH (&in_frame);
-  height = GST_VIDEO_FRAME_HEIGHT (&in_frame);
-  stride = GST_VIDEO_FRAME_PLANE_STRIDE (&in_frame, 0);
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+  stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
   video_size = stride * height;

-  src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
-  dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

   area_scale = width * height / 64 / 480;
   if (area_scale <= 0)
@@ -371,23 +342,7 @@ gst_agingtv_transform (GstBaseTransform * trans, GstBuffer * in,
   if (area_scale > 1 && agingtv->dusts)
     dusts (dest, width, height, &agingtv->dust_interval, area_scale);

-  gst_video_frame_unmap (&in_frame);
-  gst_video_frame_unmap (&out_frame);
-
   return GST_FLOW_OK;
-
-  /* ERRORS */
-invalid_in:
-  {
-    GST_DEBUG_OBJECT (agingtv, "invalid input frame");
-    return GST_FLOW_ERROR;
-  }
-invalid_out:
-  {
-    GST_DEBUG_OBJECT (agingtv, "invalid output frame");
-    gst_video_frame_unmap (&in_frame);
-    return GST_FLOW_ERROR;
-  }
 }

 static void
@@ -396,6 +351,7 @@ gst_agingtv_class_init (GstAgingTVClass * klass)
   GObjectClass *gobject_class = (GObjectClass *) klass;
   GstElementClass *gstelement_class = (GstElementClass *) klass;
   GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;

   gobject_class->set_property = gst_agingtv_set_property;
   gobject_class->get_property = gst_agingtv_get_property;
@@ -430,9 +386,10 @@ gst_agingtv_class_init (GstAgingTVClass * klass)
   gst_element_class_add_pad_template (gstelement_class,
       gst_static_pad_template_get (&gst_agingtv_src_template));

-  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_agingtv_set_caps);
-  trans_class->transform = GST_DEBUG_FUNCPTR (gst_agingtv_transform);
   trans_class->start = GST_DEBUG_FUNCPTR (gst_agingtv_start);
+
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_agingtv_transform_frame);
 }

 static void


@@ -62,8 +62,6 @@ struct _GstAgingTV
   GstVideoFilter videofilter;

   /* < private > */
-  GstVideoInfo info;
-
   gboolean color_aging;
   gboolean pits;
   gboolean dusts;


@@ -91,38 +91,25 @@ enum
 };

 static gboolean
-gst_dicetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
-    GstCaps * outcaps)
+gst_dicetv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
 {
-  GstDiceTV *filter = GST_DICETV (btrans);
-  GstVideoInfo info;
-
-  if (!gst_video_info_from_caps (&info, incaps))
-    goto invalid_caps;
+  GstDiceTV *filter = GST_DICETV (vfilter);

   g_free (filter->dicemap);
   filter->dicemap =
-      (guint8 *) g_malloc (GST_VIDEO_INFO_WIDTH (&info) *
-      GST_VIDEO_INFO_WIDTH (&info));
+      (guint8 *) g_malloc (GST_VIDEO_INFO_WIDTH (in_info) *
+      GST_VIDEO_INFO_WIDTH (in_info));
   gst_dicetv_create_map (filter);

-  filter->info = info;
-
   return TRUE;
-
-  /* ERRORS */
-invalid_caps:
-  {
-    GST_DEBUG_OBJECT (filter, "invalid caps");
-    return FALSE;
-  }
 }

 static GstFlowReturn
-gst_dicetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
+gst_dicetv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame)
 {
-  GstDiceTV *filter = GST_DICETV (trans);
-  GstVideoFrame in_frame, out_frame;
+  GstDiceTV *filter = GST_DICETV (vfilter);
   guint32 *src, *dest;
   gint i, map_x, map_y, map_i, base, dx, dy, di;
   gint video_stride, g_cube_bits, g_cube_size;
@@ -130,9 +117,10 @@ gst_dicetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
   GstClockTime timestamp, stream_time;
   const guint8 *dicemap;

-  timestamp = GST_BUFFER_TIMESTAMP (in);
+  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
   stream_time =
-      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+      GST_FORMAT_TIME, timestamp);

   GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
       GST_TIME_ARGS (timestamp));
@@ -140,12 +128,9 @@ gst_dicetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
   if (GST_CLOCK_TIME_IS_VALID (stream_time))
     gst_object_sync_values (GST_OBJECT (filter), stream_time);

-  gst_video_frame_map (&in_frame, &filter->info, in, GST_MAP_READ);
-  gst_video_frame_map (&out_frame, &filter->info, out, GST_MAP_WRITE);
-
-  src = (guint32 *) GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
-  dest = (guint32 *) GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
-  video_stride = GST_VIDEO_FRAME_PLANE_STRIDE (&in_frame, 0);
+  src = (guint32 *) GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = (guint32 *) GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+  video_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);

   GST_OBJECT_LOCK (filter);
   g_cube_bits = filter->g_cube_bits;
@@ -211,9 +196,6 @@ gst_dicetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
   }
   GST_OBJECT_UNLOCK (filter);

-  gst_video_frame_unmap (&in_frame);
-  gst_video_frame_unmap (&out_frame);
-
   return GST_FLOW_OK;
 }

@@ -222,9 +204,10 @@ gst_dicetv_create_map (GstDiceTV * filter)
 {
   gint x, y, i;
   gint width, height;
+  GstVideoInfo *info = &GST_VIDEO_FILTER (filter)->in_info;

-  width = GST_VIDEO_INFO_WIDTH (&filter->info);
-  height = GST_VIDEO_INFO_HEIGHT (&filter->info);
+  width = GST_VIDEO_INFO_WIDTH (info);
+  height = GST_VIDEO_INFO_HEIGHT (info);

   if (width <= 0 || height <= 0)
     return;
@@ -295,7 +278,7 @@ gst_dicetv_class_init (GstDiceTVClass * klass)
 {
   GObjectClass *gobject_class = (GObjectClass *) klass;
   GstElementClass *gstelement_class = (GstElementClass *) klass;
-  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;

   gobject_class->set_property = gst_dicetv_set_property;
   gobject_class->get_property = gst_dicetv_get_property;
@@ -316,8 +299,9 @@ gst_dicetv_class_init (GstDiceTVClass * klass)
   gst_element_class_add_pad_template (gstelement_class,
       gst_static_pad_template_get (&gst_dicetv_src_template));

-  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_dicetv_set_caps);
-  trans_class->transform = GST_DEBUG_FUNCPTR (gst_dicetv_transform);
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_dicetv_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_dicetv_transform_frame);
 }

 static void


@@ -55,7 +55,6 @@ struct _GstDiceTV
   GstVideoFilter videofilter;

   /* < private > */
-  GstVideoInfo info;
   guint8 *dicemap;

   gint g_cube_bits;


@@ -70,21 +70,15 @@ GST_STATIC_PAD_TEMPLATE ("sink",
     );

 static gboolean
-gst_edgetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
-    GstCaps * outcaps)
+gst_edgetv_set_info (GstVideoFilter * filter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
 {
-  GstEdgeTV *edgetv = GST_EDGETV (btrans);
-  GstVideoInfo info;
+  GstEdgeTV *edgetv = GST_EDGETV (filter);
   guint map_size;
   gint width, height;

-  if (!gst_video_info_from_caps (&info, incaps))
-    goto invalid_caps;
-
-  edgetv->info = info;
-
-  width = GST_VIDEO_INFO_WIDTH (&info);
-  height = GST_VIDEO_INFO_HEIGHT (&info);
+  width = GST_VIDEO_INFO_WIDTH (in_info);
+  height = GST_VIDEO_INFO_HEIGHT (in_info);

   edgetv->map_width = width / 4;
   edgetv->map_height = height / 4;
@@ -96,19 +90,13 @@ gst_edgetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
   edgetv->map = (guint32 *) g_malloc0 (map_size);

   return TRUE;
-
-  /* ERRORS */
-invalid_caps:
-  {
-    GST_DEBUG_OBJECT (btrans, "could not parse caps");
-    return FALSE;
-  }
 }

 static GstFlowReturn
-gst_edgetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
+gst_edgetv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame)
 {
-  GstEdgeTV *filter = GST_EDGETV (trans);
+  GstEdgeTV *filter = GST_EDGETV (vfilter);
   gint x, y, r, g, b;
   guint32 *src, *dest;
   guint32 p, q;
@@ -117,20 +105,16 @@ gst_edgetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
   gint video_width_margin;
   guint32 *map;
   GstFlowReturn ret = GST_FLOW_OK;
-  GstVideoFrame in_frame, out_frame;

   map = filter->map;
   map_height = filter->map_height;
   map_width = filter->map_width;
   video_width_margin = filter->video_width_margin;

-  gst_video_frame_map (&in_frame, &filter->info, in, GST_MAP_READ);
-  gst_video_frame_map (&out_frame, &filter->info, out, GST_MAP_WRITE);
-
-  src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
-  dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
-
-  width = GST_VIDEO_FRAME_WIDTH (&in_frame);
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);

   src += width * 4 + 4;
   dest += width * 4 + 4;
@@ -244,6 +228,7 @@ gst_edgetv_class_init (GstEdgeTVClass * klass)
   GObjectClass *gobject_class = (GObjectClass *) klass;
   GstElementClass *gstelement_class = (GstElementClass *) klass;
   GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;

   gobject_class->finalize = gst_edgetv_finalize;
@@ -256,9 +241,11 @@ gst_edgetv_class_init (GstEdgeTVClass * klass)
   gst_element_class_add_pad_template (gstelement_class,
       gst_static_pad_template_get (&gst_edgetv_src_template));

-  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_edgetv_set_caps);
-  trans_class->transform = GST_DEBUG_FUNCPTR (gst_edgetv_transform);
   trans_class->start = GST_DEBUG_FUNCPTR (gst_edgetv_start);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_edgetv_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_edgetv_transform_frame);
 }

 static void


@@ -53,7 +53,6 @@ struct _GstEdgeTV
   GstVideoFilter videofilter;

   /* < private > */
-  GstVideoInfo info;
   gint map_width, map_height;
   guint32 *map;
   gint video_width_margin;


@@ -199,20 +199,21 @@ image_y_over (guint32 * src, guint8 * diff, gint y_threshold, gint video_area)
 }

 static GstFlowReturn
-gst_optv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
+gst_optv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame)
 {
-  GstOpTV *filter = GST_OPTV (trans);
+  GstOpTV *filter = GST_OPTV (vfilter);
   guint32 *src, *dest;
-  GstVideoFrame in_frame, out_frame;
   gint8 *p;
   guint8 *diff;
   gint x, y, width, height;
   GstClockTime timestamp, stream_time;
   guint8 phase;

-  timestamp = GST_BUFFER_TIMESTAMP (in);
+  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
   stream_time =
-      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+      GST_FORMAT_TIME, timestamp);

   GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
       GST_TIME_ARGS (timestamp));
@@ -223,17 +224,11 @@ gst_optv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
   if (G_UNLIKELY (filter->opmap[0] == NULL))
     return GST_FLOW_NOT_NEGOTIATED;

-  if (!gst_video_frame_map (&in_frame, &filter->info, in, GST_MAP_READ))
-    goto invalid_in;
-
-  if (!gst_video_frame_map (&out_frame, &filter->info, out, GST_MAP_WRITE))
-    goto invalid_out;
-
-  src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
-  dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
-
-  width = GST_VIDEO_FRAME_WIDTH (&in_frame);
-  height = GST_VIDEO_FRAME_HEIGHT (&in_frame);
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

   GST_OBJECT_LOCK (filter);
   switch (filter->mode) {
@@ -266,40 +261,18 @@ gst_optv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
   }
   GST_OBJECT_UNLOCK (filter);

-  gst_video_frame_unmap (&in_frame);
-  gst_video_frame_unmap (&out_frame);
-
   return GST_FLOW_OK;
-
-  /* ERRORS */
-invalid_in:
-  {
-    GST_DEBUG_OBJECT (filter, "invalid input frame");
-    return GST_FLOW_ERROR;
-  }
-invalid_out:
-  {
-    GST_DEBUG_OBJECT (filter, "invalid output frame");
-    gst_video_frame_unmap (&in_frame);
-    return GST_FLOW_ERROR;
-  }
 }

 static gboolean
-gst_optv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
-    GstCaps * outcaps)
+gst_optv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
 {
-  GstOpTV *filter = GST_OPTV (btrans);
-  GstVideoInfo info;
+  GstOpTV *filter = GST_OPTV (vfilter);
   gint i, width, height;

-  if (!gst_video_info_from_caps (&info, incaps))
-    goto invalid_caps;
-
-  filter->info = info;
-
-  width = GST_VIDEO_INFO_WIDTH (&info);
-  height = GST_VIDEO_INFO_HEIGHT (&info);
+  width = GST_VIDEO_INFO_WIDTH (in_info);
+  height = GST_VIDEO_INFO_HEIGHT (in_info);

   for (i = 0; i < 4; i++) {
     if (filter->opmap[i])
@@ -313,13 +286,6 @@ gst_optv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
   filter->diff = g_new (guint8, width * height);

   return TRUE;
-
-  /* ERRORS */
-invalid_caps:
-  {
-    GST_DEBUG_OBJECT (filter, "invalid caps received");
-    return FALSE;
-  }
 }

 static gboolean
@@ -406,6 +372,7 @@ gst_optv_class_init (GstOpTVClass * klass)
   GObjectClass *gobject_class = (GObjectClass *) klass;
   GstElementClass *gstelement_class = (GstElementClass *) klass;
   GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;

   gobject_class->set_property = gst_optv_set_property;
   gobject_class->get_property = gst_optv_get_property;
@@ -438,10 +405,11 @@ gst_optv_class_init (GstOpTVClass * klass)
   gst_element_class_add_pad_template (gstelement_class,
       gst_static_pad_template_get (&gst_optv_src_template));

-  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_optv_set_caps);
-  trans_class->transform = GST_DEBUG_FUNCPTR (gst_optv_transform);
   trans_class->start = GST_DEBUG_FUNCPTR (gst_optv_start);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_optv_set_info);
+  vfilter_class->transform_frame = GST_DEBUG_FUNCPTR (gst_optv_transform_frame);

   initPalette ();
 }


@@ -53,8 +53,6 @@ struct _GstOpTV
   GstVideoFilter element;

   /* < private > */
-  GstVideoInfo info;
-
   gint mode;
   gint speed;
   guint threshold;


@@ -78,49 +78,36 @@ GST_STATIC_PAD_TEMPLATE ("sink",
     );

 static gboolean
-gst_quarktv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
-    GstCaps * outcaps)
+gst_quarktv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
 {
-  GstQuarkTV *filter = GST_QUARKTV (btrans);
-  GstVideoInfo info;
+  GstQuarkTV *filter = GST_QUARKTV (vfilter);
   gint width, height;

-  if (!gst_video_info_from_caps (&info, incaps))
-    goto invalid_caps;
-
-  filter->info = info;
-
-  width = GST_VIDEO_INFO_WIDTH (&info);
-  height = GST_VIDEO_INFO_HEIGHT (&info);
+  width = GST_VIDEO_INFO_WIDTH (in_info);
+  height = GST_VIDEO_INFO_HEIGHT (in_info);

   gst_quarktv_planetable_clear (filter);
   filter->area = width * height;

   return TRUE;
-
-  /* ERRORS */
-invalid_caps:
-  {
-    GST_DEBUG_OBJECT (filter, "invalid caps received");
-    return FALSE;
-  }
 }

 static GstFlowReturn
-gst_quarktv_transform (GstBaseTransform * trans, GstBuffer * in,
-    GstBuffer * out)
+gst_quarktv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame)
 {
-  GstQuarkTV *filter = GST_QUARKTV (trans);
+  GstQuarkTV *filter = GST_QUARKTV (vfilter);
   gint area;
   guint32 *src, *dest;
   GstClockTime timestamp;
   GstBuffer **planetable;
   gint planes, current_plane;
-  GstVideoFrame in_frame, out_frame;

-  timestamp = GST_BUFFER_TIMESTAMP (in);
+  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
   timestamp =
-      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+      GST_FORMAT_TIME, timestamp);

   GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
       GST_TIME_ARGS (timestamp));
@@ -131,14 +118,8 @@ gst_quarktv_transform (GstBaseTransform * trans, GstBuffer * in,
   if (G_UNLIKELY (filter->planetable == NULL))
     return GST_FLOW_WRONG_STATE;

-  if (!gst_video_frame_map (&in_frame, &filter->info, in, GST_MAP_READ))
-    goto invalid_in;
-
-  if (!gst_video_frame_map (&out_frame, &filter->info, out, GST_MAP_WRITE))
-    goto invalid_out;
-
-  src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
-  dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

   GST_OBJECT_LOCK (filter);
   area = filter->area;
@@ -148,7 +129,7 @@ gst_quarktv_transform (GstBaseTransform * trans, GstBuffer * in,
   if (planetable[current_plane])
     gst_buffer_unref (planetable[current_plane]);

-  planetable[current_plane] = gst_buffer_ref (in);
+  planetable[current_plane] = gst_buffer_ref (in_frame->buffer);

   /* For each pixel */
   while (--area) {
@@ -169,23 +150,7 @@ gst_quarktv_transform (GstBaseTransform * trans, GstBuffer * in,
   filter->current_plane = planes - 1;
   GST_OBJECT_UNLOCK (filter);

-  gst_video_frame_unmap (&in_frame);
-  gst_video_frame_unmap (&out_frame);
-
   return GST_FLOW_OK;
-
-  /* ERRORS */
-invalid_in:
-  {
-    GST_DEBUG_OBJECT (filter, "invalid input frame");
-    return GST_FLOW_ERROR;
-  }
-invalid_out:
-  {
-    GST_DEBUG_OBJECT (filter, "invalid output frame");
-    gst_video_frame_unmap (&in_frame);
-    return GST_FLOW_ERROR;
-  }
 }

 static void
@@ -298,6 +263,7 @@ gst_quarktv_class_init (GstQuarkTVClass * klass)
   GObjectClass *gobject_class = (GObjectClass *) klass;
   GstElementClass *gstelement_class = (GstElementClass *) klass;
   GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;

   gobject_class->set_property = gst_quarktv_set_property;
   gobject_class->get_property = gst_quarktv_get_property;
@@ -318,9 +284,11 @@ gst_quarktv_class_init (GstQuarkTVClass * klass)
   gst_element_class_add_pad_template (gstelement_class,
       gst_static_pad_template_get (&gst_quarktv_src_template));

-  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_quarktv_set_caps);
-  trans_class->transform = GST_DEBUG_FUNCPTR (gst_quarktv_transform);
   trans_class->start = GST_DEBUG_FUNCPTR (gst_quarktv_start);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_quarktv_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_quarktv_transform_frame);
 }

 static void


@@ -53,8 +53,6 @@ struct _GstQuarkTV
   GstVideoFilter element;

   /* < private > */
-  GstVideoInfo info;
-
   gint area;
   gint planes;
   gint current_plane;


@ -233,9 +233,12 @@ blur (GstRadioacTV * filter)
gint width; gint width;
guint8 *p, *q; guint8 *p, *q;
guint8 v; guint8 v;
GstVideoInfo *info;
info = &GST_VIDEO_FILTER (filter)->in_info;
width = filter->buf_width; width = filter->buf_width;
p = filter->blurzoombuf + GST_VIDEO_INFO_WIDTH (&filter->info) + 1; p = filter->blurzoombuf + GST_VIDEO_INFO_WIDTH (info) + 1;
q = p + filter->buf_area; q = p + filter->buf_area;
for (y = filter->buf_height - 2; y > 0; y--) { for (y = filter->buf_height - 2; y > 0; y--) {
@ -315,21 +318,21 @@ image_bgsubtract_update_y (guint32 * src, gint16 * background, guint8 * diff,
} }
static GstFlowReturn static GstFlowReturn
gst_radioactv_transform (GstBaseTransform * trans, GstBuffer * in, gst_radioactv_transform_frame (GstVideoFilter * vfilter,
GstBuffer * out) GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{ {
GstRadioacTV *filter = GST_RADIOACTV (trans); GstRadioacTV *filter = GST_RADIOACTV (vfilter);
guint32 *src, *dest; guint32 *src, *dest;
GstVideoFrame in_frame, out_frame;
GstClockTime timestamp, stream_time; GstClockTime timestamp, stream_time;
gint x, y, width, height; gint x, y, width, height;
guint32 a, b; guint32 a, b;
guint8 *diff, *p; guint8 *diff, *p;
guint32 *palette; guint32 *palette;
timestamp = GST_BUFFER_TIMESTAMP (in); timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time = stream_time =
gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp); gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT, GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp)); GST_TIME_ARGS (timestamp));
@ -337,27 +340,21 @@ gst_radioactv_transform (GstBaseTransform * trans, GstBuffer * in,
if (GST_CLOCK_TIME_IS_VALID (stream_time)) if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (GST_OBJECT (filter), stream_time); gst_object_sync_values (GST_OBJECT (filter), stream_time);
if (!gst_video_frame_map (&in_frame, &filter->info, in, GST_MAP_READ)) src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
goto invalid_in; dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
if (!gst_video_frame_map (&out_frame, &filter->info, out, GST_MAP_WRITE)) width = GST_VIDEO_FRAME_WIDTH (in_frame);
goto invalid_out; height = GST_VIDEO_FRAME_HEIGHT (in_frame);
src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
width = GST_VIDEO_FRAME_WIDTH (&in_frame);
height = GST_VIDEO_FRAME_HEIGHT (&in_frame);
GST_OBJECT_LOCK (filter); GST_OBJECT_LOCK (filter);
#if G_BYTE_ORDER == G_LITTLE_ENDIAN #if G_BYTE_ORDER == G_LITTLE_ENDIAN
if (GST_VIDEO_FRAME_FORMAT (&in_frame) == GST_VIDEO_FORMAT_RGBx) { if (GST_VIDEO_FRAME_FORMAT (in_frame) == GST_VIDEO_FORMAT_RGBx) {
palette = &palettes[COLORS * filter->color]; palette = &palettes[COLORS * filter->color];
} else { } else {
palette = &palettes[COLORS * swap_tab[filter->color]]; palette = &palettes[COLORS * swap_tab[filter->color]];
} }
#else #else
if (GST_VIDEO_FRAME_FORMAT (&in_frame) == GST_VIDEO_FORMAT_xBGR) { if (GST_VIDEO_FRAME_FORMAT (in_frame) == GST_VIDEO_FORMAT_xBGR) {
palette = &palettes[COLORS * filter->color]; palette = &palettes[COLORS * filter->color];
} else { } else {
palette = &palettes[COLORS * swap_tab[filter->color]]; palette = &palettes[COLORS * swap_tab[filter->color]];
@ -418,40 +415,18 @@ gst_radioactv_transform (GstBaseTransform * trans, GstBuffer * in,
} }
GST_OBJECT_UNLOCK (filter); GST_OBJECT_UNLOCK (filter);
gst_video_frame_unmap (&in_frame);
gst_video_frame_unmap (&out_frame);
return GST_FLOW_OK; return GST_FLOW_OK;
/* ERRORS */
invalid_in:
{
GST_DEBUG_OBJECT (filter, "invalid input frame");
return GST_FLOW_ERROR;
}
invalid_out:
{
GST_DEBUG_OBJECT (filter, "invalid output frame");
gst_video_frame_unmap (&in_frame);
return GST_FLOW_ERROR;
}
} }
static gboolean static gboolean
gst_radioactv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, gst_radioactv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
GstCaps * outcaps) GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{ {
GstRadioacTV *filter = GST_RADIOACTV (btrans); GstRadioacTV *filter = GST_RADIOACTV (vfilter);
GstVideoInfo info;
gint width, height; gint width, height;
if (!gst_video_info_from_caps (&info, incaps)) width = GST_VIDEO_INFO_WIDTH (in_info);
goto invalid_caps; height = GST_VIDEO_INFO_HEIGHT (in_info);
filter->info = info;
width = GST_VIDEO_INFO_WIDTH (&info);
height = GST_VIDEO_INFO_HEIGHT (&info);
filter->buf_width_blocks = width / 32; filter->buf_width_blocks = width / 32;
if (filter->buf_width_blocks > 255) if (filter->buf_width_blocks > 255)
@ -493,11 +468,6 @@ gst_radioactv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
return TRUE; return TRUE;
/* ERRORS */ /* ERRORS */
invalid_caps:
{
GST_DEBUG_OBJECT (filter, "invalid caps received");
return FALSE;
}
too_wide: too_wide:
{ {
GST_DEBUG_OBJECT (filter, "frame too wide"); GST_DEBUG_OBJECT (filter, "frame too wide");
@ -607,6 +577,7 @@ gst_radioactv_class_init (GstRadioacTVClass * klass)
GObjectClass *gobject_class = (GObjectClass *) klass; GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass; GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass; GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_radioactv_set_property; gobject_class->set_property = gst_radioactv_set_property;
gobject_class->get_property = gst_radioactv_get_property; gobject_class->get_property = gst_radioactv_get_property;
@ -644,10 +615,12 @@ gst_radioactv_class_init (GstRadioacTVClass * klass)
gst_element_class_add_pad_template (gstelement_class, gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_radioactv_src_template)); gst_static_pad_template_get (&gst_radioactv_src_template));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_radioactv_set_caps);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_radioactv_transform);
trans_class->start = GST_DEBUG_FUNCPTR (gst_radioactv_start); trans_class->start = GST_DEBUG_FUNCPTR (gst_radioactv_start);
vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_radioactv_set_info);
vfilter_class->transform_frame =
GST_DEBUG_FUNCPTR (gst_radioactv_transform_frame);
makePalette (); makePalette ();
} }
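All of the ports in this commit follow the same shape: drop the set_caps/transform vfuncs on GstBaseTransformClass and implement set_info/transform_frame on GstVideoFilterClass instead. Below is a minimal sketch of that skeleton, not taken from the commit; the element name GstFooFilter and its callbacks are hypothetical.

#include <gst/video/gstvideofilter.h>

typedef struct _GstFooFilter
{
  GstVideoFilter parent;
} GstFooFilter;

typedef struct _GstFooFilterClass
{
  GstVideoFilterClass parent_class;
} GstFooFilterClass;

G_DEFINE_TYPE (GstFooFilter, gst_foo_filter, GST_TYPE_VIDEO_FILTER);

static gboolean
gst_foo_filter_set_info (GstVideoFilter * filter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  /* caps are already parsed into GstVideoInfo; no gst_video_info_from_caps()
   * and no "invalid caps" error path are needed here */
  return TRUE;
}

static GstFlowReturn
gst_foo_filter_transform_frame (GstVideoFilter * filter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  /* frames arrive already mapped; no gst_video_frame_map()/unmap() */
  return GST_FLOW_OK;
}

static void
gst_foo_filter_class_init (GstFooFilterClass * klass)
{
  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;

  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_foo_filter_set_info);
  vfilter_class->transform_frame =
      GST_DEBUG_FUNCPTR (gst_foo_filter_transform_frame);
}

static void
gst_foo_filter_init (GstFooFilter * filter)
{
}

A real element would additionally install pad templates and register the type in a plugin; the sketch only shows the vtable wiring that replaces the old base-transform hooks.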

View file

@@ -53,8 +53,6 @@ struct _GstRadioacTV
GstVideoFilter element; GstVideoFilter element;
/* < private > */ /* < private > */
GstVideoInfo info;
gint mode; gint mode;
gint color; gint color;
guint interval; guint interval;

View file

@@ -97,43 +97,22 @@ GST_STATIC_PAD_TEMPLATE ("sink",
GST_STATIC_CAPS (CAPS_STR) GST_STATIC_CAPS (CAPS_STR)
); );
static gboolean
gst_revtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
GstRevTV *filter = GST_REVTV (btrans);
GstVideoInfo info;
if (!gst_video_info_from_caps (&info, incaps))
goto invalid_caps;
filter->info = info;
return TRUE;
/* ERRORS */
invalid_caps:
{
GST_DEBUG_OBJECT (filter, "invalid caps received");
return FALSE;
}
}
static GstFlowReturn static GstFlowReturn
gst_revtv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out) gst_revtv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
GstVideoFrame * out_frame)
{ {
GstRevTV *filter = GST_REVTV (trans); GstRevTV *filter = GST_REVTV (vfilter);
guint32 *src, *dest; guint32 *src, *dest;
gint width, height, sstride, dstride; gint width, height, sstride, dstride;
guint32 *nsrc; guint32 *nsrc;
gint y, x, R, G, B, yval; gint y, x, R, G, B, yval;
gint linespace, vscale; gint linespace, vscale;
GstClockTime timestamp, stream_time; GstClockTime timestamp, stream_time;
GstVideoFrame in_frame, out_frame;
timestamp = GST_BUFFER_TIMESTAMP (in); timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time = stream_time =
gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp); gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT, GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp)); GST_TIME_ARGS (timestamp));
@@ -141,19 +120,13 @@ gst_revtv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
if (GST_CLOCK_TIME_IS_VALID (stream_time)) if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (GST_OBJECT (filter), stream_time); gst_object_sync_values (GST_OBJECT (filter), stream_time);
if (!gst_video_frame_map (&in_frame, &filter->info, in, GST_MAP_READ)) src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
goto invalid_in; sstride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
dstride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
if (!gst_video_frame_map (&out_frame, &filter->info, out, GST_MAP_WRITE)) width = GST_VIDEO_FRAME_WIDTH (in_frame);
goto invalid_out; height = GST_VIDEO_FRAME_HEIGHT (in_frame);
src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
sstride = GST_VIDEO_FRAME_PLANE_STRIDE (&in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
dstride = GST_VIDEO_FRAME_PLANE_STRIDE (&out_frame, 0);
width = GST_VIDEO_FRAME_WIDTH (&in_frame);
height = GST_VIDEO_FRAME_HEIGHT (&in_frame);
/* Clear everything to black */ /* Clear everything to black */
memset (dest, 0, dstride * height * sizeof (guint32)); memset (dest, 0, dstride * height * sizeof (guint32));
@@ -181,23 +154,7 @@ gst_revtv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
} }
GST_OBJECT_UNLOCK (filter); GST_OBJECT_UNLOCK (filter);
gst_video_frame_unmap (&in_frame);
gst_video_frame_unmap (&out_frame);
return GST_FLOW_OK; return GST_FLOW_OK;
/* ERRORS */
invalid_in:
{
GST_DEBUG_OBJECT (filter, "invalid input frame");
return GST_FLOW_ERROR;
}
invalid_out:
{
GST_DEBUG_OBJECT (filter, "invalid output frame");
gst_video_frame_unmap (&in_frame);
return GST_FLOW_ERROR;
}
} }
static void static void
@@ -251,7 +208,7 @@ gst_revtv_class_init (GstRevTVClass * klass)
{ {
GObjectClass *gobject_class = (GObjectClass *) klass; GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass; GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass; GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_revtv_set_property; gobject_class->set_property = gst_revtv_set_property;
gobject_class->get_property = gst_revtv_get_property; gobject_class->get_property = gst_revtv_get_property;
@@ -278,8 +235,8 @@ gst_revtv_class_init (GstRevTVClass * klass)
gst_element_class_add_pad_template (gstelement_class, gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_revtv_src_template)); gst_static_pad_template_get (&gst_revtv_src_template));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_revtv_set_caps); vfilter_class->transform_frame =
trans_class->transform = GST_DEBUG_FUNCPTR (gst_revtv_transform); GST_DEBUG_FUNCPTR (gst_revtv_transform_frame);
} }
static void static void
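Inside the new transform_frame callbacks the frames arrive already mapped, so the per-frame boilerplate shrinks to the pattern sketched below (hypothetical callback; assumes a packed 4-bytes-per-pixel format): the timestamp is taken from in_frame->buffer, the segment is reached through the GstBaseTransform parent for controller syncing, and pixel data, strides and dimensions come from the GstVideoFrame accessors.

#include <string.h>
#include <gst/video/gstvideofilter.h>

static GstFlowReturn
foo_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
    GstVideoFrame * out_frame)
{
  GstClockTime timestamp, stream_time;
  guint8 *src, *dest;
  gint width, height, sstride, dstride, y;

  /* sync controlled properties to the stream time of this frame */
  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
  stream_time =
      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
      GST_FORMAT_TIME, timestamp);
  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (vfilter), stream_time);

  /* frames are already mapped; just read data, stride and size */
  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  /* example operation: copy line by line, assuming 4 bytes per pixel */
  for (y = 0; y < height; y++)
    memcpy (dest + y * dstride, src + y * sstride, width * 4);

  return GST_FLOW_OK;
}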

View file

@@ -69,8 +69,6 @@ struct _GstRevTV
GstVideoFilter videofilter; GstVideoFilter videofilter;
/* < private > */ /* < private > */
GstVideoInfo info;
gint vgrabtime; gint vgrabtime;
gint vgrab; gint vgrab;
gint linespace; gint linespace;

View file

@@ -139,9 +139,12 @@ image_bgset_y (guint32 * src, gint16 * background, gint video_area)
static gint static gint
setBackground (GstRippleTV * filter, guint32 * src) setBackground (GstRippleTV * filter, guint32 * src)
{ {
GstVideoInfo *info;
info = &GST_VIDEO_FILTER (filter)->in_info;
image_bgset_y (src, filter->background, image_bgset_y (src, filter->background,
GST_VIDEO_INFO_WIDTH (&filter->info) * GST_VIDEO_INFO_WIDTH (info) * GST_VIDEO_INFO_HEIGHT (info));
GST_VIDEO_INFO_HEIGHT (&filter->info));
filter->bg_is_set = TRUE; filter->bg_is_set = TRUE;
return 0; return 0;
@@ -182,9 +185,12 @@ motiondetect (GstRippleTV * filter, guint32 * src)
gint width, height; gint width, height;
gint *p, *q; gint *p, *q;
gint x, y, h; gint x, y, h;
GstVideoInfo *info;
width = GST_VIDEO_INFO_WIDTH (&filter->info); info = &GST_VIDEO_FILTER (filter)->in_info;
height = GST_VIDEO_INFO_HEIGHT (&filter->info);
width = GST_VIDEO_INFO_WIDTH (info);
height = GST_VIDEO_INFO_HEIGHT (info);
if (!filter->bg_is_set) if (!filter->bg_is_set)
setBackground (filter, src); setBackground (filter, src);
@@ -304,12 +310,11 @@ raindrop (GstRippleTV * filter)
} }
static GstFlowReturn static GstFlowReturn
gst_rippletv_transform (GstBaseTransform * trans, GstBuffer * in, gst_rippletv_transform_frame (GstVideoFilter * vfilter,
GstBuffer * out) GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{ {
GstRippleTV *filter = GST_RIPPLETV (trans); GstRippleTV *filter = GST_RIPPLETV (vfilter);
guint32 *src, *dest; guint32 *src, *dest;
GstVideoFrame in_frame, out_frame;
gint x, y, i; gint x, y, i;
gint dx, dy, o_dx; gint dx, dy, o_dx;
gint h, v; gint h, v;
@@ -318,9 +323,10 @@ gst_rippletv_transform (GstBaseTransform * trans, GstBuffer * in,
gint8 *vp; gint8 *vp;
GstClockTime timestamp, stream_time; GstClockTime timestamp, stream_time;
timestamp = GST_BUFFER_TIMESTAMP (in); timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time = stream_time =
gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp); gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT, GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp)); GST_TIME_ARGS (timestamp));
@@ -328,14 +334,8 @@ gst_rippletv_transform (GstBaseTransform * trans, GstBuffer * in,
if (GST_CLOCK_TIME_IS_VALID (stream_time)) if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (GST_OBJECT (filter), stream_time); gst_object_sync_values (GST_OBJECT (filter), stream_time);
if (!gst_video_frame_map (&in_frame, &filter->info, in, GST_MAP_READ)) src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
goto invalid_in; dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
if (!gst_video_frame_map (&out_frame, &filter->info, out, GST_MAP_WRITE))
goto invalid_out;
src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
GST_OBJECT_LOCK (filter); GST_OBJECT_LOCK (filter);
/* impact from the motion or rain drop */ /* impact from the motion or rain drop */
@@ -346,8 +346,8 @@ gst_rippletv_transform (GstBaseTransform * trans, GstBuffer * in,
m_w = filter->map_w; m_w = filter->map_w;
m_h = filter->map_h; m_h = filter->map_h;
v_w = GST_VIDEO_FRAME_WIDTH (&in_frame); v_w = GST_VIDEO_FRAME_WIDTH (in_frame);
v_h = GST_VIDEO_FRAME_HEIGHT (&in_frame); v_h = GST_VIDEO_FRAME_HEIGHT (in_frame);
/* simulate surface wave */ /* simulate surface wave */
@@ -441,41 +441,18 @@ gst_rippletv_transform (GstBaseTransform * trans, GstBuffer * in,
} }
GST_OBJECT_UNLOCK (filter); GST_OBJECT_UNLOCK (filter);
gst_video_frame_unmap (&in_frame);
gst_video_frame_unmap (&out_frame);
return GST_FLOW_OK; return GST_FLOW_OK;
/* ERRORS */
invalid_in:
{
GST_DEBUG_OBJECT (filter, "invalid input frame");
return GST_FLOW_ERROR;
}
invalid_out:
{
GST_DEBUG_OBJECT (filter, "invalid output frame");
gst_video_frame_unmap (&in_frame);
return GST_FLOW_ERROR;
}
} }
static gboolean static gboolean
gst_rippletv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, gst_rippletv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
GstCaps * outcaps) GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{ {
GstRippleTV *filter = GST_RIPPLETV (btrans); GstRippleTV *filter = GST_RIPPLETV (vfilter);
GstVideoInfo info;
gint width, height; gint width, height;
if (!gst_video_info_from_caps (&info, incaps)) width = GST_VIDEO_INFO_WIDTH (in_info);
goto invalid_caps; height = GST_VIDEO_INFO_HEIGHT (in_info);
filter->info = info;
width = GST_VIDEO_INFO_WIDTH (&info);
height = GST_VIDEO_INFO_HEIGHT (&info);
GST_OBJECT_LOCK (filter); GST_OBJECT_LOCK (filter);
filter->map_h = height / 2 + 1; filter->map_h = height / 2 + 1;
@@ -505,13 +482,6 @@ gst_rippletv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GST_OBJECT_UNLOCK (filter); GST_OBJECT_UNLOCK (filter);
return TRUE; return TRUE;
/* ERRORS */
invalid_caps:
{
GST_DEBUG_OBJECT (filter, "invalid caps received");
return FALSE;
}
} }
static gboolean static gboolean
@@ -601,6 +571,7 @@ gst_rippletv_class_init (GstRippleTVClass * klass)
GObjectClass *gobject_class = (GObjectClass *) klass; GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass; GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass; GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_rippletv_set_property; gobject_class->set_property = gst_rippletv_set_property;
gobject_class->get_property = gst_rippletv_get_property; gobject_class->get_property = gst_rippletv_get_property;
@@ -628,10 +599,12 @@ gst_rippletv_class_init (GstRippleTVClass * klass)
gst_element_class_add_pad_template (gstelement_class, gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rippletv_src_template)); gst_static_pad_template_get (&gst_rippletv_src_template));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_rippletv_set_caps);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_rippletv_transform);
trans_class->start = GST_DEBUG_FUNCPTR (gst_rippletv_start); trans_class->start = GST_DEBUG_FUNCPTR (gst_rippletv_start);
vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_rippletv_set_info);
vfilter_class->transform_frame =
GST_DEBUG_FUNCPTR (gst_rippletv_transform_frame);
setTable (); setTable ();
} }
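Code outside the vmethods no longer keeps a private GstVideoInfo copy; it reads the negotiated format from the in_info/out_info fields that GstVideoFilter stores, as in this small hypothetical helper:

#include <gst/video/gstvideofilter.h>

static gsize
foo_luma_size (GstVideoFilter * filter)
{
  /* the negotiated input format lives in filter->in_info (out_info for the
   * source side); GST_VIDEO_FILTER () can be used when only the element
   * pointer is at hand */
  GstVideoInfo *info = &filter->in_info;

  return (gsize) GST_VIDEO_INFO_WIDTH (info) * GST_VIDEO_INFO_HEIGHT (info);
}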

View file

@@ -53,8 +53,6 @@ struct _GstRippleTV
GstVideoFilter element; GstVideoFilter element;
/* < private > */ /* < private > */
GstVideoInfo info;
gint mode; gint mode;
gint16 *background; gint16 *background;

View file

@@ -75,20 +75,14 @@ GST_STATIC_PAD_TEMPLATE ("sink",
); );
static gboolean static gboolean
gst_shagadelictv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, gst_shagadelictv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
GstCaps * outcaps) GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{ {
GstShagadelicTV *filter = GST_SHAGADELICTV (btrans); GstShagadelicTV *filter = GST_SHAGADELICTV (vfilter);
GstVideoInfo info;
gint width, height, area; gint width, height, area;
if (!gst_video_info_from_caps (&info, incaps)) width = GST_VIDEO_INFO_WIDTH (in_info);
goto invalid_caps; height = GST_VIDEO_INFO_HEIGHT (in_info);
filter->info = info;
width = GST_VIDEO_INFO_WIDTH (&info);
height = GST_VIDEO_INFO_HEIGHT (&info);
area = width * height; area = width * height;
@@ -100,13 +94,6 @@ gst_shagadelictv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
gst_shagadelic_initialize (filter); gst_shagadelic_initialize (filter);
return TRUE; return TRUE;
/* ERRORS */
invalid_caps:
{
GST_DEBUG_OBJECT (filter, "invalid caps received");
return FALSE;
}
} }
static void static void
@@ -119,9 +106,10 @@ gst_shagadelic_initialize (GstShagadelicTV * filter)
double xx, yy; double xx, yy;
#endif #endif
gint width, height; gint width, height;
GstVideoInfo *info = &GST_VIDEO_FILTER (filter)->in_info;
width = GST_VIDEO_INFO_WIDTH (&filter->info); width = GST_VIDEO_INFO_WIDTH (info);
height = GST_VIDEO_INFO_HEIGHT (&filter->info); height = GST_VIDEO_INFO_HEIGHT (info);
i = 0; i = 0;
for (y = 0; y < height * 2; y++) { for (y = 0; y < height * 2; y++) {
@@ -171,28 +159,21 @@ gst_shagadelic_initialize (GstShagadelicTV * filter)
} }
static GstFlowReturn static GstFlowReturn
gst_shagadelictv_transform (GstBaseTransform * trans, GstBuffer * in, gst_shagadelictv_transform_frame (GstVideoFilter * vfilter,
GstBuffer * out) GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{ {
GstShagadelicTV *filter = GST_SHAGADELICTV (trans); GstShagadelicTV *filter = GST_SHAGADELICTV (vfilter);
guint32 *src, *dest; guint32 *src, *dest;
gint x, y; gint x, y;
guint32 v; guint32 v;
guint8 r, g, b; guint8 r, g, b;
GstVideoFrame in_frame, out_frame;
gint width, height; gint width, height;
if (!gst_video_frame_map (&in_frame, &filter->info, in, GST_MAP_READ)) src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
goto invalid_in; dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
if (!gst_video_frame_map (&out_frame, &filter->info, out, GST_MAP_WRITE)) width = GST_VIDEO_FRAME_WIDTH (in_frame);
goto invalid_out; height = GST_VIDEO_FRAME_HEIGHT (in_frame);
src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
width = GST_VIDEO_FRAME_WIDTH (&in_frame);
height = GST_VIDEO_FRAME_HEIGHT (&in_frame);
for (y = 0; y < height; y++) { for (y = 0; y < height; y++) {
for (x = 0; x < width; x++) { for (x = 0; x < width; x++) {
@@ -226,23 +207,7 @@ gst_shagadelictv_transform (GstBaseTransform * trans, GstBuffer * in,
filter->bx += filter->bvx; filter->bx += filter->bvx;
filter->by += filter->bvy; filter->by += filter->bvy;
gst_video_frame_unmap (&in_frame);
gst_video_frame_unmap (&out_frame);
return GST_FLOW_OK; return GST_FLOW_OK;
/* ERRORS */
invalid_in:
{
GST_DEBUG_OBJECT (filter, "invalid input frame");
return GST_FLOW_ERROR;
}
invalid_out:
{
GST_DEBUG_OBJECT (filter, "invalid output frame");
gst_video_frame_unmap (&in_frame);
return GST_FLOW_ERROR;
}
} }
static void static void
@@ -266,7 +231,7 @@ gst_shagadelictv_class_init (GstShagadelicTVClass * klass)
{ {
GObjectClass *gobject_class = (GObjectClass *) klass; GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass; GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass; GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->finalize = gst_shagadelictv_finalize; gobject_class->finalize = gst_shagadelictv_finalize;
@@ -280,8 +245,9 @@ gst_shagadelictv_class_init (GstShagadelicTVClass * klass)
gst_element_class_add_pad_template (gstelement_class, gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_shagadelictv_src_template)); gst_static_pad_template_get (&gst_shagadelictv_src_template));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_shagadelictv_set_caps); vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_shagadelictv_set_info);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_shagadelictv_transform); vfilter_class->transform_frame =
GST_DEBUG_FUNCPTR (gst_shagadelictv_transform_frame);
} }
static void static void
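set_info receives the caps already parsed into GstVideoInfo, so per-format state can be sized directly from in_info and the old invalid_caps error paths disappear. A sketch with a hypothetical element and scratch buffer:

#include <gst/video/gstvideofilter.h>

typedef struct
{
  GstVideoFilter parent;
  guint8 *scratch;              /* hypothetical per-format work buffer */
} GstFooFilter;

static gboolean
foo_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstFooFilter *foo = (GstFooFilter *) vfilter;
  gint width = GST_VIDEO_INFO_WIDTH (in_info);
  gint height = GST_VIDEO_INFO_HEIGHT (in_info);

  /* resize format-dependent state; returning FALSE would reject the caps */
  g_free (foo->scratch);
  foo->scratch = g_malloc0 ((gsize) width * height);

  return TRUE;
}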

View file

@@ -51,8 +51,6 @@ struct _GstShagadelicTV
GstVideoFilter videofilter; GstVideoFilter videofilter;
/* < private > */ /* < private > */
GstVideoInfo info;
guint8 *ripple; guint8 *ripple;
guint8 *spiral; guint8 *spiral;
guint8 phase; guint8 phase;

View file

@@ -78,29 +78,22 @@ GST_STATIC_PAD_TEMPLATE ("sink",
static GstFlowReturn static GstFlowReturn
gst_streaktv_transform (GstBaseTransform * trans, GstBuffer * in, gst_streaktv_transform_frame (GstVideoFilter * vfilter,
GstBuffer * out) GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{ {
GstStreakTV *filter = GST_STREAKTV (trans); GstStreakTV *filter = GST_STREAKTV (vfilter);
guint32 *src, *dest; guint32 *src, *dest;
GstVideoFrame in_frame, out_frame;
gint i, cf; gint i, cf;
gint video_area, width, height; gint video_area, width, height;
guint32 **planetable = filter->planetable; guint32 **planetable = filter->planetable;
gint plane = filter->plane; gint plane = filter->plane;
guint stride_mask, stride_shift, stride; guint stride_mask, stride_shift, stride;
if (!gst_video_frame_map (&in_frame, &filter->info, in, GST_MAP_READ)) src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
goto invalid_in; dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
if (!gst_video_frame_map (&out_frame, &filter->info, out, GST_MAP_WRITE)) width = GST_VIDEO_FRAME_WIDTH (in_frame);
goto invalid_out; height = GST_VIDEO_FRAME_HEIGHT (in_frame);
src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
width = GST_VIDEO_FRAME_WIDTH (&in_frame);
height = GST_VIDEO_FRAME_HEIGHT (&in_frame);
video_area = width * height; video_area = width * height;
@@ -145,40 +138,18 @@ gst_streaktv_transform (GstBaseTransform * trans, GstBuffer * in,
filter->plane = plane & (PLANES - 1); filter->plane = plane & (PLANES - 1);
GST_OBJECT_UNLOCK (filter); GST_OBJECT_UNLOCK (filter);
gst_video_frame_unmap (&in_frame);
gst_video_frame_unmap (&out_frame);
return GST_FLOW_OK; return GST_FLOW_OK;
/* ERRORS */
invalid_in:
{
GST_DEBUG_OBJECT (filter, "invalid input frame");
return GST_FLOW_ERROR;
}
invalid_out:
{
GST_DEBUG_OBJECT (filter, "invalid output frame");
gst_video_frame_unmap (&in_frame);
return GST_FLOW_ERROR;
}
} }
static gboolean static gboolean
gst_streaktv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, gst_streaktv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
GstCaps * outcaps) GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{ {
GstStreakTV *filter = GST_STREAKTV (btrans); GstStreakTV *filter = GST_STREAKTV (vfilter);
GstVideoInfo info;
gint i, width, height; gint i, width, height;
if (!gst_video_info_from_caps (&info, incaps)) width = GST_VIDEO_INFO_WIDTH (in_info);
goto invalid_caps; height = GST_VIDEO_INFO_HEIGHT (in_info);
filter->info = info;
width = GST_VIDEO_INFO_WIDTH (&info);
height = GST_VIDEO_INFO_HEIGHT (&info);
if (filter->planebuffer) if (filter->planebuffer)
g_free (filter->planebuffer); g_free (filter->planebuffer);
@@ -189,13 +160,6 @@ gst_streaktv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
filter->planetable[i] = &filter->planebuffer[width * height * i]; filter->planetable[i] = &filter->planebuffer[width * height * i];
return TRUE; return TRUE;
/* ERRORS */
invalid_caps:
{
GST_DEBUG_OBJECT (filter, "invalid caps received");
return FALSE;
}
} }
static gboolean static gboolean
@@ -265,6 +229,7 @@ gst_streaktv_class_init (GstStreakTVClass * klass)
GObjectClass *gobject_class = (GObjectClass *) klass; GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass; GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass; GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_streaktv_set_property; gobject_class->set_property = gst_streaktv_set_property;
gobject_class->get_property = gst_streaktv_get_property; gobject_class->get_property = gst_streaktv_get_property;
@@ -287,9 +252,11 @@ gst_streaktv_class_init (GstStreakTVClass * klass)
gst_element_class_add_pad_template (gstelement_class, gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_streaktv_src_template)); gst_static_pad_template_get (&gst_streaktv_src_template));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_streaktv_set_caps);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_streaktv_transform);
trans_class->start = GST_DEBUG_FUNCPTR (gst_streaktv_start); trans_class->start = GST_DEBUG_FUNCPTR (gst_streaktv_start);
vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_streaktv_set_info);
vfilter_class->transform_frame =
GST_DEBUG_FUNCPTR (gst_streaktv_transform_frame);
} }
static void static void

View file

@@ -55,8 +55,6 @@ struct _GstStreakTV
GstVideoFilter element; GstVideoFilter element;
/* < private > */ /* < private > */
GstVideoInfo info;
gboolean feedback; gboolean feedback;
guint32 *planebuffer; guint32 *planebuffer;

View file

@@ -75,20 +75,14 @@ GST_STATIC_PAD_TEMPLATE ("sink",
); );
static gboolean static gboolean
gst_vertigotv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, gst_vertigotv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
GstCaps * outcaps) GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{ {
GstVertigoTV *filter = GST_VERTIGOTV (btrans); GstVertigoTV *filter = GST_VERTIGOTV (vfilter);
GstVideoInfo info;
gint area, width, height; gint area, width, height;
if (!gst_video_info_from_caps (&info, incaps)) width = GST_VIDEO_INFO_WIDTH (in_info);
goto invalid_caps; height = GST_VIDEO_INFO_HEIGHT (in_info);
filter->info = info;
width = GST_VIDEO_INFO_WIDTH (&info);
height = GST_VIDEO_INFO_HEIGHT (&info);
area = width * height; area = width * height;
@@ -100,13 +94,6 @@ gst_vertigotv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
filter->phase = 0; filter->phase = 0;
return TRUE; return TRUE;
/* ERRORS */
invalid_caps:
{
GST_DEBUG_OBJECT (filter, "invalid caps received");
return FALSE;
}
} }
static void static void
@@ -117,11 +104,14 @@ gst_vertigotv_set_parms (GstVertigoTV * filter)
double x, y; double x, y;
double dizz; double dizz;
gint width, height; gint width, height;
GstVideoInfo *info;
dizz = sin (filter->phase) * 10 + sin (filter->phase * 1.9 + 5) * 5; dizz = sin (filter->phase) * 10 + sin (filter->phase * 1.9 + 5) * 5;
width = GST_VIDEO_INFO_WIDTH (&filter->info); info = &GST_VIDEO_FILTER (filter)->in_info;
height = GST_VIDEO_INFO_HEIGHT (&filter->info);
width = GST_VIDEO_INFO_WIDTH (info);
height = GST_VIDEO_INFO_HEIGHT (info);
x = width / 2; x = width / 2;
y = height / 2; y = height / 2;
@@ -162,19 +152,19 @@ gst_vertigotv_set_parms (GstVertigoTV * filter)
} }
static GstFlowReturn static GstFlowReturn
gst_vertigotv_transform (GstBaseTransform * trans, GstBuffer * in, gst_vertigotv_transform_frame (GstVideoFilter * vfilter,
GstBuffer * out) GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{ {
GstVertigoTV *filter = GST_VERTIGOTV (trans); GstVertigoTV *filter = GST_VERTIGOTV (vfilter);
guint32 *src, *dest, *p; guint32 *src, *dest, *p;
guint32 v; guint32 v;
gint x, y, ox, oy, i, width, height, area, sstride, dstride; gint x, y, ox, oy, i, width, height, area, sstride, dstride;
GstClockTime timestamp, stream_time; GstClockTime timestamp, stream_time;
GstVideoFrame in_frame, out_frame;
timestamp = GST_BUFFER_TIMESTAMP (in); timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time = stream_time =
gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp); gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT, GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp)); GST_TIME_ARGS (timestamp));
@@ -182,19 +172,13 @@ gst_vertigotv_transform (GstBaseTransform * trans, GstBuffer * in,
if (GST_CLOCK_TIME_IS_VALID (stream_time)) if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (GST_OBJECT (filter), stream_time); gst_object_sync_values (GST_OBJECT (filter), stream_time);
if (!gst_video_frame_map (&in_frame, &filter->info, in, GST_MAP_READ)) src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
goto invalid_in; sstride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
dstride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
if (!gst_video_frame_map (&out_frame, &filter->info, out, GST_MAP_WRITE)) width = GST_VIDEO_FRAME_WIDTH (in_frame);
goto invalid_out; height = GST_VIDEO_FRAME_HEIGHT (in_frame);
src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0);
sstride = GST_VIDEO_FRAME_PLANE_STRIDE (&in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0);
dstride = GST_VIDEO_FRAME_PLANE_STRIDE (&out_frame, 0);
width = GST_VIDEO_FRAME_WIDTH (&in_frame);
height = GST_VIDEO_FRAME_HEIGHT (&in_frame);
area = width * height; area = width * height;
@@ -234,20 +218,6 @@ gst_vertigotv_transform (GstBaseTransform * trans, GstBuffer * in,
filter->alt_buffer = p; filter->alt_buffer = p;
return GST_FLOW_OK; return GST_FLOW_OK;
/* ERRORS */
invalid_in:
{
GST_DEBUG_OBJECT (filter, "invalid input frame");
return GST_FLOW_ERROR;
}
invalid_out:
{
GST_DEBUG_OBJECT (filter, "invalid output frame");
gst_video_frame_unmap (&in_frame);
return GST_FLOW_ERROR;
}
} }
static gboolean static gboolean
@@ -316,6 +286,7 @@ gst_vertigotv_class_init (GstVertigoTVClass * klass)
GObjectClass *gobject_class = (GObjectClass *) klass; GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass; GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass; GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->set_property = gst_vertigotv_set_property; gobject_class->set_property = gst_vertigotv_set_property;
gobject_class->get_property = gst_vertigotv_get_property; gobject_class->get_property = gst_vertigotv_get_property;
@@ -340,8 +311,10 @@ gst_vertigotv_class_init (GstVertigoTVClass * klass)
gst_static_pad_template_get (&gst_vertigotv_src_template)); gst_static_pad_template_get (&gst_vertigotv_src_template));
trans_class->start = GST_DEBUG_FUNCPTR (gst_vertigotv_start); trans_class->start = GST_DEBUG_FUNCPTR (gst_vertigotv_start);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_vertigotv_set_caps);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_vertigotv_transform); vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_vertigotv_set_info);
vfilter_class->transform_frame =
GST_DEBUG_FUNCPTR (gst_vertigotv_transform_frame);
} }
static void static void

View file

@@ -50,8 +50,6 @@ struct _GstVertigoTV
GstVideoFilter videofilter; GstVideoFilter videofilter;
/* < private > */ /* < private > */
GstVideoInfo info;
guint32 *buffer; guint32 *buffer;
guint32 *current_buffer, *alt_buffer; guint32 *current_buffer, *alt_buffer;
gint dx, dy; gint dx, dy;

View file

@@ -82,33 +82,20 @@ GST_STATIC_PAD_TEMPLATE ("sink",
); );
static gboolean static gboolean
gst_warptv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, gst_warptv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
GstCaps * outcaps) GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{ {
GstWarpTV *filter = GST_WARPTV (btrans); GstWarpTV *filter = GST_WARPTV (vfilter);
GstVideoInfo info;
gint width, height; gint width, height;
if (!gst_video_info_from_caps (&info, incaps)) width = GST_VIDEO_INFO_WIDTH (in_info);
goto invalid_caps; height = GST_VIDEO_INFO_HEIGHT (in_info);
filter->info = info;
width = GST_VIDEO_INFO_WIDTH (&info);
height = GST_VIDEO_INFO_HEIGHT (&info);
g_free (filter->disttable); g_free (filter->disttable);
filter->disttable = g_malloc (width * height * sizeof (guint32)); filter->disttable = g_malloc (width * height * sizeof (guint32));
initDistTable (filter, width, height); initDistTable (filter, width, height);
return TRUE; return TRUE;
/* ERRORS */
invalid_caps:
{
GST_DEBUG_OBJECT (filter, "invalid caps received");
return FALSE;
}
} }
static gint32 sintable[1024 + 256]; static gint32 sintable[1024 + 256];
@@ -152,9 +139,10 @@ initDistTable (GstWarpTV * filter, gint width, gint height)
} }
static GstFlowReturn static GstFlowReturn
gst_warptv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out) gst_warptv_transform_frame (GstVideoFilter * filter, GstVideoFrame * in_frame,
GstVideoFrame * out_frame)
{ {
GstWarpTV *warptv = GST_WARPTV (trans); GstWarpTV *warptv = GST_WARPTV (filter);
gint width, height; gint width, height;
gint xw, yw, cw; gint xw, yw, cw;
gint32 c, i, x, y, dx, dy, maxx, maxy; gint32 c, i, x, y, dx, dy, maxx, maxy;
@@ -162,19 +150,15 @@ gst_warptv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
gint32 *ctable; gint32 *ctable;
guint32 *src, *dest; guint32 *src, *dest;
gint sstride, dstride; gint sstride, dstride;
GstVideoFrame in_frame, out_frame;
gst_video_frame_map (&in_frame, &warptv->info, in, GST_MAP_READ); src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
gst_video_frame_map (&out_frame, &warptv->info, out, GST_MAP_WRITE); dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
src = GST_VIDEO_FRAME_PLANE_DATA (&in_frame, 0); sstride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0) / 4;
dest = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, 0); dstride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0) / 4;
sstride = GST_VIDEO_FRAME_PLANE_STRIDE (&in_frame, 0) / 4; width = GST_VIDEO_FRAME_WIDTH (in_frame);
dstride = GST_VIDEO_FRAME_PLANE_STRIDE (&out_frame, 0) / 4; height = GST_VIDEO_FRAME_HEIGHT (in_frame);
width = GST_VIDEO_FRAME_WIDTH (&in_frame);
height = GST_VIDEO_FRAME_HEIGHT (&in_frame);
GST_OBJECT_LOCK (warptv); GST_OBJECT_LOCK (warptv);
xw = (gint) (sin ((warptv->tval + 100) * M_PI / 128) * 30); xw = (gint) (sin ((warptv->tval + 100) * M_PI / 128) * 30);
@@ -222,9 +206,6 @@ gst_warptv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
warptv->tval = (warptv->tval + 1) & 511; warptv->tval = (warptv->tval + 1) & 511;
GST_OBJECT_UNLOCK (warptv); GST_OBJECT_UNLOCK (warptv);
gst_video_frame_unmap (&in_frame);
gst_video_frame_unmap (&out_frame);
return GST_FLOW_OK; return GST_FLOW_OK;
} }
@@ -238,26 +219,6 @@ gst_warptv_start (GstBaseTransform * trans)
return TRUE; return TRUE;
} }
static gboolean
gst_wraptv_decide_allocation (GstBaseTransform * trans, GstQuery * query)
{
GstBufferPool *pool = NULL;
guint size, min, max, prefix, alignment;
gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
&alignment, &pool);
if (pool) {
GstStructure *config;
config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_add_option (config,
GST_BUFFER_POOL_OPTION_VIDEO_META);
gst_buffer_pool_set_config (pool, config);
}
return TRUE;
}
static void static void
gst_warptv_finalize (GObject * object) gst_warptv_finalize (GObject * object)
{ {
@@ -275,6 +236,7 @@ gst_warptv_class_init (GstWarpTVClass * klass)
GObjectClass *gobject_class = (GObjectClass *) klass; GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass; GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass; GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
gobject_class->finalize = gst_warptv_finalize; gobject_class->finalize = gst_warptv_finalize;
@@ -289,10 +251,10 @@ gst_warptv_class_init (GstWarpTVClass * klass)
gst_static_pad_template_get (&gst_warptv_src_template)); gst_static_pad_template_get (&gst_warptv_src_template));
trans_class->start = GST_DEBUG_FUNCPTR (gst_warptv_start); trans_class->start = GST_DEBUG_FUNCPTR (gst_warptv_start);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_warptv_set_caps);
trans_class->decide_allocation = vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_warptv_set_info);
GST_DEBUG_FUNCPTR (gst_wraptv_decide_allocation); vfilter_class->transform_frame =
trans_class->transform = GST_DEBUG_FUNCPTR (gst_warptv_transform); GST_DEBUG_FUNCPTR (gst_warptv_transform_frame);
initSinTable (); initSinTable ();
} }
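As in the warptv port above, the plane stride reported by GST_VIDEO_FRAME_PLANE_STRIDE() is in bytes, so code that indexes guint32 pixels divides it by 4 first. A hypothetical illustration:

#include <gst/video/gstvideofilter.h>

static void
foo_fill_frame (GstVideoFrame * out_frame, guint32 pixel)
{
  guint32 *dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
  gint dstride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0) / 4;  /* bytes -> pixels */
  gint width = GST_VIDEO_FRAME_WIDTH (out_frame);
  gint height = GST_VIDEO_FRAME_HEIGHT (out_frame);
  gint x, y;

  for (y = 0; y < height; y++)
    for (x = 0; x < width; x++)
      dest[y * dstride + x] = pixel;
}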

View file

@@ -50,8 +50,6 @@ struct _GstWarpTV
GstVideoFilter videofilter; GstVideoFilter videofilter;
/* < private > */ /* < private > */
GstVideoInfo info;
gint32 *disttable; gint32 *disttable;
gint32 ctable[1024]; gint32 ctable[1024];
gint tval; gint tval;

View file

@@ -94,10 +94,10 @@ static void gst_gamma_set_property (GObject * object, guint prop_id,
static void gst_gamma_get_property (GObject * object, guint prop_id, static void gst_gamma_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec); GValue * value, GParamSpec * pspec);
static gboolean gst_gamma_set_caps (GstBaseTransform * base, GstCaps * incaps, static gboolean gst_gamma_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
GstCaps * outcaps); GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info);
static GstFlowReturn gst_gamma_transform_ip (GstBaseTransform * transform, static GstFlowReturn gst_gamma_transform_frame_ip (GstVideoFilter * vfilter,
GstBuffer * buf); GstVideoFrame * frame);
static void gst_gamma_before_transform (GstBaseTransform * transform, static void gst_gamma_before_transform (GstBaseTransform * transform,
GstBuffer * buf); GstBuffer * buf);
@@ -111,6 +111,7 @@ gst_gamma_class_init (GstGammaClass * g_class)
GObjectClass *gobject_class = (GObjectClass *) g_class; GObjectClass *gobject_class = (GObjectClass *) g_class;
GstElementClass *gstelement_class = (GstElementClass *) g_class; GstElementClass *gstelement_class = (GstElementClass *) g_class;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) g_class; GstBaseTransformClass *trans_class = (GstBaseTransformClass *) g_class;
GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) g_class;
GST_DEBUG_CATEGORY_INIT (gamma_debug, "gamma", 0, "gamma"); GST_DEBUG_CATEGORY_INIT (gamma_debug, "gamma", 0, "gamma");
@@ -131,10 +132,12 @@ gst_gamma_class_init (GstGammaClass * g_class)
gst_element_class_add_pad_template (gstelement_class, gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_gamma_src_template)); gst_static_pad_template_get (&gst_gamma_src_template));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_gamma_set_caps);
trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_gamma_transform_ip);
trans_class->before_transform = trans_class->before_transform =
GST_DEBUG_FUNCPTR (gst_gamma_before_transform); GST_DEBUG_FUNCPTR (gst_gamma_before_transform);
vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_gamma_set_info);
vfilter_class->transform_frame_ip =
GST_DEBUG_FUNCPTR (gst_gamma_transform_frame_ip);
} }
static void static void
@@ -318,22 +321,16 @@ gst_gamma_packed_rgb_ip (GstGamma * gamma, GstVideoFrame * frame)
} }
static gboolean static gboolean
gst_gamma_set_caps (GstBaseTransform * base, GstCaps * incaps, gst_gamma_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
GstCaps * outcaps) GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{ {
GstGamma *gamma = GST_GAMMA (base); GstGamma *gamma = GST_GAMMA (vfilter);
GstVideoInfo info;
GST_DEBUG_OBJECT (gamma, GST_DEBUG_OBJECT (gamma,
"setting caps: in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, "setting caps: in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps,
outcaps); outcaps);
if (!gst_video_info_from_caps (&info, incaps)) switch (GST_VIDEO_INFO_FORMAT (in_info)) {
goto invalid_caps;
gamma->info = info;
switch (GST_VIDEO_INFO_FORMAT (&info)) {
case GST_VIDEO_FORMAT_I420: case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12: case GST_VIDEO_FORMAT_YV12:
case GST_VIDEO_FORMAT_Y41B: case GST_VIDEO_FORMAT_Y41B:
@@ -365,9 +362,9 @@ gst_gamma_set_caps (GstBaseTransform * base, GstCaps * incaps,
goto invalid_caps; goto invalid_caps;
break; break;
} }
return TRUE; return TRUE;
/* ERRORS */
invalid_caps: invalid_caps:
{ {
GST_ERROR_OBJECT (gamma, "Invalid caps: %" GST_PTR_FORMAT, incaps); GST_ERROR_OBJECT (gamma, "Invalid caps: %" GST_PTR_FORMAT, incaps);
@@ -393,36 +390,24 @@ gst_gamma_before_transform (GstBaseTransform * base, GstBuffer * outbuf)
} }
static GstFlowReturn static GstFlowReturn
gst_gamma_transform_ip (GstBaseTransform * base, GstBuffer * outbuf) gst_gamma_transform_frame_ip (GstVideoFilter * vfilter, GstVideoFrame * frame)
{ {
GstGamma *gamma = GST_GAMMA (base); GstGamma *gamma = GST_GAMMA (vfilter);
GstVideoFrame frame;
if (!gamma->process) if (!gamma->process)
goto not_negotiated; goto not_negotiated;
if (base->passthrough) if (GST_BASE_TRANSFORM (vfilter)->passthrough)
goto done; goto done;
if (!gst_video_frame_map (&frame, &gamma->info, outbuf, GST_MAP_READWRITE))
goto wrong_buffer;
GST_OBJECT_LOCK (gamma); GST_OBJECT_LOCK (gamma);
gamma->process (gamma, &frame); gamma->process (gamma, frame);
GST_OBJECT_UNLOCK (gamma); GST_OBJECT_UNLOCK (gamma);
gst_video_frame_unmap (&frame);
done: done:
return GST_FLOW_OK; return GST_FLOW_OK;
/* ERRORS */ /* ERRORS */
wrong_buffer:
{
GST_ELEMENT_ERROR (gamma, STREAM, FORMAT,
(NULL), ("Invalid buffer received"));
return GST_FLOW_ERROR;
}
not_negotiated: not_negotiated:
{ {
GST_ERROR_OBJECT (gamma, "Not negotiated yet"); GST_ERROR_OBJECT (gamma, "Not negotiated yet");
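For in-place elements such as gamma and videobalance the equivalent hook is transform_frame_ip, which hands over a single mapped, writable frame. A sketch (hypothetical element, assuming an 8-bit format where plane 0 is the luma plane):

#include <gst/video/gstvideofilter.h>

static GstFlowReturn
foo_transform_frame_ip (GstVideoFilter * vfilter, GstVideoFrame * frame)
{
  guint8 *data;
  gint stride, width, height, x, y;

  /* nothing to do when the base transform runs in passthrough */
  if (gst_base_transform_is_passthrough (GST_BASE_TRANSFORM (vfilter)))
    return GST_FLOW_OK;

  data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  GST_OBJECT_LOCK (vfilter);
  /* example in-place operation: invert the luma plane */
  for (y = 0; y < height; y++)
    for (x = 0; x < width; x++)
      data[y * stride + x] = 255 - data[y * stride + x];
  GST_OBJECT_UNLOCK (vfilter);

  return GST_FLOW_OK;
}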

View file

@@ -54,10 +54,6 @@ struct _GstGamma
GstVideoFilter videofilter; GstVideoFilter videofilter;
/* < private > */ /* < private > */
/* format */
GstVideoInfo info;
/* properties */ /* properties */
gdouble gamma; gdouble gamma;

View file

@@ -354,21 +354,17 @@ gst_video_balance_packed_rgb (GstVideoBalance * videobalance,
/* get notified of caps and plug in the correct process function */ /* get notified of caps and plug in the correct process function */
static gboolean static gboolean
gst_video_balance_set_caps (GstBaseTransform * base, GstCaps * incaps, gst_video_balance_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
GstCaps * outcaps) GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{ {
GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base); GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
GstVideoInfo info;
GST_DEBUG_OBJECT (videobalance, GST_DEBUG_OBJECT (videobalance,
"in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps); "in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);
videobalance->process = NULL; videobalance->process = NULL;
if (!gst_video_info_from_caps (&info, incaps)) switch (GST_VIDEO_INFO_FORMAT (in_info)) {
goto invalid_caps;
switch (GST_VIDEO_INFO_FORMAT (&info)) {
case GST_VIDEO_FORMAT_I420: case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12: case GST_VIDEO_FORMAT_YV12:
case GST_VIDEO_FORMAT_Y41B: case GST_VIDEO_FORMAT_Y41B:
@@ -399,15 +395,9 @@ gst_video_balance_set_caps (GstBaseTransform * base, GstCaps * incaps,
break; break;
} }
videobalance->info = info;
return TRUE; return TRUE;
invalid_caps: /* ERRORS */
{
GST_ERROR_OBJECT (videobalance, "Invalid caps: %" GST_PTR_FORMAT, incaps);
return FALSE;
}
unknown_format: unknown_format:
{ {
GST_ERROR_OBJECT (videobalance, "unknown format %" GST_PTR_FORMAT, incaps); GST_ERROR_OBJECT (videobalance, "unknown format %" GST_PTR_FORMAT, incaps);
@@ -433,38 +423,26 @@ gst_video_balance_before_transform (GstBaseTransform * base, GstBuffer * buf)
} }
static GstFlowReturn static GstFlowReturn
gst_video_balance_transform_ip (GstBaseTransform * base, GstBuffer * outbuf) gst_video_balance_transform_frame_ip (GstVideoFilter * vfilter,
GstVideoFrame * frame)
{ {
GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base); GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
GstVideoFrame frame;
if (!videobalance->process) if (!videobalance->process)
goto not_negotiated; goto not_negotiated;
/* if no change is needed, we are done */ /* if no change is needed, we are done */
if (base->passthrough) if (GST_BASE_TRANSFORM (vfilter)->passthrough)
goto done; goto done;
if (!gst_video_frame_map (&frame, &videobalance->info, outbuf,
GST_MAP_READWRITE))
goto wrong_frame;
GST_OBJECT_LOCK (videobalance); GST_OBJECT_LOCK (videobalance);
videobalance->process (videobalance, &frame); videobalance->process (videobalance, frame);
GST_OBJECT_UNLOCK (videobalance); GST_OBJECT_UNLOCK (videobalance);
gst_video_frame_unmap (&frame);
done: done:
return GST_FLOW_OK; return GST_FLOW_OK;
/* ERRORS */ /* ERRORS */
wrong_frame:
{
GST_ELEMENT_ERROR (videobalance, STREAM, FORMAT,
(NULL), ("Invalid buffer received"));
return GST_FLOW_ERROR;
}
not_negotiated: not_negotiated:
{ {
GST_ERROR_OBJECT (videobalance, "Not negotiated yet"); GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
@@ -501,6 +479,7 @@ gst_video_balance_class_init (GstVideoBalanceClass * klass)
GObjectClass *gobject_class = (GObjectClass *) klass; GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass; GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass; GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
GST_DEBUG_CATEGORY_INIT (videobalance_debug, "videobalance", 0, GST_DEBUG_CATEGORY_INIT (videobalance_debug, "videobalance", 0,
"videobalance"); "videobalance");
@@ -535,11 +514,12 @@ gst_video_balance_class_init (GstVideoBalanceClass * klass)
gst_element_class_add_pad_template (gstelement_class, gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_video_balance_src_template)); gst_static_pad_template_get (&gst_video_balance_src_template));
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_balance_set_caps);
trans_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_video_balance_transform_ip);
trans_class->before_transform = trans_class->before_transform =
GST_DEBUG_FUNCPTR (gst_video_balance_before_transform); GST_DEBUG_FUNCPTR (gst_video_balance_before_transform);
vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_balance_set_info);
vfilter_class->transform_frame_ip =
GST_DEBUG_FUNCPTR (gst_video_balance_transform_frame_ip);
} }
static void static void
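The videobalance and gamma ports keep their per-format process function pointers; only the place where they are chosen and called changes. A condensed sketch of that dispatch with a hypothetical element (the format list and process stubs are illustrative only):

#include <gst/video/gstvideofilter.h>

typedef struct
{
  GstVideoFilter parent;
  void (*process) (GstVideoFrame * frame);      /* hypothetical hook */
} GstFooBalance;

static void
foo_process_planar_yuv (GstVideoFrame * frame)
{
  /* ... */
}

static void
foo_process_packed_rgb (GstVideoFrame * frame)
{
  /* ... */
}

static gboolean
foo_balance_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstFooBalance *foo = (GstFooBalance *) vfilter;

  switch (GST_VIDEO_INFO_FORMAT (in_info)) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
      foo->process = foo_process_planar_yuv;
      break;
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_BGRx:
      foo->process = foo_process_packed_rgb;
      break;
    default:
      foo->process = NULL;
      return FALSE;             /* unsupported format, reject the caps */
  }
  return TRUE;
}

static GstFlowReturn
foo_balance_transform_frame_ip (GstVideoFilter * vfilter, GstVideoFrame * frame)
{
  GstFooBalance *foo = (GstFooBalance *) vfilter;

  if (foo->process == NULL)
    return GST_FLOW_NOT_NEGOTIATED;

  foo->process (frame);
  return GST_FLOW_OK;
}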

View file

@@ -60,9 +60,6 @@ struct _GstVideoBalance {
gdouble hue; gdouble hue;
gdouble saturation; gdouble saturation;
/* format */
GstVideoInfo info;
/* tables */ /* tables */
guint8 tabley[256]; guint8 tabley[256];
guint8 *tableu[256]; guint8 *tableu[256];

View file

@@ -180,24 +180,6 @@ gst_video_flip_transform_caps (GstBaseTransform * trans,
return ret; return ret;
} }
static gboolean
gst_video_flip_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
gsize * size)
{
GstVideoInfo info;
if (!gst_video_info_from_caps (&info, caps))
return FALSE;
*size = info.size;
GST_DEBUG_OBJECT (btrans,
"our frame size is %" G_GSIZE_FORMAT " bytes (%dx%d)", *size, info.width,
info.height);
return TRUE;
}
static void static void
gst_video_flip_planar_yuv (GstVideoFlip * videoflip, GstVideoFrame * dest, gst_video_flip_planar_yuv (GstVideoFlip * videoflip, GstVideoFrame * dest,
const GstVideoFrame * src) const GstVideoFrame * src)
@@ -765,20 +747,15 @@ gst_video_flip_y422 (GstVideoFlip * videoflip, GstVideoFrame * dest,
static gboolean static gboolean
gst_video_flip_set_caps (GstBaseTransform * btrans, GstCaps * incaps, gst_video_flip_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
GstCaps * outcaps) GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{ {
GstVideoFlip *vf = GST_VIDEO_FLIP (btrans); GstVideoFlip *vf = GST_VIDEO_FLIP (vfilter);
GstVideoInfo in_info, out_info;
gboolean ret = FALSE; gboolean ret = FALSE;
vf->process = NULL; vf->process = NULL;
if (!gst_video_info_from_caps (&in_info, incaps) if (GST_VIDEO_INFO_FORMAT (in_info) != GST_VIDEO_INFO_FORMAT (out_info))
|| !gst_video_info_from_caps (&out_info, outcaps))
goto invalid_caps;
if (GST_VIDEO_INFO_FORMAT (&in_info) != GST_VIDEO_INFO_FORMAT (&out_info))
goto invalid_caps; goto invalid_caps;
/* Check that they are correct */ /* Check that they are correct */
@@ -787,11 +764,11 @@ gst_video_flip_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
case GST_VIDEO_FLIP_METHOD_90L: case GST_VIDEO_FLIP_METHOD_90L:
case GST_VIDEO_FLIP_METHOD_TRANS: case GST_VIDEO_FLIP_METHOD_TRANS:
case GST_VIDEO_FLIP_METHOD_OTHER: case GST_VIDEO_FLIP_METHOD_OTHER:
if ((in_info.width != out_info.height) || if ((in_info->width != out_info->height) ||
(in_info.height != out_info.width)) { (in_info->height != out_info->width)) {
GST_ERROR_OBJECT (vf, "we are inverting width and height but caps " GST_ERROR_OBJECT (vf, "we are inverting width and height but caps "
"are not correct : %dx%d to %dx%d", in_info.width, "are not correct : %dx%d to %dx%d", in_info->width,
in_info.height, out_info.width, out_info.height); in_info->height, out_info->width, out_info->height);
goto beach; goto beach;
} }
break; break;
@@ -801,11 +778,11 @@ gst_video_flip_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
case GST_VIDEO_FLIP_METHOD_180: case GST_VIDEO_FLIP_METHOD_180:
case GST_VIDEO_FLIP_METHOD_HORIZ: case GST_VIDEO_FLIP_METHOD_HORIZ:
case GST_VIDEO_FLIP_METHOD_VERT: case GST_VIDEO_FLIP_METHOD_VERT:
if ((in_info.width != out_info.width) || if ((in_info->width != out_info->width) ||
(in_info.height != out_info.height)) { (in_info->height != out_info->height)) {
GST_ERROR_OBJECT (vf, "we are keeping width and height but caps " GST_ERROR_OBJECT (vf, "we are keeping width and height but caps "
"are not correct : %dx%d to %dx%d", in_info.width, "are not correct : %dx%d to %dx%d", in_info->width,
in_info.height, out_info.width, out_info.height); in_info->height, out_info->width, out_info->height);
goto beach; goto beach;
} }
break; break;
@@ -815,10 +792,8 @@ gst_video_flip_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
} }
ret = TRUE; ret = TRUE;
vf->in_info = in_info;
vf->out_info = out_info;
switch (GST_VIDEO_INFO_FORMAT (&in_info)) { switch (GST_VIDEO_INFO_FORMAT (in_info)) {
case GST_VIDEO_FORMAT_I420: case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12: case GST_VIDEO_FORMAT_YV12:
case GST_VIDEO_FORMAT_Y444: case GST_VIDEO_FORMAT_Y444:
@@ -873,34 +848,21 @@ gst_video_flip_before_transform (GstBaseTransform * trans, GstBuffer * in)
} }
static GstFlowReturn static GstFlowReturn
gst_video_flip_transform (GstBaseTransform * trans, GstBuffer * in, gst_video_flip_transform_frame (GstVideoFilter * vfilter,
GstBuffer * out) GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{ {
GstVideoFlip *videoflip = GST_VIDEO_FLIP (trans); GstVideoFlip *videoflip = GST_VIDEO_FLIP (vfilter);
GstVideoFrame dest;
GstVideoFrame src;
if (G_UNLIKELY (videoflip->process == NULL)) if (G_UNLIKELY (videoflip->process == NULL))
goto not_negotiated; goto not_negotiated;
if (!gst_video_frame_map (&src, &videoflip->in_info, in, GST_MAP_READ)) GST_LOG_OBJECT (videoflip, "videoflip: flipping (%s)",
goto invalid_in;
if (!gst_video_frame_map (&dest, &videoflip->out_info, out, GST_MAP_WRITE))
goto invalid_out;
GST_LOG_OBJECT (videoflip, "videoflip: flipping %dx%d to %dx%d (%s)",
videoflip->in_info.width, videoflip->in_info.height,
videoflip->out_info.width, videoflip->out_info.height,
video_flip_methods[videoflip->method].value_nick); video_flip_methods[videoflip->method].value_nick);
GST_OBJECT_LOCK (videoflip); GST_OBJECT_LOCK (videoflip);
videoflip->process (videoflip, &dest, &src); videoflip->process (videoflip, out_frame, in_frame);
GST_OBJECT_UNLOCK (videoflip); GST_OBJECT_UNLOCK (videoflip);
gst_video_frame_unmap (&src);
gst_video_frame_unmap (&dest);
return GST_FLOW_OK; return GST_FLOW_OK;
not_negotiated: not_negotiated:
@@ -908,17 +870,6 @@ not_negotiated:
GST_ERROR_OBJECT (videoflip, "Not negotiated yet"); GST_ERROR_OBJECT (videoflip, "Not negotiated yet");
return GST_FLOW_NOT_NEGOTIATED; return GST_FLOW_NOT_NEGOTIATED;
} }
invalid_in:
{
GST_ERROR_OBJECT (videoflip, "invalid input frame");
return GST_FLOW_OK;
}
invalid_out:
{
GST_ERROR_OBJECT (videoflip, "invalid output frame");
gst_video_frame_unmap (&src);
return GST_FLOW_OK;
}
} }
static gboolean static gboolean
@@ -928,6 +879,7 @@ gst_video_flip_src_event (GstBaseTransform * trans, GstEvent * event)
gdouble new_x, new_y, x, y; gdouble new_x, new_y, x, y;
GstStructure *structure; GstStructure *structure;
gboolean ret; gboolean ret;
GstVideoInfo *out_info = &GST_VIDEO_FILTER (trans)->out_info;
GST_DEBUG_OBJECT (vf, "handling %s event", GST_EVENT_TYPE_NAME (event)); GST_DEBUG_OBJECT (vf, "handling %s event", GST_EVENT_TYPE_NAME (event));
@@ -943,31 +895,31 @@ gst_video_flip_src_event (GstBaseTransform * trans, GstEvent * event)
switch (vf->method) { switch (vf->method) {
case GST_VIDEO_FLIP_METHOD_90R: case GST_VIDEO_FLIP_METHOD_90R:
new_x = y; new_x = y;
new_y = vf->out_info.width - x; new_y = out_info->width - x;
break; break;
case GST_VIDEO_FLIP_METHOD_90L: case GST_VIDEO_FLIP_METHOD_90L:
new_x = vf->out_info.height - y; new_x = out_info->height - y;
new_y = x; new_y = x;
break; break;
case GST_VIDEO_FLIP_METHOD_OTHER: case GST_VIDEO_FLIP_METHOD_OTHER:
new_x = vf->out_info.height - y; new_x = out_info->height - y;
new_y = vf->out_info.width - x; new_y = out_info->width - x;
break; break;
case GST_VIDEO_FLIP_METHOD_TRANS: case GST_VIDEO_FLIP_METHOD_TRANS:
new_x = y; new_x = y;
new_y = x; new_y = x;
break; break;
case GST_VIDEO_FLIP_METHOD_180: case GST_VIDEO_FLIP_METHOD_180:
new_x = vf->out_info.width - x; new_x = out_info->width - x;
new_y = vf->out_info.height - y; new_y = out_info->height - y;
break; break;
case GST_VIDEO_FLIP_METHOD_HORIZ: case GST_VIDEO_FLIP_METHOD_HORIZ:
new_x = vf->out_info.width - x; new_x = out_info->width - x;
new_y = y; new_y = y;
break; break;
case GST_VIDEO_FLIP_METHOD_VERT: case GST_VIDEO_FLIP_METHOD_VERT:
new_x = x; new_x = x;
new_y = vf->out_info.height - y; new_y = out_info->height - y;
break; break;
default: default:
new_x = x; new_x = x;
@@ -1047,6 +999,7 @@ gst_video_flip_class_init (GstVideoFlipClass * klass)
GObjectClass *gobject_class = (GObjectClass *) klass; GObjectClass *gobject_class = (GObjectClass *) klass;
GstElementClass *gstelement_class = (GstElementClass *) klass; GstElementClass *gstelement_class = (GstElementClass *) klass;
GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass; GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
GST_DEBUG_CATEGORY_INIT (video_flip_debug, "videoflip", 0, "videoflip"); GST_DEBUG_CATEGORY_INIT (video_flip_debug, "videoflip", 0, "videoflip");
@@ -1069,12 +1022,13 @@ gst_video_flip_class_init (GstVideoFlipClass * klass)
trans_class->transform_caps = trans_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_video_flip_transform_caps); GST_DEBUG_FUNCPTR (gst_video_flip_transform_caps);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_flip_set_caps);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_video_flip_get_unit_size);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_video_flip_transform);
trans_class->before_transform = trans_class->before_transform =
GST_DEBUG_FUNCPTR (gst_video_flip_before_transform); GST_DEBUG_FUNCPTR (gst_video_flip_before_transform);
trans_class->src_event = GST_DEBUG_FUNCPTR (gst_video_flip_src_event); trans_class->src_event = GST_DEBUG_FUNCPTR (gst_video_flip_src_event);
vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_flip_set_info);
vfilter_class->transform_frame =
GST_DEBUG_FUNCPTR (gst_video_flip_transform_frame);
} }
static void static void
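Event handlers that need the negotiated geometry, like videoflip's navigation remapping above, now read it from the out_info kept by GstVideoFilter rather than from a private copy; for example (hypothetical helper for the 90-degrees-right case):

#include <gst/video/gstvideofilter.h>

static void
foo_flip_map_pointer_90r (GstVideoFilter * vfilter, gdouble x, gdouble y,
    gdouble * new_x, gdouble * new_y)
{
  GstVideoInfo *out_info = &vfilter->out_info;

  /* same mapping as the GST_VIDEO_FLIP_METHOD_90R case above */
  *new_x = y;
  *new_y = out_info->width - x;
}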

View file

@@ -71,11 +71,8 @@ typedef struct _GstVideoFlipClass GstVideoFlipClass;
*/ */
struct _GstVideoFlip { struct _GstVideoFlip {
GstVideoFilter videofilter; GstVideoFilter videofilter;
/* < private > */ /* < private > */
GstVideoInfo in_info;
GstVideoInfo out_info;
GstVideoFlipMethod method; GstVideoFlipMethod method;
void (*process) (GstVideoFlip *videoflip, GstVideoFrame *dest, const GstVideoFrame *src); void (*process) (GstVideoFlip *videoflip, GstVideoFrame *dest, const GstVideoFrame *src);
}; };