video: port to new API

Add support for palette again.
Rewrite setup code for videoconvert using the new video methods.
Wim Taymans 2011-06-17 09:21:27 +02:00
parent 1aa7ad7dae
commit 6a254de438
9 changed files with 127 additions and 196 deletions
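
The common thread in the hunks below is that the old per-field caps parsers (gst_video_format_parse_caps, gst_video_parse_caps_framerate, gst_video_parse_caps_pixel_aspect_ratio, gst_video_format_parse_caps_interlaced) give way to a single gst_video_info_from_caps() call. A minimal sketch of the pattern the ported callers converge on, using only fields that appear in the diffs; the local variable names are illustrative, not from the commit:

GstVideoInfo info;
gint width, height, fps_n, fps_d;
guint par_n, par_d;
gboolean interlaced;

if (!gst_video_info_from_caps (&info, caps))
  return FALSE;                 /* caps were not valid, fixed video caps */

/* every value that used to need its own parse call is now a struct field */
width = info.width;
height = info.height;
fps_n = info.fps_n;
fps_d = info.fps_d;
par_n = info.par_n;
par_d = info.par_d;
interlaced = (info.flags & GST_VIDEO_FLAG_INTERLACED) != 0;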

View file

@@ -608,9 +608,8 @@ collect_information (GstDiscoverer * dc, const GstStructure * st,
GstCaps *caps;
GstStructure *caps_st, *tags_st;
const gchar *name;
int tmp, tmp2;
int tmp;
guint utmp;
gboolean btmp;
if (!st || !gst_structure_id_has_field (st, _CAPS_QUARK)) {
GST_WARNING ("Couldn't find caps !");
@@ -667,7 +666,7 @@ collect_information (GstDiscoverer * dc, const GstStructure * st,
} else if (g_str_has_prefix (name, "video/") ||
g_str_has_prefix (name, "image/")) {
GstDiscovererVideoInfo *info;
GstVideoFormat format;
GstVideoInfo vinfo;
if (parent)
info = (GstDiscovererVideoInfo *) parent;
@@ -677,27 +676,21 @@ collect_information (GstDiscoverer * dc, const GstStructure * st,
info->parent.caps = caps;
}
if (gst_video_format_parse_caps (caps, &format, &tmp, &tmp2)) {
info->width = (guint) tmp;
info->height = (guint) tmp2;
if (gst_video_info_from_caps (&vinfo, caps)) {
info->width = (guint) vinfo.width;
info->height = (guint) vinfo.height;
info->depth = (guint) 0;
info->par_num = vinfo.par_n;
info->par_denom = vinfo.par_d;
info->framerate_num = vinfo.fps_n;
info->framerate_denom = vinfo.fps_d;
info->interlaced = (vinfo.flags & GST_VIDEO_FLAG_INTERLACED) != 0;
}
if (gst_structure_get_int (caps_st, "depth", &tmp))
info->depth = (guint) tmp;
if (gst_video_parse_caps_pixel_aspect_ratio (caps, &tmp, &tmp2)) {
info->par_num = tmp;
info->par_denom = tmp2;
}
if (gst_video_parse_caps_framerate (caps, &tmp, &tmp2)) {
info->framerate_num = tmp;
info->framerate_denom = tmp2;
}
if (gst_video_format_parse_caps_interlaced (caps, &btmp))
info->interlaced = btmp;
if (gst_structure_id_has_field (st, _TAGS_QUARK)) {
gst_structure_id_get (st, _TAGS_QUARK,
GST_TYPE_STRUCTURE, &tags_st, NULL);

View file

@@ -37,35 +37,24 @@ gst_meta_video_get_info (void)
GstMetaVideo *
gst_buffer_add_meta_video (GstBuffer * buffer, GstMetaVideoFlags flags,
gst_buffer_add_meta_video (GstBuffer * buffer, GstVideoFlags flags,
GstVideoFormat format, guint width, guint height)
{
GstMetaVideo *meta;
guint i;
GstMetaVideoPlane plane[GST_VIDEO_MAX_PLANES];
GstVideoInfo info;
for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
gint offset;
offset = gst_video_format_get_component_offset (format, i, width, height);
/* end when the offset is 0 for something else that the first component */
if (i > 0 && offset == 0)
break;
plane[i].offset = offset;
plane[i].stride = gst_video_format_get_row_stride (format, i, width);
}
gst_video_info_set_format (&info, format, width, height);
meta = gst_buffer_add_meta_video_full (buffer, flags, format, width, height,
i, plane);
info.n_planes, info.plane);
return meta;
}
GstMetaVideo *
gst_buffer_add_meta_video_full (GstBuffer * buffer, GstMetaVideoFlags flags,
gst_buffer_add_meta_video_full (GstBuffer * buffer, GstVideoFlags flags,
GstVideoFormat format, guint width, guint height,
guint n_planes, GstMetaVideoPlane plane[GST_VIDEO_MAX_PLANES])
guint n_planes, GstVideoPlane plane[GST_VIDEO_MAX_PLANES])
{
GstMetaVideo *meta;
guint i;
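
The hand-rolled loop over gst_video_format_get_row_stride() and gst_video_format_get_component_offset() above is what gst_video_info_set_format() now replaces: it fills in the plane count and per-plane geometry. A rough sketch, assuming GstVideoPlane keeps the offset/stride fields that GstMetaVideoPlane had (format, width and height as in the function above):

GstVideoInfo info;
guint i;

gst_video_info_set_format (&info, format, width, height);

/* the computed layout is available per plane */
for (i = 0; i < info.n_planes; i++)
  GST_DEBUG ("plane %u: offset %" G_GSIZE_FORMAT ", stride %d",
      i, info.plane[i].offset, info.plane[i].stride);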

View file

@@ -32,41 +32,6 @@ G_BEGIN_DECLS
typedef struct _GstMetaVideo GstMetaVideo;
typedef struct _GstMetaVideoPlane GstMetaVideoPlane;
/**
* GstMetaVideoFlags:
* @GST_META_VIDEO_FLAG_NONE: no flags
* @GST_META_VIDEO_FLAG_INTERLACED:
* @GST_META_VIDEO_FLAG_TTF:
* @GST_META_VIDEO_FLAG_RFF:
* @GST_META_VIDEO_FLAG_ONEFIELD:
* @GST_META_VIDEO_FLAG_TELECINE:
* @GST_META_VIDEO_FLAG_PROGRESSIVE:
*
* Extra video flags
*/
typedef enum {
GST_META_VIDEO_FLAG_NONE = 0,
GST_META_VIDEO_FLAG_INTERLACED = (1 << 0),
GST_META_VIDEO_FLAG_TTF = (1 << 1),
GST_META_VIDEO_FLAG_RFF = (1 << 2),
GST_META_VIDEO_FLAG_ONEFIELD = (1 << 3),
GST_META_VIDEO_FLAG_TELECINE = (1 << 4),
GST_META_VIDEO_FLAG_PROGRESSIVE = (1 << 5)
} GstMetaVideoFlags;
/**
* GstMetaVideoPlane:
* @offset: offset of the first pixel in the buffer memory region
* @stride: stride of the image lines. Can be negative when the image is
* upside-down
*
* Information for one video plane.
*/
struct _GstMetaVideoPlane {
gsize offset;
gint stride;
};
/**
* GstMetaVideo:
* @meta: parent #GstMeta
@@ -81,16 +46,15 @@ struct _GstMetaVideoPlane {
struct _GstMetaVideo {
GstMeta meta;
GstMetaVideoFlags flags;
GstBuffer *buffer;
GstVideoFlags flags;
GstVideoFormat format;
guint width;
guint height;
guint n_planes;
GstMetaVideoPlane plane[GST_VIDEO_MAX_PLANES];
GstVideoPlane plane[GST_VIDEO_MAX_PLANES];
gpointer (*map) (GstMetaVideo *meta, guint plane, gint *stride,
GstMapFlags flags);
@@ -100,11 +64,11 @@ struct _GstMetaVideo {
const GstMetaInfo * gst_meta_video_get_info (void);
#define gst_buffer_get_meta_video(b) ((GstMetaVideo*)gst_buffer_get_meta((b),GST_META_INFO_VIDEO))
GstMetaVideo * gst_buffer_add_meta_video (GstBuffer *buffer, GstMetaVideoFlags flags,
GstMetaVideo * gst_buffer_add_meta_video (GstBuffer *buffer, GstVideoFlags flags,
GstVideoFormat format, guint width, guint height);
GstMetaVideo * gst_buffer_add_meta_video_full (GstBuffer *buffer, GstMetaVideoFlags flags,
GstMetaVideo * gst_buffer_add_meta_video_full (GstBuffer *buffer, GstVideoFlags flags,
GstVideoFormat format, guint width, guint height,
guint n_planes, GstMetaVideoPlane plane[GST_VIDEO_MAX_PLANES]);
guint n_planes, GstVideoPlane plane[GST_VIDEO_MAX_PLANES]);
gpointer gst_meta_video_map (GstMetaVideo *meta, guint plane, gint *stride,
GstMapFlags flags);

View file

@@ -77,15 +77,14 @@ static gboolean
gst_video_filter_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
gsize * size)
{
GstVideoFormat fmt;
gint width, height;
GstVideoInfo info;
if (!gst_video_format_parse_caps (caps, &fmt, &width, &height)) {
if (!gst_video_info_from_caps (&info, caps)) {
GST_WARNING_OBJECT (btrans, "Failed to parse caps %" GST_PTR_FORMAT, caps);
return FALSE;
}
*size = gst_video_format_get_size (fmt, width, height);
*size = info.size;
GST_DEBUG_OBJECT (btrans, "Returning size %u bytes for caps %"
GST_PTR_FORMAT, *size, caps);

View file

@@ -713,6 +713,7 @@ gst_video_info_set_format (GstVideoInfo * info, GstVideoFormat format,
g_return_if_fail (info != NULL);
g_return_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN);
info->flags = 0;
info->format = format;
info->width = width;
info->height = height;
@@ -1847,7 +1848,36 @@ gst_video_event_parse_still_frame (GstEvent * event, gboolean * in_still)
return TRUE;
}
#if 0
/**
* gst_video_parse_caps_framerate:
* @caps: pointer to a #GstCaps instance
* @fps_n: pointer to integer to hold numerator of frame rate (output)
* @fps_d: pointer to integer to hold denominator of frame rate (output)
*
* Extracts the frame rate from @caps and places the values in the locations
* pointed to by @fps_n and @fps_d. Returns TRUE if the values could be
* parsed correctly, FALSE if not.
*
* This function can be used with #GstCaps that have any media type; it
* is not limited to formats handled by #GstVideoFormat.
*
* Since: 0.10.16
*
* Returns: TRUE if @caps was parsed correctly.
*/
gboolean
gst_video_parse_caps_framerate (GstCaps * caps, int *fps_n, int *fps_d)
{
GstStructure *structure;
if (!gst_caps_is_fixed (caps))
return FALSE;
structure = gst_caps_get_structure (caps, 0);
return gst_structure_get_fraction (structure, "framerate", fps_n, fps_d);
}
/**
* gst_video_parse_caps_palette:
* @caps: #GstCaps to parse
@@ -1879,4 +1909,3 @@ gst_video_parse_caps_palette (GstCaps * caps)
return p;
}
#endif
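
gst_video_parse_caps_framerate() was documented to work on caps of any media type, not only the raw formats handled by GstVideoFormat; a caller that relied on that can keep doing what the now-disabled helper did internally and read the fraction straight from the caps structure:

GstStructure *s;
gint fps_n, fps_d;

if (gst_caps_is_fixed (caps)) {
  s = gst_caps_get_structure (caps, 0);
  gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d);
}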

View file

@@ -217,6 +217,7 @@ struct _GstVideoInfo {
const gchar *color_matrix;
const gchar *chroma_site;
GstBuffer *palette;
guint par_n;
guint par_d;
@@ -319,6 +320,9 @@ gboolean gst_video_calculate_display_ratio (guint * dar_n,
guint display_par_n,
guint display_par_d);
gboolean gst_video_parse_caps_framerate (GstCaps * caps, int *fps_n, int *fps_d);
GstBuffer * gst_video_parse_caps_palette (GstCaps * caps);
#if 0
int gst_video_format_get_component_width (GstVideoFormat format,
int component,
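
Palette support comes back through this header: GstVideoInfo grows a palette buffer and gst_video_parse_caps_palette() stays outside the #if 0 block. A hedged sketch of how an element can pull the palette out of RGB8_PALETTED caps, using the 0.11-era gst_buffer_map()/gst_buffer_unmap() signatures visible in the videoconvert hunks below (incaps as in gst_video_convert_set_caps); the diff elides the step that hands the entries to the converter, so it is only a comment here:

GstBuffer *palette;
guint32 *data;
gsize size;

palette = gst_video_parse_caps_palette (incaps);
if (palette == NULL)
  goto invalid_palette;

data = gst_buffer_map (palette, &size, NULL, GST_MAP_READ);
/* ... pass the palette entries to the converter here ... */
gst_buffer_unmap (palette, data, -1);
gst_buffer_unref (palette);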

View file

@@ -165,15 +165,8 @@ gst_video_convert_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
GstCaps * outcaps)
{
GstVideoConvert *space;
GstVideoFormat in_format;
GstVideoFormat out_format;
gint in_height, in_width;
gint out_height, out_width;
gint in_fps_n, in_fps_d, in_par_n, in_par_d;
gint out_fps_n, out_fps_d, out_par_n, out_par_d;
gboolean have_in_par, have_out_par;
gboolean have_in_interlaced, have_out_interlaced;
gboolean in_interlaced, out_interlaced;
GstVideoInfo in_info;
GstVideoInfo out_info;
gboolean ret;
ColorSpaceColorSpec in_spec, out_spec;
@@ -185,25 +178,14 @@ gst_video_convert_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
/* input caps */
ret = gst_video_format_parse_caps (incaps, &in_format, &in_width, &in_height);
ret = gst_video_info_from_caps (&in_info, incaps);
if (!ret)
goto no_width_height;
goto invalid_caps;
ret = gst_video_parse_caps_framerate (incaps, &in_fps_n, &in_fps_d);
if (!ret)
goto no_framerate;
have_in_par = gst_video_parse_caps_pixel_aspect_ratio (incaps,
&in_par_n, &in_par_d);
have_in_interlaced = gst_video_format_parse_caps_interlaced (incaps,
&in_interlaced);
if (gst_video_format_is_rgb (in_format)) {
if (gst_video_format_is_rgb (in_info.format)) {
in_spec = COLOR_SPEC_RGB;
} else if (gst_video_format_is_yuv (in_format)) {
const gchar *matrix = gst_video_parse_caps_color_matrix (incaps);
if (matrix && g_str_equal (matrix, "hdtv"))
} else if (gst_video_format_is_yuv (in_info.format)) {
if (in_info.color_matrix && g_str_equal (in_info.color_matrix, "hdtv"))
in_spec = COLOR_SPEC_YUV_BT709;
else
in_spec = COLOR_SPEC_YUV_BT470_6;
@@ -213,27 +195,14 @@ gst_video_convert_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
/* output caps */
ret =
gst_video_format_parse_caps (outcaps, &out_format, &out_width,
&out_height);
ret = gst_video_info_from_caps (&out_info, outcaps);
if (!ret)
goto no_width_height;
goto invalid_caps;
ret = gst_video_parse_caps_framerate (outcaps, &out_fps_n, &out_fps_d);
if (!ret)
goto no_framerate;
have_out_par = gst_video_parse_caps_pixel_aspect_ratio (outcaps,
&out_par_n, &out_par_d);
have_out_interlaced = gst_video_format_parse_caps_interlaced (incaps,
&out_interlaced);
if (gst_video_format_is_rgb (out_format)) {
if (gst_video_format_is_rgb (out_info.format)) {
out_spec = COLOR_SPEC_RGB;
} else if (gst_video_format_is_yuv (out_format)) {
const gchar *matrix = gst_video_parse_caps_color_matrix (outcaps);
if (matrix && g_str_equal (matrix, "hdtv"))
} else if (gst_video_format_is_yuv (out_info.format)) {
if (out_info.color_matrix && g_str_equal (out_info.color_matrix, "hdtv"))
out_spec = COLOR_SPEC_YUV_BT709;
else
out_spec = COLOR_SPEC_YUV_BT470_6;
@@ -242,38 +211,38 @@ gst_video_convert_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
}
/* these must match */
if (in_width != out_width || in_height != out_height ||
in_fps_n != out_fps_n || in_fps_d != out_fps_d)
if (in_info.width != out_info.width || in_info.height != out_info.height ||
in_info.fps_n != out_info.fps_n || in_info.fps_d != out_info.fps_d)
goto format_mismatch;
/* if present, these must match too */
if (have_in_par && have_out_par &&
(in_par_n != out_par_n || in_par_d != out_par_d))
if (in_info.par_n != out_info.par_n || in_info.par_d != out_info.par_d)
goto format_mismatch;
/* if present, these must match too */
if (have_in_interlaced && have_out_interlaced &&
in_interlaced != out_interlaced)
if ((in_info.flags & GST_VIDEO_FLAG_INTERLACED) !=
(out_info.flags & GST_VIDEO_FLAG_INTERLACED))
goto format_mismatch;
space->from_format = in_format;
space->from_info = in_info;
space->from_spec = in_spec;
space->to_format = out_format;
space->to_info = out_info;
space->to_spec = out_spec;
space->width = in_width;
space->height = in_height;
space->interlaced = in_interlaced;
space->width = in_info.width;
space->height = in_info.height;
space->interlaced = (in_info.flags & GST_VIDEO_FLAG_INTERLACED) != 0;
space->convert = videoconvert_convert_new (out_format, out_spec, in_format,
in_spec, in_width, in_height);
space->convert =
videoconvert_convert_new (out_info.format, out_spec, in_info.format,
in_spec, in_info.width, in_info.height);
if (space->convert) {
videoconvert_convert_set_interlaced (space->convert, in_interlaced);
videoconvert_convert_set_interlaced (space->convert, space->interlaced);
}
/* palette, only for from data */
if (space->from_format == GST_VIDEO_FORMAT_RGB8_PALETTED &&
space->to_format == GST_VIDEO_FORMAT_RGB8_PALETTED) {
if (space->from_info.format == GST_VIDEO_FORMAT_RGB8_PALETTED &&
space->to_info.format == GST_VIDEO_FORMAT_RGB8_PALETTED) {
goto format_mismatch;
} else if (space->from_format == GST_VIDEO_FORMAT_RGB8_PALETTED) {
} else if (space->from_info.format == GST_VIDEO_FORMAT_RGB8_PALETTED) {
GstBuffer *palette;
guint32 *data;
@@ -290,7 +259,7 @@ gst_video_convert_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
gst_buffer_unmap (palette, data, -1);
gst_buffer_unref (palette);
} else if (space->to_format == GST_VIDEO_FORMAT_RGB8_PALETTED) {
} else if (space->to_info.format == GST_VIDEO_FORMAT_RGB8_PALETTED) {
const guint32 *palette;
GstBuffer *p_buf;
@@ -302,37 +271,31 @@ gst_video_convert_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
gst_buffer_unref (p_buf);
}
GST_DEBUG ("reconfigured %d %d", space->from_format, space->to_format);
GST_DEBUG ("reconfigured %d %d", space->from_info.format,
space->to_info.format);
return TRUE;
/* ERRORS */
no_width_height:
invalid_caps:
{
GST_ERROR_OBJECT (space, "did not specify width or height");
space->from_format = GST_VIDEO_FORMAT_UNKNOWN;
space->to_format = GST_VIDEO_FORMAT_UNKNOWN;
return FALSE;
}
no_framerate:
{
GST_ERROR_OBJECT (space, "did not specify framerate");
space->from_format = GST_VIDEO_FORMAT_UNKNOWN;
space->to_format = GST_VIDEO_FORMAT_UNKNOWN;
GST_ERROR_OBJECT (space, "invalid caps");
space->from_info.format = GST_VIDEO_FORMAT_UNKNOWN;
space->to_info.format = GST_VIDEO_FORMAT_UNKNOWN;
return FALSE;
}
format_mismatch:
{
GST_ERROR_OBJECT (space, "input and output formats do not match");
space->from_format = GST_VIDEO_FORMAT_UNKNOWN;
space->to_format = GST_VIDEO_FORMAT_UNKNOWN;
space->from_info.format = GST_VIDEO_FORMAT_UNKNOWN;
space->to_info.format = GST_VIDEO_FORMAT_UNKNOWN;
return FALSE;
}
invalid_palette:
{
GST_ERROR_OBJECT (space, "invalid palette");
space->from_format = GST_VIDEO_FORMAT_UNKNOWN;
space->to_format = GST_VIDEO_FORMAT_UNKNOWN;
space->from_info.format = GST_VIDEO_FORMAT_UNKNOWN;
space->to_info.format = GST_VIDEO_FORMAT_UNKNOWN;
return FALSE;
}
}
@@ -394,8 +357,8 @@ gst_video_convert_class_init (GstVideoConvertClass * klass)
static void
gst_video_convert_init (GstVideoConvert * space)
{
space->from_format = GST_VIDEO_FORMAT_UNKNOWN;
space->to_format = GST_VIDEO_FORMAT_UNKNOWN;
space->from_info.format = GST_VIDEO_FORMAT_UNKNOWN;
space->to_info.format = GST_VIDEO_FORMAT_UNKNOWN;
}
void
@@ -439,14 +402,13 @@ gst_video_convert_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
gsize * size)
{
gboolean ret = TRUE;
GstVideoFormat format;
gint width, height;
GstVideoInfo info;
g_assert (size);
ret = gst_video_format_parse_caps (caps, &format, &width, &height);
ret = gst_video_info_from_caps (&info, caps);
if (ret) {
*size = gst_video_format_get_size (format, width, height);
*size = info.size;
}
return ret;
@@ -459,13 +421,15 @@ gst_video_convert_transform (GstBaseTransform * btrans, GstBuffer * inbuf,
GstVideoConvert *space;
guint8 *indata, *outdata;
gsize insize, outsize;
gint i;
space = GST_VIDEO_CONVERT_CAST (btrans);
GST_DEBUG ("from %d -> to %d", space->from_format, space->to_format);
GST_DEBUG ("from %d -> to %d", space->from_info.format,
space->to_info.format);
if (G_UNLIKELY (space->from_format == GST_VIDEO_FORMAT_UNKNOWN ||
space->to_format == GST_VIDEO_FORMAT_UNKNOWN))
if (G_UNLIKELY (space->from_info.format == GST_VIDEO_FORMAT_UNKNOWN ||
space->to_info.format == GST_VIDEO_FORMAT_UNKNOWN))
goto unknown_format;
videoconvert_convert_set_dither (space->convert, space->dither);
@@ -473,13 +437,22 @@ gst_video_convert_transform (GstBaseTransform * btrans, GstBuffer * inbuf,
indata = gst_buffer_map (inbuf, &insize, NULL, GST_MAP_READ);
outdata = gst_buffer_map (outbuf, &outsize, NULL, GST_MAP_WRITE);
for (i = 0; i < space->to_info.n_planes; i++) {
space->convert->dest_stride[i] = space->to_info.plane[i].stride;
space->convert->dest_offset[i] = space->to_info.plane[i].offset;
space->convert->src_stride[i] = space->from_info.plane[i].stride;
space->convert->src_offset[i] = space->from_info.plane[i].offset;
}
videoconvert_convert_convert (space->convert, outdata, indata);
gst_buffer_unmap (outbuf, outdata, outsize);
gst_buffer_unmap (inbuf, indata, insize);
/* baseclass copies timestamps */
GST_DEBUG ("from %d -> to %d done", space->from_format, space->to_format);
GST_DEBUG ("from %d -> to %d done", space->from_info.format,
space->to_info.format);
return GST_FLOW_OK;

View file

@@ -49,11 +49,11 @@ struct _GstVideoConvert {
gint width, height;
gboolean interlaced;
gfloat fps;
GstVideoFormat from_format;
GstVideoInfo from_info;
GstVideoInfo to_info;
ColorSpaceColorSpec from_spec;
GstVideoFormat to_format;
ColorSpaceColorSpec to_spec;
VideoConvert *convert;

View file

@@ -84,26 +84,6 @@ videoconvert_convert_new (GstVideoFormat to_format, ColorSpaceColorSpec to_spec,
convert->use_16bit = FALSE;
}
for (i = 0; i < 4; i++) {
convert->dest_stride[i] = gst_video_format_get_row_stride (to_format, i,
width);
convert->dest_offset[i] = gst_video_format_get_component_offset (to_format,
i, width, height);
if (i == 0)
convert->dest_offset[i] = 0;
convert->src_stride[i] = gst_video_format_get_row_stride (from_format, i,
width);
convert->src_offset[i] = gst_video_format_get_component_offset (from_format,
i, width, height);
if (i == 0)
convert->src_offset[i] = 0;
GST_DEBUG ("%d: dest %d %d src %d %d", i,
convert->dest_stride[i], convert->dest_offset[i],
convert->src_stride[i], convert->src_offset[i]);
}
videoconvert_convert_lookup_fastpath (convert);
videoconvert_convert_lookup_getput (convert);