videoconvert: run chroma resamplers

Run the chroma upsampler after unpack and the chroma subsampler
before pack for higher quality conversions and correct chroma siting.
This commit is contained in:
Wim Taymans 2013-05-17 16:34:30 +02:00
parent e5bc2b0d05
commit 7b8ff707b6
2 changed files with 216 additions and 66 deletions

View file

@ -39,6 +39,7 @@ static void videoconvert_convert_matrix16 (VideoConvert * convert,
gpointer pixels); gpointer pixels);
static gboolean videoconvert_convert_lookup_fastpath (VideoConvert * convert); static gboolean videoconvert_convert_lookup_fastpath (VideoConvert * convert);
static gboolean videoconvert_convert_compute_matrix (VideoConvert * convert); static gboolean videoconvert_convert_compute_matrix (VideoConvert * convert);
static gboolean videoconvert_convert_compute_resample (VideoConvert * convert);
static void videoconvert_dither_verterr (VideoConvert * convert, static void videoconvert_dither_verterr (VideoConvert * convert,
guint16 * pixels, int j); guint16 * pixels, int j);
static void videoconvert_dither_halftone (VideoConvert * convert, static void videoconvert_dither_halftone (VideoConvert * convert,
@ -49,7 +50,7 @@ VideoConvert *
videoconvert_convert_new (GstVideoInfo * in_info, GstVideoInfo * out_info) videoconvert_convert_new (GstVideoInfo * in_info, GstVideoInfo * out_info)
{ {
VideoConvert *convert; VideoConvert *convert;
int width, lines; gint width;
convert = g_malloc0 (sizeof (VideoConvert)); convert = g_malloc0 (sizeof (VideoConvert));
@ -57,21 +58,21 @@ videoconvert_convert_new (GstVideoInfo * in_info, GstVideoInfo * out_info)
convert->out_info = *out_info; convert->out_info = *out_info;
convert->dither16 = NULL; convert->dither16 = NULL;
convert->width = GST_VIDEO_INFO_WIDTH (in_info);
convert->height = GST_VIDEO_INFO_HEIGHT (in_info);
if (!videoconvert_convert_lookup_fastpath (convert)) { if (!videoconvert_convert_lookup_fastpath (convert)) {
convert->convert = videoconvert_convert_generic; convert->convert = videoconvert_convert_generic;
if (!videoconvert_convert_compute_matrix (convert)) if (!videoconvert_convert_compute_matrix (convert))
goto no_convert; goto no_convert;
if (!videoconvert_convert_compute_resample (convert))
goto no_convert;
} }
convert->width = GST_VIDEO_INFO_WIDTH (in_info);
convert->height = GST_VIDEO_INFO_HEIGHT (in_info);
width = convert->width; width = convert->width;
lines = out_info->finfo->pack_lines;
convert->lines = lines; convert->lines = out_info->finfo->pack_lines;
convert->tmpline = g_malloc (lines * sizeof (guint16) * (width + 8) * 4);
convert->errline = g_malloc0 (sizeof (guint16) * width * 4); convert->errline = g_malloc0 (sizeof (guint16) * width * 4);
return convert; return convert;
@ -87,7 +88,11 @@ no_convert:
void void
videoconvert_convert_free (VideoConvert * convert) videoconvert_convert_free (VideoConvert * convert)
{ {
g_free (convert->tmpline); gint i;
for (i = 0; i < convert->lines; i++)
g_free (convert->tmplines[i]);
g_free (convert->tmplines);
g_free (convert->errline); g_free (convert->errline);
g_free (convert); g_free (convert);
@ -368,8 +373,82 @@ videoconvert_dither_halftone (VideoConvert * convert, guint16 * pixels, int j)
} }
} }
/* Set up the chroma up/downsamplers used by the generic conversion path
 * and allocate the temporary line buffers they work on.
 *
 * The upsampler converts the source's subsampled chroma to full
 * resolution after unpack; the downsampler re-subsamples chroma for the
 * destination format before pack.  Each resampler reports how many
 * consecutive lines it needs (n_lines) and at which line offset it
 * operates; when a format needs no resampling the values default to a
 * single line at offset 0.
 *
 * Returns TRUE (allocation failures abort via g_malloc). */
static gboolean
videoconvert_convert_compute_resample (VideoConvert * convert)
{
GstVideoInfo *in_info, *out_info;
const GstVideoFormatInfo *sfinfo, *dfinfo;
gint lines, i;
gint width;
in_info = &convert->in_info;
out_info = &convert->out_info;
sfinfo = in_info->finfo;
dfinfo = out_info->finfo;
width = convert->width;
/* Upsampler: bring the input's chroma (subsampling taken from component 2
 * of the source format) up to the unpack format's full resolution.
 * NOTE(review): positive w_sub/h_sub here appear to request upsampling —
 * confirm against gst_video_chroma_resample_new() semantics. */
convert->upsample = gst_video_chroma_resample_new (0,
in_info->chroma_site, 0, sfinfo->unpack_format, sfinfo->w_sub[2],
sfinfo->h_sub[2]);
if (convert->upsample) {
gst_video_chroma_resample_get_info (convert->upsample,
&convert->up_n_lines, &convert->up_offset);
} else {
/* No resampling needed: operate line by line with no offset. */
convert->up_n_lines = 1;
convert->up_offset = 0;
}
GST_DEBUG ("upsample: %p, offset %d, n_lines %d", convert->upsample,
convert->up_offset, convert->up_n_lines);
/* Downsampler: negated subsampling factors of the output format request
 * the inverse direction (full-resolution chroma back to subsampled). */
convert->downsample = gst_video_chroma_resample_new (0,
out_info->chroma_site, 0, dfinfo->unpack_format, -dfinfo->w_sub[2],
-dfinfo->h_sub[2]);
if (convert->downsample) {
gst_video_chroma_resample_get_info (convert->downsample,
&convert->down_n_lines, &convert->down_offset);
} else {
convert->down_n_lines = 1;
convert->down_offset = 0;
}
GST_DEBUG ("downsample: %p, offset %d, n_lines %d", convert->downsample,
convert->down_offset, convert->down_n_lines);
/* One temporary line per line the busier resampler touches at once.
 * Each line holds 4 components of up to 16 bits per pixel, with 8 pixels
 * of padding (presumably for resampler over-read — TODO confirm). */
lines = MAX (convert->down_n_lines, convert->up_n_lines);
convert->tmplines = g_malloc (lines * sizeof (gpointer));
for (i = 0; i < lines; i++)
convert->tmplines[i] = g_malloc (sizeof (guint16) * (width + 8) * 4);
return TRUE;
}
#define TO_16(x) (((x)<<8) | (x)) #define TO_16(x) (((x)<<8) | (x))
/* Widen one line of 8-bit samples to 16 bits in place.
 * The buffer is assumed large enough for width * 4 16-bit components;
 * walking from the last sample backwards guarantees every 8-bit source
 * value is read before the growing 16-bit data overwrites it. */
static void
convert_to16 (gpointer line, gint width)
{
  guint8 *src8 = line;
  guint16 *dst16 = line;
  gint idx = width * 4;

  while (--idx >= 0)
    dst16[idx] = TO_16 (src8[idx]);
}
/* Narrow one line of 16-bit samples to 8 bits in place by dropping the
 * low byte.  Front-to-back iteration is safe here: each 8-bit store
 * lands at or before the 16-bit sample it was read from, so no source
 * data is clobbered before use. */
static void
convert_to8 (gpointer line, gint width)
{
  guint16 *src16 = line;
  guint8 *dst8 = line;
  gint n = width * 4;
  gint idx;

  for (idx = 0; idx < n; idx++)
    dst8[idx] = (guint8) (src16[idx] >> 8);
}
#define UNPACK_FRAME(frame,dest,line,width) \ #define UNPACK_FRAME(frame,dest,line,width) \
frame->info.finfo->unpack_func (frame->info.finfo, \ frame->info.finfo->unpack_func (frame->info.finfo, \
(GST_VIDEO_FRAME_IS_INTERLACED (frame) ? \ (GST_VIDEO_FRAME_IS_INTERLACED (frame) ? \
@ -389,13 +468,18 @@ static void
videoconvert_convert_generic (VideoConvert * convert, GstVideoFrame * dest, videoconvert_convert_generic (VideoConvert * convert, GstVideoFrame * dest,
const GstVideoFrame * src) const GstVideoFrame * src)
{ {
int i, j, k; int j, k;
gint width, height, lines; gint width, height, lines, max_lines;
guint in_bits, out_bits; guint in_bits, out_bits;
gconstpointer pal; gconstpointer pal;
gsize palsize; gsize palsize;
guint8 *tmpline8; guint up_n_lines, down_n_lines;
guint16 *tmpline16; gint up_offset, down_offset;
gint in_lines, out_lines;
gint up_line, down_line;
gint start_offset, stop_offset;
gpointer in_tmplines[8];
gpointer out_tmplines[8];
height = convert->height; height = convert->height;
width = convert->width; width = convert->width;
@ -404,48 +488,105 @@ videoconvert_convert_generic (VideoConvert * convert, GstVideoFrame * dest,
out_bits = convert->out_bits; out_bits = convert->out_bits;
lines = convert->lines; lines = convert->lines;
up_n_lines = convert->up_n_lines;
up_offset = convert->up_offset;
down_n_lines = convert->down_n_lines;
down_offset = convert->down_offset;
max_lines = MAX (down_n_lines, up_n_lines);
for (j = 0; j < height; j += lines) { in_lines = 0;
tmpline8 = convert->tmpline; out_lines = 0;
tmpline16 = convert->tmpline;
for (k = 0; k < lines; k++) { GST_DEBUG ("up_offset %d, up_n_lines %u", up_offset, up_n_lines);
if (in_bits == 16) {
UNPACK_FRAME (src, tmpline16, j + k, width);
} else {
UNPACK_FRAME (src, tmpline8, j + k, width);
if (out_bits == 16) start_offset = MIN (up_offset, down_offset);
for (i = (width - 1) * 4; i >= 0; i--) stop_offset = height + start_offset + MAX (up_n_lines, down_n_lines);
tmpline16[i] = TO_16 (tmpline8[i]);
for (; start_offset < stop_offset; start_offset++) {
guint idx, start;
idx = CLAMP (start_offset, 0, height);
in_tmplines[in_lines] = convert->tmplines[idx % max_lines];
out_tmplines[out_lines] = in_tmplines[in_lines];
GST_DEBUG ("start_offset %d, %d, idx %u, in %d, out %d", start_offset,
up_offset, idx, in_lines, out_lines);
up_line = up_offset + in_lines;
/* extract the next line */
if (up_line >= 0 && up_line < height) {
GST_DEBUG ("unpack line %d", up_line);
UNPACK_FRAME (src, in_tmplines[in_lines], up_line, width);
} }
if (start_offset >= up_offset)
in_lines++;
if (start_offset >= down_offset)
out_lines++;
if (in_lines < up_n_lines)
continue;
in_lines = 0;
/* we have enough lines to upsample */
if (convert->upsample) {
GST_DEBUG ("doing upsample");
gst_video_chroma_resample (convert->upsample, in_tmplines, width);
}
/* convert upsampled lines */
for (k = 0; k < up_n_lines; k++) {
down_line = up_offset + k;
/* only takes lines with valid output */
if (down_line < 0 || down_line >= height)
continue;
GST_DEBUG ("handle line %d, %d/%d, down_line %d", k, out_lines,
down_n_lines, down_line);
if (out_bits == 16 || in_bits == 16) { if (out_bits == 16 || in_bits == 16) {
/* FIXME, we can scale in the conversion matrix */
if (in_bits == 8)
convert_to16 (in_tmplines[k], width);
if (convert->matrix) if (convert->matrix)
convert->matrix (convert, tmpline16); convert->matrix (convert, in_tmplines[k]);
if (convert->dither16) if (convert->dither16)
convert->dither16 (convert, tmpline16, j); convert->dither16 (convert, in_tmplines[k], down_line);
tmpline8 += width * 8;
tmpline16 += width * 4; if (out_bits == 8)
convert_to8 (in_tmplines[k], width);
} else { } else {
if (convert->matrix) if (convert->matrix)
convert->matrix (convert, tmpline8); convert->matrix (convert, in_tmplines[k]);
tmpline8 += width * 4;
} }
} }
tmpline8 = convert->tmpline;
tmpline16 = convert->tmpline;
if (out_bits == 16) { start = 0;
PACK_FRAME (dest, tmpline16, j, width); while (out_lines >= down_n_lines) {
} else { GST_DEBUG ("doing downsample %u", start);
if (in_bits == 16) if (convert->downsample)
for (i = 0; i < width * 4 * lines; i++) gst_video_chroma_resample (convert->downsample,
tmpline8[i] = tmpline16[i] >> 8; &out_tmplines[start], width);
PACK_FRAME (dest, tmpline8, j, width); for (j = 0; j < down_n_lines; j += lines) {
idx = down_offset + j;
if (idx >= 0 && idx < height) {
GST_DEBUG ("packing line %d %d %d", j + start, down_offset, idx);
/* FIXME, not correct if lines > 1 */
PACK_FRAME (dest, out_tmplines[j + start], idx, width);
} }
} }
down_offset += down_n_lines;
start += down_n_lines;
out_lines -= down_n_lines;
}
up_offset += up_n_lines;
}
if ((pal = if ((pal =
gst_video_format_get_palette (GST_VIDEO_FRAME_FORMAT (dest), gst_video_format_get_palette (GST_VIDEO_FRAME_FORMAT (dest),
&palsize))) { &palsize))) {
@ -513,8 +654,8 @@ convert_I420_YUY2 (VideoConvert * convert, GstVideoFrame * dest,
/* now handle last line */ /* now handle last line */
if (height & 1) { if (height & 1) {
UNPACK_FRAME (src, convert->tmpline, height - 1, width); UNPACK_FRAME (src, convert->tmplines[0], height - 1, width);
PACK_FRAME (dest, convert->tmpline, height - 1, width); PACK_FRAME (dest, convert->tmplines[0], height - 1, width);
} }
} }
@ -541,8 +682,8 @@ convert_I420_UYVY (VideoConvert * convert, GstVideoFrame * dest,
/* now handle last line */ /* now handle last line */
if (height & 1) { if (height & 1) {
UNPACK_FRAME (src, convert->tmpline, height - 1, width); UNPACK_FRAME (src, convert->tmplines[0], height - 1, width);
PACK_FRAME (dest, convert->tmpline, height - 1, width); PACK_FRAME (dest, convert->tmplines[0], height - 1, width);
} }
} }
@ -568,8 +709,8 @@ convert_I420_AYUV (VideoConvert * convert, GstVideoFrame * dest,
/* now handle last line */ /* now handle last line */
if (height & 1) { if (height & 1) {
UNPACK_FRAME (src, convert->tmpline, height - 1, width); UNPACK_FRAME (src, convert->tmplines[0], height - 1, width);
PACK_FRAME (dest, convert->tmpline, height - 1, width); PACK_FRAME (dest, convert->tmplines[0], height - 1, width);
} }
} }
@ -618,8 +759,8 @@ convert_I420_Y444 (VideoConvert * convert, GstVideoFrame * dest,
/* now handle last line */ /* now handle last line */
if (height & 1) { if (height & 1) {
UNPACK_FRAME (src, convert->tmpline, height - 1, width); UNPACK_FRAME (src, convert->tmplines[0], height - 1, width);
PACK_FRAME (dest, convert->tmpline, height - 1, width); PACK_FRAME (dest, convert->tmplines[0], height - 1, width);
} }
} }
@ -649,8 +790,8 @@ convert_YUY2_I420 (VideoConvert * convert, GstVideoFrame * dest,
/* now handle last line */ /* now handle last line */
if (height & 1) { if (height & 1) {
UNPACK_FRAME (src, convert->tmpline, height - 1, width); UNPACK_FRAME (src, convert->tmplines[0], height - 1, width);
PACK_FRAME (dest, convert->tmpline, height - 1, width); PACK_FRAME (dest, convert->tmplines[0], height - 1, width);
} }
} }
@ -668,8 +809,8 @@ convert_YUY2_AYUV (VideoConvert * convert, GstVideoFrame * dest,
/* now handle last line */ /* now handle last line */
if (height & 1) { if (height & 1) {
UNPACK_FRAME (src, convert->tmpline, height - 1, width); UNPACK_FRAME (src, convert->tmplines[0], height - 1, width);
PACK_FRAME (dest, convert->tmpline, height - 1, width); PACK_FRAME (dest, convert->tmplines[0], height - 1, width);
} }
} }
@ -724,8 +865,8 @@ convert_UYVY_I420 (VideoConvert * convert, GstVideoFrame * dest,
/* now handle last line */ /* now handle last line */
if (height & 1) { if (height & 1) {
UNPACK_FRAME (src, convert->tmpline, height - 1, width); UNPACK_FRAME (src, convert->tmplines[0], height - 1, width);
PACK_FRAME (dest, convert->tmpline, height - 1, width); PACK_FRAME (dest, convert->tmplines[0], height - 1, width);
} }
} }
@ -743,8 +884,8 @@ convert_UYVY_AYUV (VideoConvert * convert, GstVideoFrame * dest,
/* now handle last line */ /* now handle last line */
if (height & 1) { if (height & 1) {
UNPACK_FRAME (src, convert->tmpline, height - 1, width); UNPACK_FRAME (src, convert->tmplines[0], height - 1, width);
PACK_FRAME (dest, convert->tmpline, height - 1, width); PACK_FRAME (dest, convert->tmplines[0], height - 1, width);
} }
} }
@ -844,8 +985,8 @@ convert_AYUV_Y42B (VideoConvert * convert, GstVideoFrame * dest,
/* now handle last line */ /* now handle last line */
if (height & 1) { if (height & 1) {
UNPACK_FRAME (src, convert->tmpline, height - 1, width); UNPACK_FRAME (src, convert->tmplines[0], height - 1, width);
PACK_FRAME (dest, convert->tmpline, height - 1, width); PACK_FRAME (dest, convert->tmplines[0], height - 1, width);
} }
} }
@ -886,8 +1027,8 @@ convert_Y42B_I420 (VideoConvert * convert, GstVideoFrame * dest,
/* now handle last line */ /* now handle last line */
if (height & 1) { if (height & 1) {
UNPACK_FRAME (src, convert->tmpline, height - 1, width); UNPACK_FRAME (src, convert->tmplines[0], height - 1, width);
PACK_FRAME (dest, convert->tmpline, height - 1, width); PACK_FRAME (dest, convert->tmplines[0], height - 1, width);
} }
} }
@ -976,8 +1117,8 @@ convert_Y444_I420 (VideoConvert * convert, GstVideoFrame * dest,
/* now handle last line */ /* now handle last line */
if (height & 1) { if (height & 1) {
UNPACK_FRAME (src, convert->tmpline, height - 1, width); UNPACK_FRAME (src, convert->tmplines[0], height - 1, width);
PACK_FRAME (dest, convert->tmpline, height - 1, width); PACK_FRAME (dest, convert->tmplines[0], height - 1, width);
} }
} }
@ -1143,6 +1284,7 @@ typedef struct
void (*convert) (VideoConvert * convert, GstVideoFrame * dest, void (*convert) (VideoConvert * convert, GstVideoFrame * dest,
const GstVideoFrame * src); const GstVideoFrame * src);
} VideoTransform; } VideoTransform;
static const VideoTransform transforms[] = { static const VideoTransform transforms[] = {
{GST_VIDEO_FORMAT_I420, GST_VIDEO_COLOR_MATRIX_UNKNOWN, GST_VIDEO_FORMAT_YUY2, {GST_VIDEO_FORMAT_I420, GST_VIDEO_COLOR_MATRIX_UNKNOWN, GST_VIDEO_FORMAT_YUY2,
GST_VIDEO_COLOR_MATRIX_UNKNOWN, TRUE, TRUE, convert_I420_YUY2}, GST_VIDEO_COLOR_MATRIX_UNKNOWN, TRUE, TRUE, convert_I420_YUY2},

View file

@ -48,12 +48,20 @@ struct _VideoConvert {
guint lines; guint lines;
gpointer tmpline; gpointer *tmplines;
guint16 *errline; guint16 *errline;
GstVideoChromaResample *upsample;
guint up_n_lines;
gint up_offset;
GstVideoChromaResample *downsample;
guint down_n_lines;
gint down_offset;
void (*convert) (VideoConvert *convert, GstVideoFrame *dest, const GstVideoFrame *src); void (*convert) (VideoConvert *convert, GstVideoFrame *dest, const GstVideoFrame *src);
void (*matrix) (VideoConvert *convert, gpointer pixels); void (*matrix) (VideoConvert *convert, gpointer pixels);
void (*dither16) (VideoConvert *convert, guint16 * pixels, int j); void (*dither16) (VideoConvert *convert, guint16 * pixels, int j);
}; };
VideoConvert * videoconvert_convert_new (GstVideoInfo *in_info, VideoConvert * videoconvert_convert_new (GstVideoInfo *in_info,