gst/ffmpegcolorspace/: Added AYUV colorspace and handle RGBA a bit more respectfully.

Original commit message from CVS:
* gst/ffmpegcolorspace/avcodec.h:
* gst/ffmpegcolorspace/gstffmpegcodecmap.c:
(gst_ffmpeg_pixfmt_to_caps), (gst_ffmpeg_caps_to_pixfmt),
(gst_ffmpegcsp_avpicture_fill):
* gst/ffmpegcolorspace/gstffmpegcolorspace.c:
(gst_ffmpegcsp_caps_remove_format_info):
* gst/ffmpegcolorspace/imgconvert.c: (avpicture_get_size),
(shrink12), (img_get_alpha_info), (deinterlace_line),
(deinterlace_line_inplace):
* gst/ffmpegcolorspace/imgconvert_template.h:
Added AYUV colorspace and handle RGBA a bit more respectful.
Author: Wim Taymans <wim@fluendo.com>
Date:   2004-11-08 15:18:15 +00:00
Parent: 6169fd7d0d
Commit: d16e066b92
6 changed files with 266 additions and 9 deletions

ChangeLog

@@ -1,3 +1,17 @@
2004-11-08 Wim Taymans <wim@fluendo.com>
* gst/ffmpegcolorspace/avcodec.h:
* gst/ffmpegcolorspace/gstffmpegcodecmap.c:
(gst_ffmpeg_pixfmt_to_caps), (gst_ffmpeg_caps_to_pixfmt),
(gst_ffmpegcsp_avpicture_fill):
* gst/ffmpegcolorspace/gstffmpegcolorspace.c:
(gst_ffmpegcsp_caps_remove_format_info):
* gst/ffmpegcolorspace/imgconvert.c: (avpicture_get_size),
(shrink12), (img_get_alpha_info), (deinterlace_line),
(deinterlace_line_inplace):
* gst/ffmpegcolorspace/imgconvert_template.h:
Added AYUV colorspace and handle RGBA a bit more respectful.
2004-11-08 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
* ext/ogg/gstoggdemux.c: (gst_ogg_pad_push):

gst/ffmpegcolorspace/avcodec.h

@@ -59,6 +59,7 @@ enum PixelFormat {
PIX_FMT_YUV422P, ///< Planar YUV 4:2:2 (1 Cr & Cb sample per 2x1 Y samples)
PIX_FMT_YUV444P, ///< Planar YUV 4:4:4 (1 Cr & Cb sample per 1x1 Y samples)
PIX_FMT_RGBA32, ///< Packed pixel, 4 bytes per pixel, BGRABGRA..., stored in cpu endianness
PIX_FMT_RGB32, ///< Packed pixel, 4 bytes per pixel, BGRXBGRX..., stored in cpu endianness
PIX_FMT_YUV410P, ///< Planar YUV 4:1:0 (1 Cr & Cb sample per 4x4 Y samples)
PIX_FMT_YUV411P, ///< Planar YUV 4:1:1 (1 Cr & Cb sample per 4x1 Y samples)
PIX_FMT_RGB565, ///< always stored in cpu endianness
@@ -74,6 +75,8 @@ enum PixelFormat {
PIX_FMT_XVMC_MPEG2_IDCT,
PIX_FMT_UYVY422, ///< Packed pixel, Cb Y0 Cr Y1
PIX_FMT_UYVY411, ///< Packed pixel, Cb Y0 Y1 Cr Y2 Y3
PIX_FMT_AYUV4444, ///< Packed pixel, A0 Y0 Cb Cr
PIX_FMT_NB,
};
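As a side note on the new entry: PIX_FMT_AYUV4444 is fully packed, one pixel per four consecutive bytes in A, Y, Cb, Cr order, with no chroma subsampling. A minimal stand-alone sketch of how such a buffer is addressed (read_ayuv_pixel is a made-up helper, not part of the commit; the converters added to imgconvert_template.h below index the bytes the same way):

#include <stdint.h>

static void
read_ayuv_pixel (const uint8_t * buf, int stride, int x, int y,
    uint8_t * a, uint8_t * luma, uint8_t * cb, uint8_t * cr)
{
  const uint8_t *p = buf + y * stride + x * 4;  /* 4 bytes per pixel */

  *a = p[0];                    /* alpha */
  *luma = p[1];                 /* Y     */
  *cb = p[2];                   /* Cb    */
  *cr = p[3];                   /* Cr    */
}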

gst/ffmpegcolorspace/gstffmpegcodecmap.c

@@ -115,7 +115,7 @@ gst_ffmpeg_pixfmt_to_caps (enum PixelFormat pix_fmt, AVCodecContext * context)
GstCaps *caps = NULL;
int bpp = 0, depth = 0, endianness = 0;
gulong g_mask = 0, r_mask = 0, b_mask = 0;
gulong g_mask = 0, r_mask = 0, b_mask = 0, a_mask = 0;
guint32 fmt = 0;
switch (pix_fmt) {
@@ -145,7 +145,7 @@ gst_ffmpeg_pixfmt_to_caps (enum PixelFormat pix_fmt, AVCodecContext * context)
case PIX_FMT_YUV444P:
/* .. */
break;
case PIX_FMT_RGBA32:
case PIX_FMT_RGB32:
bpp = 32;
depth = 24;
endianness = G_BIG_ENDIAN;
@@ -157,6 +157,22 @@ gst_ffmpeg_pixfmt_to_caps (enum PixelFormat pix_fmt, AVCodecContext * context)
r_mask = 0x0000ff00;
g_mask = 0x00ff0000;
b_mask = 0xff000000;
#endif
break;
case PIX_FMT_RGBA32:
bpp = 32;
depth = 32;
endianness = G_BIG_ENDIAN;
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
r_mask = 0x000000ff;
g_mask = 0x0000ff00;
b_mask = 0x00ff0000;
a_mask = 0xff000000;
#else
r_mask = 0xff000000;
g_mask = 0x00ff0000;
b_mask = 0x0000ff00;
a_mask = 0x000000ff;
#endif
break;
case PIX_FMT_YUV410P:
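A note on the new PIX_FMT_RGBA32 case above: endianness is always reported as G_BIG_ENDIAN, so the masks describe a big-endian 32-bit read and have to be byte-swapped on little-endian hosts. On a little-endian machine this case would produce caps roughly like the hand-written string below (width, height and framerate come from GST_FF_VID_CAPS_NEW and are elided):

video/x-raw-rgb, bpp=(int)32, depth=(int)32, endianness=(int)4321,
    red_mask=(int)0xff000000, green_mask=(int)0x00ff0000,
    blue_mask=(int)0x0000ff00, alpha_mask=(int)0x000000ff, ...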
@@ -184,13 +200,25 @@ gst_ffmpeg_pixfmt_to_caps (enum PixelFormat pix_fmt, AVCodecContext * context)
bpp = depth = 8;
endianness = G_BYTE_ORDER;
break;
case PIX_FMT_AYUV4444:
fmt = GST_MAKE_FOURCC ('A', 'Y', 'U', 'V');
break;
default:
/* give up ... */
break;
}
if (bpp != 0) {
if (r_mask != 0) {
if (a_mask != 0) {
caps = GST_FF_VID_CAPS_NEW ("video/x-raw-rgb",
"bpp", G_TYPE_INT, bpp,
"depth", G_TYPE_INT, depth,
"red_mask", G_TYPE_INT, r_mask,
"green_mask", G_TYPE_INT, g_mask,
"blue_mask", G_TYPE_INT, b_mask,
"alpha_mask", G_TYPE_INT, a_mask,
"endianness", G_TYPE_INT, endianness, NULL);
} else if (r_mask != 0) {
caps = GST_FF_VID_CAPS_NEW ("video/x-raw-rgb",
"bpp", G_TYPE_INT, bpp,
"depth", G_TYPE_INT, depth,
@@ -410,6 +438,9 @@ gst_ffmpeg_caps_to_pixfmt (const GstCaps * caps,
case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'):
context->pix_fmt = PIX_FMT_YUV410P;
break;
case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
context->pix_fmt = PIX_FMT_AYUV4444;
break;
#if 0
case FIXME:
context->pix_fmt = PIX_FMT_YUV444P;
@@ -419,19 +450,23 @@ gst_ffmpeg_caps_to_pixfmt (const GstCaps * caps,
}
} else if (strcmp (gst_structure_get_name (structure),
"video/x-raw-rgb") == 0) {
gint bpp = 0, rmask = 0, endianness = 0;
gint bpp = 0, rmask = 0, endianness = 0, amask = 0;
if (gst_structure_get_int (structure, "bpp", &bpp) &&
gst_structure_get_int (structure, "endianness", &endianness)) {
if (gst_structure_get_int (structure, "red_mask", &rmask)) {
switch (bpp) {
case 32:
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
if (rmask == 0x00ff0000)
#else
if (rmask == 0x0000ff00)
#endif
if (gst_structure_get_int (structure, "alpha_mask", &amask)) {
context->pix_fmt = PIX_FMT_RGBA32;
} else {
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
if (rmask == 0x00ff0000)
#else
if (rmask == 0x0000ff00)
#endif
context->pix_fmt = PIX_FMT_RGB32;
}
break;
case 24:
if (rmask == 0x0000FF)
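In the opposite direction, the new 32-bit branch above keys purely on whether the caps carry an alpha_mask field; informally (illustrative summary, assuming bpp and endianness already checked out):

32 bpp, alpha_mask present                  -> PIX_FMT_RGBA32
32 bpp, no alpha_mask, red_mask as expected -> PIX_FMT_RGB32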
@@ -569,6 +604,15 @@ static PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
.depth = 8,
.x_chroma_shift = 2,.y_chroma_shift = 0,
},
[PIX_FMT_AYUV4444] = {
.name = "ayuv4444",
.nb_channels = 1,
.is_alpha = 1,
.color_type = FF_COLOR_YUV,
.pixel_type = FF_PIXEL_PACKED,
.depth = 8,
.x_chroma_shift = 0,.y_chroma_shift = 0,
},
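The zero chroma shifts in the ayuv4444 entry above mean chroma is kept at full luma resolution, unlike e.g. yuv420p where both shifts are 1. A stand-alone sketch of the usual rounding-up interpretation of these fields (not the actual avpicture code):

#include <stdio.h>

int
main (void)
{
  int width = 175, height = 99;
  int x_shift = 1, y_shift = 1;   /* yuv420p; ayuv4444 would use 0, 0 */

  /* round up so odd sizes still get a final chroma sample */
  int cw = (width + (1 << x_shift) - 1) >> x_shift;     /* 88 */
  int ch = (height + (1 << y_shift) - 1) >> y_shift;    /* 50 */

  printf ("chroma plane: %dx%d\n", cw, ch);
  return 0;
}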
/* JPEG YUV */
[PIX_FMT_YUVJ420P] = {
@@ -613,6 +657,14 @@ static PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
.depth = 8,
.x_chroma_shift = 0,.y_chroma_shift = 0,
},
[PIX_FMT_RGB32] = {
.name = "rgb32",
.nb_channels = 4,
.color_type = FF_COLOR_RGB,
.pixel_type = FF_PIXEL_PACKED,
.depth = 8,
.x_chroma_shift = 0,.y_chroma_shift = 0,
},
[PIX_FMT_RGBA32] = {
.name = "rgba32",
.nb_channels = 4,.is_alpha = 1,
@@ -720,6 +772,8 @@ gst_ffmpegcsp_avpicture_fill (AVPicture * picture,
picture->data[2] = NULL;
picture->linesize[0] = stride;
return size;
case PIX_FMT_AYUV4444:
case PIX_FMT_RGB32:
case PIX_FMT_RGBA32:
stride = width * 4;
size = stride * height;
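AYUV and the X-channel RGB32 now take the same packed path as RGBA32 here: one plane, stride = width * 4, size = stride * height. A quick stand-alone check of the resulting numbers (illustrative, assumes no extra padding):

#include <stdio.h>

int
main (void)
{
  int width = 320, height = 240;
  int stride = width * 4;          /* 1280 bytes per row */
  int size = stride * height;      /* 307200 bytes in total */

  printf ("AYUV %dx%d: stride=%d, size=%d\n", width, height, stride, size);
  return 0;
}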

gst/ffmpegcolorspace/gstffmpegcolorspace.c

@@ -126,6 +126,7 @@ gst_ffmpegcsp_caps_remove_format_info (GstCaps * caps)
gst_structure_remove_field (structure, "red_mask");
gst_structure_remove_field (structure, "green_mask");
gst_structure_remove_field (structure, "blue_mask");
gst_structure_remove_field (structure, "alpha_mask");
}
gst_caps_do_simplify (caps);
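For context: stripping the mask fields (now including alpha_mask) is part of what lets the element offer the same geometry in any colorspace during negotiation. A hand-written illustration (the function presumably strips the other format fields as well, they are just not visible in this hunk):

video/x-raw-rgb, bpp=(int)32, depth=(int)32, red_mask=..., alpha_mask=..., width=(int)320, height=(int)240

becomes roughly

video/x-raw-rgb, width=(int)320, height=(int)240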

gst/ffmpegcolorspace/imgconvert.c

@@ -60,6 +60,7 @@ typedef struct PixFmtInfo
} PixFmtInfo;
/* this table gives more information about formats */
/* FIXME, this table is also in ffmpegcodecmap */
static PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
/* YUV formats */
[PIX_FMT_YUV420P] = {
@@ -162,6 +163,14 @@ static PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
.depth = 8,
.x_chroma_shift = 0,.y_chroma_shift = 0,
},
[PIX_FMT_RGB32] = {
.name = "rgb32",
.nb_channels = 4,
.color_type = FF_COLOR_RGB,
.pixel_type = FF_PIXEL_PACKED,
.depth = 8,
.x_chroma_shift = 0,.y_chroma_shift = 0,
},
[PIX_FMT_RGBA32] = {
.name = "rgba32",
.nb_channels = 4,.is_alpha = 1,
@@ -1457,6 +1466,37 @@ bitcopy_n (unsigned int a, int n)
#include "imgconvert_template.h"
/* rgb32 handling */
#define RGB_NAME rgb32
#define FMT_RGBA32
#define RGB_IN(r, g, b, s)\
{\
unsigned int v = ((const uint32_t *)(s))[0];\
r = (v >> 16) & 0xff;\
g = (v >> 8) & 0xff;\
b = v & 0xff;\
}
#define RGBA_IN(r, g, b, a, s)\
{\
unsigned int v = ((const uint32_t *)(s))[0];\
a = 0xff;\
r = (v >> 16) & 0xff;\
g = (v >> 8) & 0xff;\
b = v & 0xff;\
}
#define RGBA_OUT(d, r, g, b, a)\
{\
((uint32_t *)(d))[0] = (a << 24) | (r << 16) | (g << 8) | b;\
}
#define BPP 4
#include "imgconvert_template.h"
/* rgba32 handling */
#define RGB_NAME rgba32
@@ -1639,6 +1679,8 @@ static ConvertEntry convert_table[PIX_FMT_NB][PIX_FMT_NB] = {
.convert = yuv420p_to_bgr24},
[PIX_FMT_RGB24] = {
.convert = yuv420p_to_rgb24},
[PIX_FMT_RGB32] = {
.convert = yuv420p_to_rgb32},
[PIX_FMT_RGBA32] = {
.convert = yuv420p_to_rgba32},
},
@@ -1663,6 +1705,8 @@ static ConvertEntry convert_table[PIX_FMT_NB][PIX_FMT_NB] = {
.convert = yuvj420p_to_bgr24},
[PIX_FMT_RGB24] = {
.convert = yuvj420p_to_rgb24},
[PIX_FMT_RGB32] = {
.convert = yuvj420p_to_rgb32},
[PIX_FMT_RGBA32] = {
.convert = yuvj420p_to_rgba32},
},
@@ -1693,6 +1737,8 @@ static ConvertEntry convert_table[PIX_FMT_NB][PIX_FMT_NB] = {
.convert = rgb24_to_rgb565},
[PIX_FMT_RGB555] = {
.convert = rgb24_to_rgb555},
[PIX_FMT_RGB32] = {
.convert = rgb24_to_rgb32},
[PIX_FMT_RGBA32] = {
.convert = rgb24_to_rgba32},
[PIX_FMT_BGR24] = {
@@ -1707,6 +1753,20 @@ static ConvertEntry convert_table[PIX_FMT_NB][PIX_FMT_NB] = {
.convert = rgb24_to_yuvj420p},
[PIX_FMT_YUVJ444P] = {
.convert = rgb24_to_yuvj444p},
[PIX_FMT_AYUV4444] = {
.convert = rgb24_to_ayuv4444},
},
[PIX_FMT_RGB32] = {
[PIX_FMT_RGB24] = {
.convert = rgb32_to_rgb24},
[PIX_FMT_RGB555] = {
.convert = rgba32_to_rgb555},
[PIX_FMT_PAL8] = {
.convert = rgb32_to_pal8},
[PIX_FMT_YUV420P] = {
.convert = rgb32_to_yuv420p},
[PIX_FMT_GRAY8] = {
.convert = rgb32_to_gray},
},
[PIX_FMT_RGBA32] = {
[PIX_FMT_RGB24] = {
@@ -1719,6 +1779,8 @@ static ConvertEntry convert_table[PIX_FMT_NB][PIX_FMT_NB] = {
.convert = rgba32_to_yuv420p},
[PIX_FMT_GRAY8] = {
.convert = rgba32_to_gray},
[PIX_FMT_AYUV4444] = {
.convert = rgba32_to_ayuv4444},
},
[PIX_FMT_BGR24] = {
[PIX_FMT_RGB24] = {
@@ -1731,6 +1793,8 @@ static ConvertEntry convert_table[PIX_FMT_NB][PIX_FMT_NB] = {
[PIX_FMT_RGB555] = {
[PIX_FMT_RGB24] = {
.convert = rgb555_to_rgb24},
[PIX_FMT_RGB32] = {
.convert = rgb555_to_rgba32},
[PIX_FMT_RGBA32] = {
.convert = rgb555_to_rgba32},
[PIX_FMT_YUV420P] = {
@@ -1755,6 +1819,8 @@ static ConvertEntry convert_table[PIX_FMT_NB][PIX_FMT_NB] = {
.convert = gray_to_rgb24},
[PIX_FMT_BGR24] = {
.convert = gray_to_bgr24},
[PIX_FMT_RGB32] = {
.convert = gray_to_rgb32},
[PIX_FMT_RGBA32] = {
.convert = gray_to_rgba32},
[PIX_FMT_MONOWHITE] = {
@@ -1779,6 +1845,8 @@ static ConvertEntry convert_table[PIX_FMT_NB][PIX_FMT_NB] = {
.convert = pal8_to_bgr24},
[PIX_FMT_RGB24] = {
.convert = pal8_to_rgb24},
[PIX_FMT_RGB32] = {
.convert = pal8_to_rgb32},
[PIX_FMT_RGBA32] = {
.convert = pal8_to_rgba32},
},
@@ -1787,6 +1855,14 @@ static ConvertEntry convert_table[PIX_FMT_NB][PIX_FMT_NB] = {
.convert = uyvy411_to_yuv411p,
},
},
[PIX_FMT_AYUV4444] = {
[PIX_FMT_RGBA32] = {
.convert = ayuv4444_to_rgba32,
},
[PIX_FMT_RGB24] = {
.convert = ayuv4444_to_rgb24,
},
},
};
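Since convert_table is indexed [source][destination], the new rows add direct AYUV paths instead of forcing a detour through an intermediate format. A stand-alone toy sketch of the lookup pattern (the enum, table and converter below are made-up stand-ins, not the real symbols):

#include <stdio.h>

enum { FMT_AYUV, FMT_RGBA, FMT_NB };

typedef struct
{
  void (*convert) (const unsigned char *src, unsigned char *dst);
} ConvertEntry;

static void
ayuv_to_rgba (const unsigned char *src, unsigned char *dst)
{
  dst[0] = src[1];              /* placeholder for the real per-pixel work */
}

/* sparse designated-initializer table, shaped like convert_table above */
static const ConvertEntry table[FMT_NB][FMT_NB] = {
  [FMT_AYUV] = {
    [FMT_RGBA] = {.convert = ayuv_to_rgba},
  },
};

int
main (void)
{
  unsigned char in[4] = { 0xff, 0xeb, 0x80, 0x80 }, out[1] = { 0 };
  const ConvertEntry *ce = &table[FMT_AYUV][FMT_RGBA];

  if (ce->convert)              /* a NULL entry means no direct conversion */
    ce->convert (in, out);
  printf ("out=0x%02x\n", out[0]);
  return 0;
}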
@@ -2093,6 +2169,9 @@ img_get_alpha_info (const AVPicture * src, int pix_fmt, int width, int height)
if (!pf->is_alpha)
return 0;
switch (pix_fmt) {
case PIX_FMT_RGB32:
ret = get_alpha_info_rgb32 (src, width, height);
break;
case PIX_FMT_RGBA32:
ret = get_alpha_info_rgba32 (src, width, height);
break;

gst/ffmpegcolorspace/imgconvert_template.h

@@ -468,6 +468,60 @@ static void glue(RGB_NAME, _to_rgba32)(AVPicture *dst, const AVPicture *src,
}
}
static void ayuv4444_to_rgba32(AVPicture *dst, const AVPicture *src,
int width, int height)
{
uint8_t *s, *d, *d1, *s1;
int w, y, cb, cr, r_add, g_add, b_add;
uint8_t *cm = cropTbl + MAX_NEG_CROP;
unsigned int r, g, b, a;
d = dst->data[0];
s = src->data[0];
for(;height > 0; height --) {
d1 = d;
s1 = s;
for(w = width; w > 0; w--) {
a = s1[0];
YUV_TO_RGB1_CCIR(s1[2], s1[3]);
YUV_TO_RGB2_CCIR(r, g, b, s1[1]);
RGBA_OUT(d1, r, g, b, a);
d1 += BPP;
s1 += 4;
}
d += dst->linesize[0];
s += src->linesize[0];
}
}
static void rgba32_to_ayuv4444(AVPicture *dst, const AVPicture *src,
int width, int height)
{
int src_wrap, dst_wrap, x, y;
int r, g, b, a;
uint8_t *d;
const uint8_t *p;
src_wrap = src->linesize[0] - width * BPP;
dst_wrap = dst->linesize[0] - width * 4;
d = dst->data[0];
p = src->data[0];
for(y=0;y<height;y++) {
for(x=0;x<width;x++) {
RGBA_IN(r, g, b, a, p);
d[0] = a;
d[1] = RGB_TO_Y_CCIR(r, g, b);
d[2] = RGB_TO_U_CCIR(r, g, b, 0);
d[3] = RGB_TO_V_CCIR(r, g, b, 0);
p += BPP;
d += 4;
}
p += src_wrap;
d += dst_wrap;
}
}
#endif /* !defined(FMT_RGBA32) && defined(RGBA_IN) */
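The *_CCIR macros are assumed here to produce video-range values, so a fully opaque white RGBA pixel should leave rgba32_to_ayuv4444 as roughly A=255, Y=235, Cb=Cr=128. A stand-alone floating-point check of that expectation (standard BT.601 studio-range formulas, not ffmpeg's fixed-point macros):

#include <stdio.h>
#include <math.h>

int
main (void)
{
  int r = 255, g = 255, b = 255;        /* opaque white */

  int y = lrint (16.0 + (219.0 / 255.0) * (0.299 * r + 0.587 * g + 0.114 * b));
  int cb = lrint (128.0 + (224.0 / 255.0) * (-0.169 * r - 0.331 * g + 0.500 * b));
  int cr = lrint (128.0 + (224.0 / 255.0) * (0.500 * r - 0.419 * g - 0.081 * b));

  printf ("A=255 Y=%d Cb=%d Cr=%d\n", y, cb, cr);       /* expect 235 128 128 */
  return 0;
}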
#ifndef FMT_RGB24
@@ -769,6 +823,58 @@ static void rgb24_to_yuvj444p(AVPicture *dst, const AVPicture *src,
}
}
static void ayuv4444_to_rgb24(AVPicture *dst, const AVPicture *src,
int width, int height)
{
uint8_t *s, *d, *d1, *s1;
int w, y, cb, cr, r_add, g_add, b_add;
uint8_t *cm = cropTbl + MAX_NEG_CROP;
unsigned int r, g, b;
d = dst->data[0];
s = src->data[0];
for(;height > 0; height --) {
d1 = d;
s1 = s;
for(w = width; w > 0; w--) {
YUV_TO_RGB1_CCIR(s1[2], s1[3]);
YUV_TO_RGB2_CCIR(r, g, b, s1[1]);
RGB_OUT(d1, r, g, b);
d1 += BPP;
s1 += 4;
}
d += dst->linesize[0];
s += src->linesize[0];
}
}
static void rgb24_to_ayuv4444(AVPicture *dst, const AVPicture *src,
int width, int height)
{
int src_wrap, dst_wrap, x, y;
int r, g, b;
uint8_t *d;
const uint8_t *p;
src_wrap = src->linesize[0] - width * BPP;
dst_wrap = dst->linesize[0] - width * 4;
d = dst->data[0];
p = src->data[0];
for(y=0;y<height;y++) {
for(x=0;x<width;x++) {
RGB_IN(r, g, b, p);
d[0] = 0xff;
d[1] = RGB_TO_Y_CCIR(r, g, b);
d[2] = RGB_TO_U_CCIR(r, g, b, 0);
d[3] = RGB_TO_V_CCIR(r, g, b, 0);
p += BPP;
d += 4;
}
p += src_wrap;
d += dst_wrap;
}
}
#endif /* FMT_RGB24 */
#if defined(FMT_RGB24) || defined(FMT_RGBA32)