jpegenc: support more colour spaces and some cleanups

Mark Nauwelaerts 2010-04-27 12:19:22 +02:00
parent c5614dbb40
commit b0ac4a4560
3 changed files with 212 additions and 117 deletions

ext/jpeg/Makefile.am

@@ -9,7 +9,8 @@ libgstjpeg_la_SOURCES = \
gstsmokedec.c
libgstjpeg_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
libgstjpeg_la_LIBADD = $(GST_LIBS) $(JPEG_LIBS) $(LIBM)
libgstjpeg_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_LIBS) -lgstvideo-$(GST_MAJORMINOR) \
$(JPEG_LIBS) $(LIBM)
libgstjpeg_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstjpeg_la_LIBTOOLFLAGS = --tag=disable-static

ext/jpeg/gstjpegenc.c

@@ -51,20 +51,6 @@ GST_DEBUG_CATEGORY_STATIC (jpegenc_debug);
#define JPEG_DEFAULT_SMOOTHING 0
#define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
/* These macros are adapted from videotestsrc.c
* and/or gst-plugins/gst/games/gstvideoimage.c */
/* I420 */
#define I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
#define I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
#define I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(I420_Y_ROWSTRIDE(width)))/2)
#define I420_Y_OFFSET(w,h) (0)
#define I420_U_OFFSET(w,h) (I420_Y_OFFSET(w,h)+(I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
#define I420_V_OFFSET(w,h) (I420_U_OFFSET(w,h)+(I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
#define I420_SIZE(w,h) (I420_V_OFFSET(w,h)+(I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
/* JpegEnc signals and args */
enum
{
@@ -81,6 +67,7 @@ enum
PROP_IDCT_METHOD
};
static void gst_jpegenc_reset (GstJpegEnc * enc);
static void gst_jpegenc_base_init (gpointer g_class);
static void gst_jpegenc_class_init (GstJpegEnc * klass);
static void gst_jpegenc_init (GstJpegEnc * jpegenc);
@@ -127,12 +114,18 @@ gst_jpegenc_get_type (void)
return jpegenc_type;
}
/* *INDENT-OFF* */
static GstStaticPadTemplate gst_jpegenc_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV
("{ I420, YV12, YUY2, UYVY, Y41B, Y42B, YVYU, Y444 }") "; "
GST_VIDEO_CAPS_RGB "; " GST_VIDEO_CAPS_BGR "; "
GST_VIDEO_CAPS_RGBx "; " GST_VIDEO_CAPS_xRGB "; "
GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_xBGR)
);
/* *INDENT-ON* */
static GstStaticPadTemplate gst_jpegenc_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src",
@@ -293,11 +286,36 @@ gst_jpegenc_init (GstJpegEnc * jpegenc)
jpegenc->cinfo.dest = &jpegenc->jdest;
jpegenc->cinfo.client_data = jpegenc;
/* init properties */
jpegenc->quality = JPEG_DEFAULT_QUALITY;
jpegenc->smoothing = JPEG_DEFAULT_SMOOTHING;
jpegenc->idct_method = JPEG_DEFAULT_IDCT_METHOD;
gst_jpegenc_reset (jpegenc);
}
static void
gst_jpegenc_reset (GstJpegEnc * enc)
{
gint i, j;
g_free (enc->line[0]);
g_free (enc->line[1]);
g_free (enc->line[2]);
enc->line[0] = NULL;
enc->line[1] = NULL;
enc->line[2] = NULL;
for (i = 0; i < 3; i++) {
for (j = 0; j < 4 * DCTSIZE; j++) {
g_free (enc->row[i][j]);
enc->row[i][j] = NULL;
}
}
enc->width = -1;
enc->height = -1;
enc->format = GST_VIDEO_FORMAT_UNKNOWN;
enc->fps_den = enc->par_den = 0;
enc->height = enc->width = 0;
}
static void
@@ -355,41 +373,102 @@ gst_jpegenc_getcaps (GstPad * pad)
static gboolean
gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps)
{
GstJpegEnc *jpegenc;
const GValue *framerate;
GstStructure *structure;
GstCaps *pcaps;
GstJpegEnc *enc = GST_JPEGENC (gst_pad_get_parent (pad));
GstVideoFormat format;
gint width, height;
gint fps_num, fps_den;
gint par_num, par_den;
gint i;
GstCaps *othercaps;
gboolean ret;
jpegenc = GST_JPEGENC (gst_pad_get_parent (pad));
/* get info from caps */
if (!gst_video_format_parse_caps (caps, &format, &width, &height))
goto refuse_caps;
/* optional; pass along if present */
fps_num = fps_den = -1;
par_num = par_den = -1;
gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);
structure = gst_caps_get_structure (caps, 0);
framerate = gst_structure_get_value (structure, "framerate");
gst_structure_get_int (structure, "width", &jpegenc->width);
gst_structure_get_int (structure, "height", &jpegenc->height);
if (width == enc->width && height == enc->height && enc->format == format
&& fps_num == enc->fps_num && fps_den == enc->fps_den
&& par_num == enc->par_num && par_den == enc->par_den)
return TRUE;
pcaps = gst_caps_new_simple ("image/jpeg",
"width", G_TYPE_INT, jpegenc->width,
"height", G_TYPE_INT, jpegenc->height, NULL);
structure = gst_caps_get_structure (pcaps, 0);
if (framerate)
gst_structure_set_value (structure, "framerate", framerate);
/* store input description */
enc->format = format;
enc->width = width;
enc->height = height;
enc->fps_num = fps_num;
enc->fps_den = fps_den;
enc->par_num = par_num;
enc->par_den = par_den;
ret = gst_pad_set_caps (jpegenc->srcpad, pcaps);
/* prepare a cached image description */
enc->channels = 3 + (gst_video_format_has_alpha (format) ? 1 : 0);
/* ... but any alpha is disregarded in encoding */
enc->channels = 3;
enc->h_max_samp = 0;
enc->v_max_samp = 0;
for (i = 0; i < enc->channels; ++i) {
enc->cwidth[i] = gst_video_format_get_component_width (format, i, width);
enc->cheight[i] = gst_video_format_get_component_height (format, i, height);
enc->offset[i] = gst_video_format_get_component_offset (format, i, width,
height);
enc->stride[i] = gst_video_format_get_row_stride (format, i, width);
enc->inc[i] = gst_video_format_get_pixel_stride (format, i);
enc->h_samp[i] = GST_ROUND_UP_4 (width) / enc->cwidth[i];
enc->h_max_samp = MAX (enc->h_max_samp, enc->h_samp[i]);
enc->v_samp[i] = GST_ROUND_UP_4 (height) / enc->cheight[i];
enc->v_max_samp = MAX (enc->v_max_samp, enc->v_samp[i]);
}
/* samp should only be 1, 2 or 4 */
g_assert (enc->h_max_samp <= 4);
g_assert (enc->v_max_samp <= 4);
/* now invert */
/* maximum is invariant, as one of the components should have samp 1 */
for (i = 0; i < enc->channels; ++i) {
enc->h_samp[i] = enc->h_max_samp / enc->h_samp[i];
enc->v_samp[i] = enc->v_max_samp / enc->v_samp[i];
}
enc->planar = (enc->inc[0] == 1 && enc->inc[1] == 1 && enc->inc[2] == 1);
othercaps = gst_caps_copy (gst_pad_get_pad_template_caps (enc->srcpad));
gst_caps_set_simple (othercaps,
"width", G_TYPE_INT, enc->width, "height", G_TYPE_INT, enc->height, NULL);
if (enc->fps_den > 0)
gst_caps_set_simple (othercaps,
"framerate", GST_TYPE_FRACTION, enc->fps_num, enc->fps_den, NULL);
if (enc->par_den > 0)
gst_caps_set_simple (othercaps,
"pixel-aspect-ratio", GST_TYPE_FRACTION, enc->par_num, enc->par_den,
NULL);
ret = gst_pad_set_caps (enc->srcpad, othercaps);
gst_caps_unref (othercaps);
if (ret)
gst_jpegenc_resync (jpegenc);
gst_jpegenc_resync (enc);
gst_caps_unref (pcaps);
gst_object_unref (jpegenc);
gst_object_unref (enc);
return ret;
/* ERRORS */
refuse_caps:
{
GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps);
gst_object_unref (enc);
return FALSE;
}
}
static void
gst_jpegenc_resync (GstJpegEnc * jpegenc)
{
gint width, height;
gint i, j;
GST_DEBUG_OBJECT (jpegenc, "resync");
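
A side note on the sampling-factor arithmetic in the new setcaps code above: each component's size ratio against the full frame is computed first, then inverted against the maximum so that the most finely sampled plane gets the largest factor, which is what libjpeg expects. Below is a small standalone sketch against the 0.10 libgstvideo API (not part of the patch; the helper name print_samp_factors is only illustrative):

#include <gst/video/video.h>

/* illustrative only: mirrors the h_samp/v_samp derivation in
 * gst_jpegenc_setcaps for a given format and frame size */
static void
print_samp_factors (GstVideoFormat format, gint width, gint height)
{
  gint h_samp[3], v_samp[3], h_max = 0, v_max = 0, i;

  for (i = 0; i < 3; i++) {
    gint cw = gst_video_format_get_component_width (format, i, width);
    gint ch = gst_video_format_get_component_height (format, i, height);

    h_samp[i] = GST_ROUND_UP_4 (width) / cw;
    v_samp[i] = GST_ROUND_UP_4 (height) / ch;
    h_max = MAX (h_max, h_samp[i]);
    v_max = MAX (v_max, v_samp[i]);
  }
  /* invert: libjpeg wants the number of blocks of this component per MCU,
   * so the most finely sampled component ends up with the largest factor */
  for (i = 0; i < 3; i++)
    g_print ("component %d: %dx%d\n", i, h_max / h_samp[i], v_max / v_samp[i]);
}

For I420 at 320x240 this prints 2x2 for Y and 1x1 for both chroma planes, the usual 4:2:0 factors; for Y444 every component comes out as 1x1.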
@@ -398,50 +477,41 @@ gst_jpegenc_resync (GstJpegEnc * jpegenc)
jpegenc->cinfo.input_components = 3;
GST_DEBUG_OBJECT (jpegenc, "width %d, height %d", width, height);
GST_DEBUG_OBJECT (jpegenc, "format %d", jpegenc->format);
#ifdef ENABLE_COLORSPACE_RGB
switch (jpegenc->format) {
case GST_COLORSPACE_RGB24:
jpegenc->bufsize = jpegenc->width * jpegenc->height * 3;
GST_DEBUG ("gst_jpegenc_resync: setting format to RGB24");
if (gst_video_format_is_rgb (jpegenc->format)) {
GST_DEBUG_OBJECT (jpegenc, "RGB");
jpegenc->cinfo.in_color_space = JCS_RGB;
jpegenc->cinfo.raw_data_in = FALSE;
break;
case GST_COLORSPACE_YUV420P:
#endif
GST_DEBUG_OBJECT (jpegenc, "setting format to YUV420P");
jpegenc->bufsize = I420_SIZE (jpegenc->width, jpegenc->height);
} else {
GST_DEBUG_OBJECT (jpegenc, "YUV");
jpegenc->cinfo.in_color_space = JCS_YCbCr;
}
/* input buffer size as max output */
jpegenc->bufsize = gst_video_format_get_size (jpegenc->format, width, height);
jpeg_set_defaults (&jpegenc->cinfo);
/* these are set in _chain()
jpeg_set_quality (&jpegenc->cinfo, jpegenc->quality, TRUE);
jpegenc->cinfo.smoothing_factor = jpegenc->smoothing;
jpegenc->cinfo.dct_method = jpegenc->idct_method;
*/
jpegenc->cinfo.raw_data_in = TRUE;
/* libjpeg's defaults map RGB input to a YCbCr jpeg; with raw_data_in there is no conversion step, so explicitly keep the jpeg colorspace RGB */
if (jpegenc->cinfo.in_color_space == JCS_RGB)
jpeg_set_colorspace (&jpegenc->cinfo, JCS_RGB);
if (height != -1) {
jpegenc->line[0] =
g_realloc (jpegenc->line[0], height * sizeof (char *));
jpegenc->line[1] =
g_realloc (jpegenc->line[1], height * sizeof (char *) / 2);
jpegenc->line[2] =
g_realloc (jpegenc->line[2], height * sizeof (char *) / 2);
GST_DEBUG_OBJECT (jpegenc, "h_max_samp=%d, v_max_samp=%d",
jpegenc->h_max_samp, jpegenc->v_max_samp);
/* image dimension info */
for (i = 0; i < 3; i++) {
GST_DEBUG_OBJECT (jpegenc, "comp %i: h_samp=%d, v_samp=%d", i,
jpegenc->h_samp[i], jpegenc->v_samp[i]);
jpegenc->cinfo.comp_info[i].h_samp_factor = jpegenc->h_samp[i];
jpegenc->cinfo.comp_info[i].v_samp_factor = jpegenc->v_samp[i];
jpegenc->line[i] = g_realloc (jpegenc->line[i],
jpegenc->v_max_samp * DCTSIZE * sizeof (char *));
if (!jpegenc->planar) {
for (j = 0; j < jpegenc->v_max_samp * DCTSIZE; j++) {
jpegenc->row[i][j] = g_realloc (jpegenc->row[i][j], width);
jpegenc->line[i][j] = jpegenc->row[i][j];
}
}
GST_DEBUG_OBJECT (jpegenc, "setting format done");
#ifdef ENABLE_COLORSPACE_RGB
break;
default:
printf ("gst_jpegenc_resync: unsupported colorspace, using RGB\n");
jpegenc->bufsize = jpegenc->width * jpegenc->height * 3;
jpegenc->cinfo.in_color_space = JCS_RGB;
break;
}
#endif
/* guard against a potential error in gst_jpegenc_term_destination
which occurs iff bufsize % 4 < free_space_remaining */
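
For reference, the libjpeg side of what the reworked resync sets up (a minimal sketch, not taken from the patch; the helper name and the hard-coded 4:2:0 factors are example assumptions): with raw_data_in the caller supplies pre-downsampled planes through jpeg_write_raw_data(), so libjpeg must not downsample or colour-convert, and for RGB input the jpeg colorspace is pinned to RGB for the same reason.

#include <stdio.h>
#include <jpeglib.h>

/* minimal sketch of a raw-data YCbCr compressor setup, similar in spirit
 * to gst_jpegenc_resync; the sampling values below are example-only */
static void
setup_raw_ycbcr_compressor (struct jpeg_compress_struct *cinfo,
    struct jpeg_error_mgr *jerr, int width, int height)
{
  cinfo->err = jpeg_std_error (jerr);
  jpeg_create_compress (cinfo);

  cinfo->image_width = width;
  cinfo->image_height = height;
  cinfo->input_components = 3;
  cinfo->in_color_space = JCS_YCbCr;
  jpeg_set_defaults (cinfo);

  /* planes are fed already downsampled via jpeg_write_raw_data() */
  cinfo->raw_data_in = TRUE;

  /* 4:2:0 sampling, e.g. for I420/YV12 input */
  cinfo->comp_info[0].h_samp_factor = 2;
  cinfo->comp_info[0].v_samp_factor = 2;
  cinfo->comp_info[1].h_samp_factor = 1;
  cinfo->comp_info[1].v_samp_factor = 1;
  cinfo->comp_info[2].h_samp_factor = 1;
  cinfo->comp_info[2].v_samp_factor = 1;

  /* for RGB input one would instead keep in_color_space = JCS_RGB and
   * call jpeg_set_colorspace (cinfo, JCS_RGB), since no conversion happens */
}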
@@ -487,19 +557,14 @@ gst_jpegenc_chain (GstPad * pad, GstBuffer * buf)
width = jpegenc->width;
height = jpegenc->height;
base[0] = data + I420_Y_OFFSET (width, height);
base[1] = data + I420_U_OFFSET (width, height);
base[2] = data + I420_V_OFFSET (width, height);
base[0] = data + jpegenc->offset[0];
base[1] = data + jpegenc->offset[1];
base[2] = data + jpegenc->offset[2];
end[0] = base[0] + height * I420_Y_ROWSTRIDE (width);
end[1] = base[1] + (height / 2) * I420_U_ROWSTRIDE (width);
end[2] = base[2] + (height / 2) * I420_V_ROWSTRIDE (width);
end[0] = base[0] + jpegenc->cheight[0] * jpegenc->stride[0];
end[1] = base[1] + jpegenc->cheight[1] * jpegenc->stride[1];
end[2] = base[2] + jpegenc->cheight[2] * jpegenc->stride[2];
/* FIXME: shouldn't we also set
* - jpegenc->cinfo.max_{v,h}_samp_factor
* - jpegenc->cinfo.comp_info[0,1,2].{v,h}_samp_factor
* accordingly?
*/
jpegenc->jdest.next_output_byte = GST_BUFFER_DATA (jpegenc->output_buffer);
jpegenc->jdest.free_in_buffer = GST_BUFFER_SIZE (jpegenc->output_buffer);
@@ -514,23 +579,40 @@ gst_jpegenc_chain (GstPad * pad, GstBuffer * buf)
GST_LOG_OBJECT (jpegenc, "compressing");
for (i = 0; i < height; i += 2 * DCTSIZE) {
/*g_print ("next scanline: %d\n", jpegenc->cinfo.next_scanline); */
for (j = 0, k = 0; j < (2 * DCTSIZE); j += 2, k++) {
jpegenc->line[0][j] = base[0];
if (base[0] + I420_Y_ROWSTRIDE (width) < end[0])
base[0] += I420_Y_ROWSTRIDE (width);
jpegenc->line[0][j + 1] = base[0];
if (base[0] + I420_Y_ROWSTRIDE (width) < end[0])
base[0] += I420_Y_ROWSTRIDE (width);
jpegenc->line[1][k] = base[1];
if (base[1] + I420_U_ROWSTRIDE (width) < end[1])
base[1] += I420_U_ROWSTRIDE (width);
jpegenc->line[2][k] = base[2];
if (base[2] + I420_V_ROWSTRIDE (width) < end[2])
base[2] += I420_V_ROWSTRIDE (width);
if (jpegenc->planar) {
for (i = 0; i < height; i += jpegenc->v_max_samp * DCTSIZE) {
for (k = 0; k < jpegenc->channels; k++) {
for (j = 0; j < jpegenc->v_samp[k] * DCTSIZE; j++) {
jpegenc->line[k][j] = base[k];
if (base[k] + jpegenc->stride[k] < end[k])
base[k] += jpegenc->stride[k];
}
}
jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line,
jpegenc->v_max_samp * DCTSIZE);
}
} else {
for (i = 0; i < height; i += jpegenc->v_max_samp * DCTSIZE) {
for (k = 0; k < jpegenc->channels; k++) {
for (j = 0; j < jpegenc->v_samp[k] * DCTSIZE; j++) {
guchar *src, *dst;
gint l;
/* packed format: gather this component's samples into a contiguous line */
src = base[k];
dst = jpegenc->line[k][j];
for (l = jpegenc->cwidth[k]; l > 0; l--) {
*dst = *src;
src += jpegenc->inc[k];
dst++;
}
if (base[k] + jpegenc->stride[k] < end[k])
base[k] += jpegenc->stride[k];
}
}
jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line,
jpegenc->v_max_samp * DCTSIZE);
}
jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line, 2 * DCTSIZE);
}
/* This will ensure that gst_jpegenc_term_destination is called; we push
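
To make the packed-format branch above concrete: for a format such as YUY2 the 0.10 video library reports per-component byte offsets and pixel strides, and the inner loop simply walks the packed line with that stride to produce the contiguous component rows that jpeg_write_raw_data() needs. A standalone sketch (the helper name gather_component_line is illustrative, not from the patch):

#include <gst/video/video.h>

/* copy one component's samples out of a packed line into a contiguous
 * buffer, the same gathering the non-planar branch above performs;
 * intended for packed single-plane formats, where the height argument
 * to the offset lookup is irrelevant */
static void
gather_component_line (const guint8 * line, GstVideoFormat format,
    gint comp, gint width, guint8 * dst)
{
  gint inc = gst_video_format_get_pixel_stride (format, comp);
  gint cwidth = gst_video_format_get_component_width (format, comp, width);
  /* for packed formats the component offset is a small byte offset within
   * each pixel group, e.g. {0, 1, 3} for YUY2 */
  const guint8 *src =
      line + gst_video_format_get_component_offset (format, comp, width, 1);
  gint i;

  for (i = 0; i < cwidth; i++) {
    dst[i] = *src;
    src += inc;
  }
}

For YUY2 (Y0 U0 Y1 V0 ...) the pixel strides come out as 2 for Y and 4 for U and V; for RGB and its xRGB/RGBx variants the same mechanism applies, with a stride of 3 or 4 and offsets selecting the R, G and B bytes.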
@@ -630,15 +712,8 @@ gst_jpegenc_change_state (GstElement * element, GstStateChange transition)
return ret;
switch (transition) {
case GST_STATE_CHANGE_READY_TO_NULL:
g_free (filter->line[0]);
g_free (filter->line[1]);
g_free (filter->line[2]);
filter->line[0] = NULL;
filter->line[1] = NULL;
filter->line[2] = NULL;
filter->width = -1;
filter->height = -1;
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_jpegenc_reset (filter);
break;
default:
break;

ext/jpeg/gstjpegenc.h

@@ -23,6 +23,7 @@
#include <gst/gst.h>
#include <gst/video/video.h>
/* this is a hack hack hack to get around jpeglib header bugs... */
#ifdef HAVE_STDLIB_H
# undef HAVE_STDLIB_H
@@ -41,9 +42,12 @@ G_BEGIN_DECLS
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_JPEGENC))
#define GST_IS_JPEGENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_JPEGENC))
typedef struct _GstJpegEnc GstJpegEnc;
typedef struct _GstJpegEncClass GstJpegEncClass;
#define GST_JPEG_ENC_MAX_COMPONENT 4
struct _GstJpegEnc
{
GstElement element;
@@ -51,15 +55,30 @@ struct _GstJpegEnc
/* pads */
GstPad *sinkpad, *srcpad;
/* video state */
gint format;
/* stream/image properties */
GstVideoFormat format;
gint width;
gint height;
gint channels;
gint fps_num, fps_den;
gint par_num, par_den;
/* standard video_format indexed */
gint stride[GST_JPEG_ENC_MAX_COMPONENT];
gint offset[GST_JPEG_ENC_MAX_COMPONENT];
gint inc[GST_JPEG_ENC_MAX_COMPONENT];
gint cwidth[GST_JPEG_ENC_MAX_COMPONENT];
gint cheight[GST_JPEG_ENC_MAX_COMPONENT];
gint h_samp[GST_JPEG_ENC_MAX_COMPONENT];
gint v_samp[GST_JPEG_ENC_MAX_COMPONENT];
gint h_max_samp;
gint v_max_samp;
gboolean planar;
/* the video buffer */
gint bufsize;
guint row_stride;
/* the jpeg line buffer */
guchar **line[3];
/* indirect encoding line buffers */
guchar *row[3][4 * DCTSIZE];
struct jpeg_compress_struct cinfo;
struct jpeg_error_mgr jerr;