jpeg: port to 0.11

Also disable smoke for now.
This commit is contained in:
Wim Taymans 2011-08-17 15:39:27 +02:00
parent 48e47ad702
commit be4f60b062
7 changed files with 407 additions and 403 deletions

View file

@@ -315,7 +315,7 @@ dnl Make sure you have a space before and after all plugins
GST_PLUGINS_NONPORTED=" deinterlace flx goom2k1 icydemux id3demux \ GST_PLUGINS_NONPORTED=" deinterlace flx goom2k1 icydemux id3demux \
imagefreeze interleave law matroska monoscope shapewipe smpte \ imagefreeze interleave law matroska monoscope shapewipe smpte \
videobox videocrop videomixer \ videobox videocrop videomixer \
annodex apetag cairo cairo_gobject dv1394 flac gdk_pixbuf jpeg libdv libpng \ annodex apetag cairo cairo_gobject dv1394 flac gdk_pixbuf libdv libpng \
oss oss4 shout2 \ oss oss4 shout2 \
soup taglib wavpack " soup taglib wavpack "
AC_SUBST(GST_PLUGINS_NONPORTED) AC_SUBST(GST_PLUGINS_NONPORTED)

View file

@@ -3,10 +3,8 @@ plugin_LTLIBRARIES = libgstjpeg.la
libgstjpeg_la_SOURCES = \ libgstjpeg_la_SOURCES = \
gstjpeg.c \ gstjpeg.c \
gstjpegenc.c \ gstjpegenc.c \
gstjpegdec.c \ gstjpegdec.c
gstsmokeenc.c \ # deprecated gstsmokeenc.c smokecodec.c gstsmokedec.c
smokecodec.c \
gstsmokedec.c
libgstjpeg_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) libgstjpeg_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstjpeg_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) -lgstvideo-$(GST_MAJORMINOR) \ libgstjpeg_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) -lgstvideo-$(GST_MAJORMINOR) \
@@ -16,6 +14,5 @@ libgstjpeg_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = \ noinst_HEADERS = \
gstjpeg.h \ gstjpeg.h \
gstjpegdec.h gstjpegenc.h \ gstjpegdec.h gstjpegenc.h
gstsmokeenc.h gstsmokedec.h \ # deprecated gstsmokeenc.h gstsmokedec.h smokecodec.h smokeformat.h
smokecodec.h smokeformat.h

View file

@@ -27,8 +27,10 @@
#include "gstjpeg.h" #include "gstjpeg.h"
#include "gstjpegdec.h" #include "gstjpegdec.h"
#include "gstjpegenc.h" #include "gstjpegenc.h"
#if 0
#include "gstsmokeenc.h" #include "gstsmokeenc.h"
#include "gstsmokedec.h" #include "gstsmokedec.h"
#endif
GType GType
gst_idct_method_get_type (void) gst_idct_method_get_type (void)
@@ -59,6 +61,7 @@ plugin_init (GstPlugin * plugin)
GST_TYPE_JPEG_DEC)) GST_TYPE_JPEG_DEC))
return FALSE; return FALSE;
#if 0
if (!gst_element_register (plugin, "smokeenc", GST_RANK_PRIMARY, if (!gst_element_register (plugin, "smokeenc", GST_RANK_PRIMARY,
GST_TYPE_SMOKEENC)) GST_TYPE_SMOKEENC))
return FALSE; return FALSE;
@@ -66,6 +69,7 @@ plugin_init (GstPlugin * plugin)
if (!gst_element_register (plugin, "smokedec", GST_RANK_PRIMARY, if (!gst_element_register (plugin, "smokedec", GST_RANK_PRIMARY,
GST_TYPE_SMOKEDEC)) GST_TYPE_SMOKEDEC))
return FALSE; return FALSE;
#endif
return TRUE; return TRUE;
} }

View file

@@ -66,12 +66,10 @@ static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src", GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC, GST_PAD_SRC,
GST_PAD_ALWAYS, GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420") "; " GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
GST_VIDEO_CAPS_RGB "; " GST_VIDEO_CAPS_BGR "; " ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
GST_VIDEO_CAPS_RGBx "; " GST_VIDEO_CAPS_xRGB "; "
GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_xBGR "; "
GST_VIDEO_CAPS_GRAY8)
); );
/* *INDENT-ON* */ /* *INDENT-ON* */
static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template = static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
@@ -88,31 +86,12 @@ GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
#define GST_CAT_DEFAULT jpeg_dec_debug #define GST_CAT_DEFAULT jpeg_dec_debug
GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE); GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
/* These macros are adapted from videotestsrc.c
* and/or gst-plugins/gst/games/gstvideoimage.c */
#define I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
#define I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
#define I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(I420_Y_ROWSTRIDE(width)))/2)
#define I420_Y_OFFSET(w,h) (0)
#define I420_U_OFFSET(w,h) (I420_Y_OFFSET(w,h)+(I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
#define I420_V_OFFSET(w,h) (I420_U_OFFSET(w,h)+(I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
#define I420_SIZE(w,h) (I420_V_OFFSET(w,h)+(I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
static GstElementClass *parent_class; /* NULL */
static void gst_jpeg_dec_base_init (gpointer g_class);
static void gst_jpeg_dec_class_init (GstJpegDecClass * klass);
static void gst_jpeg_dec_init (GstJpegDec * jpegdec);
static void gst_jpeg_dec_set_property (GObject * object, guint prop_id, static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec); const GValue * value, GParamSpec * pspec);
static void gst_jpeg_dec_get_property (GObject * object, guint prop_id, static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec); GValue * value, GParamSpec * pspec);
static GstFlowReturn gst_jpeg_dec_chain (GstPad * pad, GstBuffer * buffer); static GstFlowReturn gst_jpeg_dec_chain (GstPad * pad, GstBuffer * buffer);
static gboolean gst_jpeg_dec_setcaps (GstPad * pad, GstCaps * caps);
static gboolean gst_jpeg_dec_sink_event (GstPad * pad, GstEvent * event); static gboolean gst_jpeg_dec_sink_event (GstPad * pad, GstEvent * event);
static gboolean gst_jpeg_dec_src_event (GstPad * pad, GstEvent * event); static gboolean gst_jpeg_dec_src_event (GstPad * pad, GstEvent * event);
static GstStateChangeReturn gst_jpeg_dec_change_state (GstElement * element, static GstStateChangeReturn gst_jpeg_dec_change_state (GstElement * element,
@@ -123,29 +102,8 @@ static void gst_jpeg_dec_reset_qos (GstJpegDec * dec);
static void gst_jpeg_dec_read_qos (GstJpegDec * dec, gdouble * proportion, static void gst_jpeg_dec_read_qos (GstJpegDec * dec, gdouble * proportion,
GstClockTime * time); GstClockTime * time);
GType #define gst_jpeg_dec_parent_class parent_class
gst_jpeg_dec_get_type (void) G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_ELEMENT);
{
static GType type = 0;
if (!type) {
static const GTypeInfo jpeg_dec_info = {
sizeof (GstJpegDecClass),
(GBaseInitFunc) gst_jpeg_dec_base_init,
NULL,
(GClassInitFunc) gst_jpeg_dec_class_init,
NULL,
NULL,
sizeof (GstJpegDec),
0,
(GInstanceInitFunc) gst_jpeg_dec_init,
};
type = g_type_register_static (GST_TYPE_ELEMENT, "GstJpegDec",
&jpeg_dec_info, 0);
}
return type;
}
static void static void
gst_jpeg_dec_finalize (GObject * object) gst_jpeg_dec_finalize (GObject * object)
@@ -159,20 +117,6 @@ gst_jpeg_dec_finalize (GObject * object)
G_OBJECT_CLASS (parent_class)->finalize (object); G_OBJECT_CLASS (parent_class)->finalize (object);
} }
static void
gst_jpeg_dec_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
gst_element_class_set_details_simple (element_class, "JPEG image decoder",
"Codec/Decoder/Image",
"Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
}
static void static void
gst_jpeg_dec_class_init (GstJpegDecClass * klass) gst_jpeg_dec_class_init (GstJpegDecClass * klass)
{ {
@@ -209,6 +153,14 @@ gst_jpeg_dec_class_init (GstJpegDecClass * klass)
-1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS, -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
gst_element_class_set_details_simple (gstelement_class, "JPEG image decoder",
"Codec/Decoder/Image",
"Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
gstelement_class->change_state = gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_jpeg_dec_change_state); GST_DEBUG_FUNCPTR (gst_jpeg_dec_change_state);
@@ -404,8 +356,6 @@ gst_jpeg_dec_init (GstJpegDec * dec)
gst_pad_new_from_static_template (&gst_jpeg_dec_sink_pad_template, gst_pad_new_from_static_template (&gst_jpeg_dec_sink_pad_template,
"sink"); "sink");
gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad); gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);
gst_pad_set_setcaps_function (dec->sinkpad,
GST_DEBUG_FUNCPTR (gst_jpeg_dec_setcaps));
gst_pad_set_chain_function (dec->sinkpad, gst_pad_set_chain_function (dec->sinkpad,
GST_DEBUG_FUNCPTR (gst_jpeg_dec_chain)); GST_DEBUG_FUNCPTR (gst_jpeg_dec_chain));
gst_pad_set_event_function (dec->sinkpad, gst_pad_set_event_function (dec->sinkpad,
@@ -743,21 +693,19 @@ guarantee_huff_tables (j_decompress_ptr dinfo)
} }
static gboolean static gboolean
gst_jpeg_dec_setcaps (GstPad * pad, GstCaps * caps) gst_jpeg_dec_setcaps (GstJpegDec * dec, GstCaps * caps)
{ {
GstStructure *s; GstStructure *s;
GstJpegDec *dec;
const GValue *framerate; const GValue *framerate;
dec = GST_JPEG_DEC (GST_OBJECT_PARENT (pad));
s = gst_caps_get_structure (caps, 0); s = gst_caps_get_structure (caps, 0);
if ((framerate = gst_structure_get_value (s, "framerate")) != NULL) { if ((framerate = gst_structure_get_value (s, "framerate")) != NULL) {
dec->framerate_numerator = gst_value_get_fraction_numerator (framerate); dec->in_fps_n = gst_value_get_fraction_numerator (framerate);
dec->framerate_denominator = gst_value_get_fraction_denominator (framerate); dec->in_fps_d = gst_value_get_fraction_denominator (framerate);
dec->packetized = TRUE; dec->packetized = TRUE;
GST_DEBUG ("got framerate of %d/%d fps => packetized mode", GST_DEBUG ("got framerate of %d/%d fps => packetized mode",
dec->framerate_numerator, dec->framerate_denominator); dec->in_fps_n, dec->in_fps_d);
} }
/* do not extract width/height here. we do that in the chain /* do not extract width/height here. we do that in the chain
@@ -806,7 +754,7 @@ gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
{ {
gint i; gint i;
if (G_LIKELY (dec->idr_width_allocated == maxrowbytes)) if (G_LIKELY (dec->idr_width_allocated >= maxrowbytes))
return TRUE; return TRUE;
/* FIXME: maybe just alloc one or three blocks altogether? */ /* FIXME: maybe just alloc one or three blocks altogether? */
@@ -827,19 +775,28 @@ gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
} }
static void static void
gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, guchar * base[1], gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
guint width, guint height, guint pstride, guint rstride)
{ {
guchar *rows[16]; guchar *rows[16];
guchar **scanarray[1] = { rows }; guchar **scanarray[1] = { rows };
gint i, j, k; gint i, j, k;
gint lines; gint lines;
guint8 *base[1];
gint width, height;
gint pstride, rstride;
GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale"); GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
width = GST_VIDEO_FRAME_WIDTH (frame);
height = GST_VIDEO_FRAME_HEIGHT (frame);
if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width)))) if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
return; return;
base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
memcpy (rows, dec->idr_y, 16 * sizeof (gpointer)); memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
i = 0; i = 0;
@@ -863,19 +820,30 @@ gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, guchar * base[1],
} }
static void static void
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, guchar * base[3], gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
guint width, guint height, guint pstride, guint rstride)
{ {
guchar *r_rows[16], *g_rows[16], *b_rows[16]; guchar *r_rows[16], *g_rows[16], *b_rows[16];
guchar **scanarray[3] = { r_rows, g_rows, b_rows }; guchar **scanarray[3] = { r_rows, g_rows, b_rows };
gint i, j, k; gint i, j, k;
gint lines; gint lines;
guint8 *base[3];
guint pstride, rstride;
gint width, height;
GST_DEBUG_OBJECT (dec, "indirect decoding of RGB"); GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
width = GST_VIDEO_FRAME_WIDTH (frame);
height = GST_VIDEO_FRAME_HEIGHT (frame);
if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width)))) if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
return; return;
for (i = 0; i < 3; i++)
base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer)); memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer)); memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer)); memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
@@ -905,20 +873,35 @@ gst_jpeg_dec_decode_rgb (GstJpegDec * dec, guchar * base[3],
} }
static void static void
gst_jpeg_dec_decode_indirect (GstJpegDec * dec, guchar * base[3], gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame,
guchar * last[3], guint width, guint height, gint r_v, gint r_h, gint comp) gint r_v, gint r_h, gint comp)
{ {
guchar *y_rows[16], *u_rows[16], *v_rows[16]; guchar *y_rows[16], *u_rows[16], *v_rows[16];
guchar **scanarray[3] = { y_rows, u_rows, v_rows }; guchar **scanarray[3] = { y_rows, u_rows, v_rows };
gint i, j, k; gint i, j, k;
gint lines; gint lines;
guchar *base[3], *last[3];
gint stride[3];
gint width, height;
GST_DEBUG_OBJECT (dec, GST_DEBUG_OBJECT (dec,
"unadvantageous width or r_h, taking slow route involving memcpy"); "unadvantageous width or r_h, taking slow route involving memcpy");
width = GST_VIDEO_FRAME_WIDTH (frame);
height = GST_VIDEO_FRAME_HEIGHT (frame);
if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width)))) if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
return; return;
for (i = 0; i < 3; i++) {
base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
/* make sure we don't make jpeglib write beyond our buffer,
* which might happen if (height % (r_v*DCTSIZE)) != 0 */
last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
(GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
}
memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer)); memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer)); memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer)); memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
@@ -937,30 +920,30 @@ gst_jpeg_dec_decode_indirect (GstJpegDec * dec, guchar * base[3],
if (G_LIKELY (lines > 0)) { if (G_LIKELY (lines > 0)) {
for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) { for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
if (G_LIKELY (base[0] <= last[0])) { if (G_LIKELY (base[0] <= last[0])) {
memcpy (base[0], y_rows[j], I420_Y_ROWSTRIDE (width)); memcpy (base[0], y_rows[j], stride[0]);
base[0] += I420_Y_ROWSTRIDE (width); base[0] += stride[0];
} }
if (r_v == 2) { if (r_v == 2) {
if (G_LIKELY (base[0] <= last[0])) { if (G_LIKELY (base[0] <= last[0])) {
memcpy (base[0], y_rows[j + 1], I420_Y_ROWSTRIDE (width)); memcpy (base[0], y_rows[j + 1], stride[0]);
base[0] += I420_Y_ROWSTRIDE (width); base[0] += stride[0];
} }
} }
if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) { if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
if (r_h == 2) { if (r_h == 2) {
memcpy (base[1], u_rows[k], I420_U_ROWSTRIDE (width)); memcpy (base[1], u_rows[k], stride[1]);
memcpy (base[2], v_rows[k], I420_V_ROWSTRIDE (width)); memcpy (base[2], v_rows[k], stride[2]);
} else if (r_h == 1) { } else if (r_h == 1) {
hresamplecpy1 (base[1], u_rows[k], I420_U_ROWSTRIDE (width)); hresamplecpy1 (base[1], u_rows[k], stride[1]);
hresamplecpy1 (base[2], v_rows[k], I420_V_ROWSTRIDE (width)); hresamplecpy1 (base[2], v_rows[k], stride[2]);
} else { } else {
/* FIXME: implement (at least we avoid crashing by doing nothing) */ /* FIXME: implement (at least we avoid crashing by doing nothing) */
} }
} }
if (r_v == 2 || (k & 1) != 0) { if (r_v == 2 || (k & 1) != 0) {
base[1] += I420_U_ROWSTRIDE (width); base[1] += stride[1];
base[2] += I420_V_ROWSTRIDE (width); base[2] += stride[2];
} }
} }
} else { } else {
@@ -969,27 +952,8 @@ gst_jpeg_dec_decode_indirect (GstJpegDec * dec, guchar * base[3],
} }
} }
#ifndef GST_DISABLE_GST_DEBUG
static inline void
dump_lines (guchar * base[3], guchar ** line[3], int v_samp0, int width)
{
int j;
for (j = 0; j < (v_samp0 * DCTSIZE); ++j) {
GST_LOG ("[%02d] %5d %5d %5d", j,
(line[0][j] >= base[0]) ?
(int) (line[0][j] - base[0]) / I420_Y_ROWSTRIDE (width) : -1,
(line[1][j] >= base[1]) ?
(int) (line[1][j] - base[1]) / I420_U_ROWSTRIDE (width) : -1,
(line[2][j] >= base[2]) ?
(int) (line[2][j] - base[2]) / I420_V_ROWSTRIDE (width) : -1);
}
}
#endif
static GstFlowReturn static GstFlowReturn
gst_jpeg_dec_decode_direct (GstJpegDec * dec, guchar * base[3], gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
guchar * last[3], guint width, guint height)
{ {
guchar **line[3]; /* the jpeg line buffer */ guchar **line[3]; /* the jpeg line buffer */
guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */ guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
@@ -997,6 +961,9 @@ gst_jpeg_dec_decode_direct (GstJpegDec * dec, guchar * base[3],
guchar *v[4 * DCTSIZE] = { NULL, }; guchar *v[4 * DCTSIZE] = { NULL, };
gint i, j; gint i, j;
gint lines, v_samp[3]; gint lines, v_samp[3];
guchar *base[3], *last[3];
gint stride[3];
guint width, height;
line[0] = y; line[0] = y;
line[1] = u; line[1] = u;
@@ -1009,35 +976,45 @@ gst_jpeg_dec_decode_direct (GstJpegDec * dec, guchar * base[3],
if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2)) if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
goto format_not_supported; goto format_not_supported;
width = GST_VIDEO_FRAME_WIDTH (frame);
height = GST_VIDEO_FRAME_HEIGHT (frame);
for (i = 0; i < 3; i++) {
base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
/* make sure we don't make jpeglib write beyond our buffer,
* which might happen if (height % (r_v*DCTSIZE)) != 0 */
last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
(GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
}
/* let jpeglib decode directly into our final buffer */ /* let jpeglib decode directly into our final buffer */
GST_DEBUG_OBJECT (dec, "decoding directly into output buffer"); GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
for (i = 0; i < height; i += v_samp[0] * DCTSIZE) { for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) { for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
/* Y */ /* Y */
line[0][j] = base[0] + (i + j) * I420_Y_ROWSTRIDE (width); line[0][j] = base[0] + (i + j) * stride[0];
if (G_UNLIKELY (line[0][j] > last[0])) if (G_UNLIKELY (line[0][j] > last[0]))
line[0][j] = last[0]; line[0][j] = last[0];
/* U */ /* U */
if (v_samp[1] == v_samp[0]) { if (v_samp[1] == v_samp[0]) {
line[1][j] = base[1] + ((i + j) / 2) * I420_U_ROWSTRIDE (width); line[1][j] = base[1] + ((i + j) / 2) * stride[1];
} else if (j < (v_samp[1] * DCTSIZE)) { } else if (j < (v_samp[1] * DCTSIZE)) {
line[1][j] = base[1] + ((i / 2) + j) * I420_U_ROWSTRIDE (width); line[1][j] = base[1] + ((i / 2) + j) * stride[1];
} }
if (G_UNLIKELY (line[1][j] > last[1])) if (G_UNLIKELY (line[1][j] > last[1]))
line[1][j] = last[1]; line[1][j] = last[1];
/* V */ /* V */
if (v_samp[2] == v_samp[0]) { if (v_samp[2] == v_samp[0]) {
line[2][j] = base[2] + ((i + j) / 2) * I420_V_ROWSTRIDE (width); line[2][j] = base[2] + ((i + j) / 2) * stride[2];
} else if (j < (v_samp[2] * DCTSIZE)) { } else if (j < (v_samp[2] * DCTSIZE)) {
line[2][j] = base[2] + ((i / 2) + j) * I420_V_ROWSTRIDE (width); line[2][j] = base[2] + ((i / 2) + j) * stride[2];
} }
if (G_UNLIKELY (line[2][j] > last[2])) if (G_UNLIKELY (line[2][j] > last[2]))
line[2][j] = last[2]; line[2][j] = last[2];
} }
/* dump_lines (base, line, v_samp[0], width); */
lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE); lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
if (G_UNLIKELY (!lines)) { if (G_UNLIKELY (!lines)) {
GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0"); GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
@@ -1127,38 +1104,96 @@ gst_jpeg_dec_do_qos (GstJpegDec * dec, GstClockTime timestamp)
return TRUE; return TRUE;
} }
static void static gboolean
gst_jpeg_dec_buffer_pool (GstJpegDec * dec, GstCaps * caps)
{
GstQuery *query;
GstBufferPool *pool = NULL;
guint size, min, max, prefix, alignment;
GstStructure *config;
GST_DEBUG_OBJECT (dec, "setting up bufferpool");
/* find a pool for the negotiated caps now */
query = gst_query_new_allocation (caps, TRUE);
if (gst_pad_peer_query (dec->srcpad, query)) {
/* we got configuration from our peer, parse them */
gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
&alignment, &pool);
size = MAX (size, dec->info.size);
} else {
GST_DEBUG_OBJECT (dec, "peer query failed, using defaults");
size = dec->info.size;
min = max = 0;
prefix = 0;
alignment = 15;
}
gst_query_unref (query);
if (pool == NULL) {
/* we did not get a pool, make one ourselves then */
pool = gst_buffer_pool_new ();
}
config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_set (config, caps, size, min, max, prefix,
alignment | 15);
/* and store */
gst_buffer_pool_set_config (pool, config);
if (dec->pool) {
gst_buffer_pool_set_active (dec->pool, FALSE);
gst_object_unref (dec->pool);
}
dec->pool = pool;
/* and activate */
gst_buffer_pool_set_active (pool, TRUE);
return TRUE;
}
static gboolean
gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc) gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
{ {
GstCaps *caps; GstCaps *caps;
GstVideoFormat format; GstVideoFormat format;
GstVideoInfo info;
if (G_UNLIKELY (width == dec->caps_width && height == dec->caps_height && if (G_UNLIKELY (width == dec->info.width && height == dec->info.height &&
dec->framerate_numerator == dec->caps_framerate_numerator && dec->in_fps_n == dec->info.fps_n && dec->in_fps_d == dec->info.fps_d
dec->framerate_denominator == dec->caps_framerate_denominator && && clrspc == dec->clrspc))
clrspc == dec->clrspc)) return TRUE;
return;
gst_video_info_init (&info);
/* framerate == 0/1 is a still frame */ /* framerate == 0/1 is a still frame */
if (dec->framerate_denominator == 0) { if (dec->in_fps_d == 0) {
dec->framerate_numerator = 0; info.fps_n = 0;
dec->framerate_denominator = 1; info.fps_d = 1;
} else {
info.fps_n = dec->in_fps_n;
info.fps_d = dec->in_fps_d;
} }
/* calculate or assume an average frame duration for QoS purposes */ /* calculate or assume an average frame duration for QoS purposes */
GST_OBJECT_LOCK (dec); GST_OBJECT_LOCK (dec);
if (dec->framerate_numerator != 0) { if (info.fps_n != 0) {
dec->qos_duration = gst_util_uint64_scale (GST_SECOND, dec->qos_duration =
dec->framerate_denominator, dec->framerate_numerator); gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);
dec->duration = dec->qos_duration;
} else { } else {
/* if not set just use 25fps */ /* if not set just use 25fps */
dec->qos_duration = gst_util_uint64_scale (GST_SECOND, 1, 25); dec->qos_duration = gst_util_uint64_scale (GST_SECOND, 1, 25);
dec->duration = GST_CLOCK_TIME_NONE;
} }
GST_OBJECT_UNLOCK (dec); GST_OBJECT_UNLOCK (dec);
if (dec->cinfo.jpeg_color_space == JCS_RGB) { if (dec->cinfo.jpeg_color_space == JCS_RGB) {
gint i; gint i;
GstCaps *allowed_caps; GstCaps *allowed_caps;
GstVideoInfo tmpinfo;
GST_DEBUG_OBJECT (dec, "selecting RGB format"); GST_DEBUG_OBJECT (dec, "selecting RGB format");
/* retrieve allowed caps, and find the first one that reasonably maps /* retrieve allowed caps, and find the first one that reasonably maps
@@ -1170,7 +1205,7 @@ gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
* and get_pad_template_caps doesn't */ * and get_pad_template_caps doesn't */
caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad)); caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
} }
/* avoid lists of fourcc, etc */ /* avoid lists of formats, etc */
allowed_caps = gst_caps_normalize (caps); allowed_caps = gst_caps_normalize (caps);
gst_caps_unref (caps); gst_caps_unref (caps);
caps = NULL; caps = NULL;
@@ -1183,10 +1218,11 @@ gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
/* sigh, ds and _parse_caps need fixed caps for parsing, fixate */ /* sigh, ds and _parse_caps need fixed caps for parsing, fixate */
gst_pad_fixate_caps (dec->srcpad, caps); gst_pad_fixate_caps (dec->srcpad, caps);
GST_LOG_OBJECT (dec, "checking caps %" GST_PTR_FORMAT, caps); GST_LOG_OBJECT (dec, "checking caps %" GST_PTR_FORMAT, caps);
if (!gst_video_format_parse_caps (caps, &format, NULL, NULL))
if (!gst_video_info_from_caps (&tmpinfo, caps))
continue; continue;
/* we'll settle for the first (preferred) downstream rgb format */ /* we'll settle for the first (preferred) downstream rgb format */
if (gst_video_format_is_rgb (format)) if (GST_VIDEO_INFO_IS_RGB (&tmpinfo))
break; break;
/* default fall-back */ /* default fall-back */
format = GST_VIDEO_FORMAT_RGB; format = GST_VIDEO_FORMAT_RGB;
@@ -1194,51 +1230,31 @@ gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
if (caps) if (caps)
gst_caps_unref (caps); gst_caps_unref (caps);
gst_caps_unref (allowed_caps); gst_caps_unref (allowed_caps);
caps = gst_video_format_new_caps (format, width, height,
dec->framerate_numerator, dec->framerate_denominator, 1, 1);
dec->outsize = gst_video_format_get_size (format, width, height);
/* some format info */
dec->offset[0] =
gst_video_format_get_component_offset (format, 0, width, height);
dec->offset[1] =
gst_video_format_get_component_offset (format, 1, width, height);
dec->offset[2] =
gst_video_format_get_component_offset (format, 2, width, height);
/* equal for all components */
dec->stride = gst_video_format_get_row_stride (format, 0, width);
dec->inc = gst_video_format_get_pixel_stride (format, 0);
} else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) { } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
/* TODO is anything else then 8bit supported in jpeg? */ /* TODO is anything else then 8bit supported in jpeg? */
format = GST_VIDEO_FORMAT_GRAY8; format = GST_VIDEO_FORMAT_GRAY8;
caps = gst_video_format_new_caps (format, width, height,
dec->framerate_numerator, dec->framerate_denominator, 1, 1);
dec->outsize = gst_video_format_get_size (format, width, height);
dec->offset[0] =
gst_video_format_get_component_offset (format, 0, width, height);
dec->stride = gst_video_format_get_row_stride (format, 0, width);
dec->inc = gst_video_format_get_pixel_stride (format, 0);
} else { } else {
/* go for plain and simple I420 */ /* go for plain and simple I420 */
/* TODO other YUV cases ? */ /* TODO other YUV cases ? */
caps = gst_caps_new_simple ("video/x-raw-yuv", format = GST_VIDEO_FORMAT_I420;
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
"width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, dec->framerate_numerator,
dec->framerate_denominator, NULL);
dec->outsize = I420_SIZE (width, height);
} }
gst_video_info_set_format (&info, format, width, height);
caps = gst_video_info_to_caps (&info);
GST_DEBUG_OBJECT (dec, "setting caps %" GST_PTR_FORMAT, caps); GST_DEBUG_OBJECT (dec, "setting caps %" GST_PTR_FORMAT, caps);
GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor); GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor); GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
gst_pad_set_caps (dec->srcpad, caps); gst_pad_set_caps (dec->srcpad, caps);
dec->info = info;
dec->clrspc = clrspc;
gst_jpeg_dec_buffer_pool (dec, caps);
gst_caps_unref (caps); gst_caps_unref (caps);
dec->caps_width = width; return TRUE;
dec->caps_height = height;
dec->caps_framerate_numerator = dec->framerate_numerator;
dec->caps_framerate_denominator = dec->framerate_denominator;
} }
static GstFlowReturn static GstFlowReturn
@@ -1247,17 +1263,12 @@ gst_jpeg_dec_chain (GstPad * pad, GstBuffer * buf)
GstFlowReturn ret = GST_FLOW_OK; GstFlowReturn ret = GST_FLOW_OK;
GstJpegDec *dec; GstJpegDec *dec;
GstBuffer *outbuf = NULL; GstBuffer *outbuf = NULL;
#ifndef GST_DISABLE_GST_DEBUG
guchar *data;
#endif
guchar *outdata;
guchar *base[3], *last[3];
gint img_len; gint img_len;
guint outsize;
gint width, height; gint width, height;
gint r_h, r_v; gint r_h, r_v;
guint code, hdr_ok; guint code, hdr_ok;
GstClockTime timestamp, duration; GstClockTime timestamp, duration;
GstVideoFrame frame;
dec = GST_JPEG_DEC (GST_PAD_PARENT (pad)); dec = GST_JPEG_DEC (GST_PAD_PARENT (pad));
@@ -1323,9 +1334,13 @@ again:
goto skip_decoding; goto skip_decoding;
#ifndef GST_DISABLE_GST_DEBUG #ifndef GST_DISABLE_GST_DEBUG
data = (guint8 *) gst_adapter_peek (dec->adapter, 4); {
guchar data[4];
gst_adapter_copy (dec->adapter, data, 0, 4);
GST_LOG_OBJECT (dec, "reading header %02x %02x %02x %02x", data[0], data[1], GST_LOG_OBJECT (dec, "reading header %02x %02x %02x %02x", data[0], data[1],
data[2], data[3]); data[2], data[3]);
}
#endif #endif
gst_jpeg_dec_fill_input_buffer (&dec->cinfo); gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
@@ -1425,16 +1440,14 @@ again:
gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space); gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
ret = gst_pad_alloc_buffer_and_set_caps (dec->srcpad, GST_BUFFER_OFFSET_NONE, ret = gst_buffer_pool_acquire_buffer (dec->pool, &outbuf, NULL);
dec->outsize, GST_PAD_CAPS (dec->srcpad), &outbuf);
if (G_UNLIKELY (ret != GST_FLOW_OK)) if (G_UNLIKELY (ret != GST_FLOW_OK))
goto alloc_failed; goto alloc_failed;
outdata = GST_BUFFER_DATA (outbuf); if (!gst_video_frame_map (&frame, &dec->info, outbuf, GST_MAP_READWRITE))
outsize = GST_BUFFER_SIZE (outbuf); goto invalid_frame;
GST_LOG_OBJECT (dec, "width %d, height %d, buffer size %d, required size %d", GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
width, height, outsize, dec->outsize);
GST_BUFFER_TIMESTAMP (outbuf) = dec->next_ts; GST_BUFFER_TIMESTAMP (outbuf) = dec->next_ts;
@@ -1442,10 +1455,9 @@ again:
if (GST_CLOCK_TIME_IS_VALID (duration)) { if (GST_CLOCK_TIME_IS_VALID (duration)) {
/* use duration from incoming buffer for outgoing buffer */ /* use duration from incoming buffer for outgoing buffer */
dec->next_ts += duration; dec->next_ts += duration;
} else if (dec->framerate_numerator != 0) { } else if (GST_CLOCK_TIME_IS_VALID (dec->duration)) {
duration = gst_util_uint64_scale (GST_SECOND, duration = dec->duration;
dec->framerate_denominator, dec->framerate_numerator); dec->next_ts += dec->duration;
dec->next_ts += duration;
} else { } else {
duration = GST_CLOCK_TIME_NONE; duration = GST_CLOCK_TIME_NONE;
dec->next_ts = GST_CLOCK_TIME_NONE; dec->next_ts = GST_CLOCK_TIME_NONE;
@ -1457,32 +1469,10 @@ again:
GST_BUFFER_DURATION (outbuf) = duration; GST_BUFFER_DURATION (outbuf) = duration;
if (dec->cinfo.jpeg_color_space == JCS_RGB) { if (dec->cinfo.jpeg_color_space == JCS_RGB) {
base[0] = outdata + dec->offset[0]; gst_jpeg_dec_decode_rgb (dec, &frame);
base[1] = outdata + dec->offset[1];
base[2] = outdata + dec->offset[2];
gst_jpeg_dec_decode_rgb (dec, base, width, height, dec->inc, dec->stride);
} else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) { } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
base[0] = outdata + dec->offset[0]; gst_jpeg_dec_decode_grayscale (dec, &frame);
gst_jpeg_dec_decode_grayscale (dec, base, width, height, dec->inc,
dec->stride);
} else { } else {
/* mind the swap, jpeglib outputs blue chroma first
* ensonic: I see no swap?
*/
base[0] = outdata + I420_Y_OFFSET (width, height);
base[1] = outdata + I420_U_OFFSET (width, height);
base[2] = outdata + I420_V_OFFSET (width, height);
/* make sure we don't make jpeglib write beyond our buffer,
* which might happen if (height % (r_v*DCTSIZE)) != 0 */
last[0] = base[0] + (I420_Y_ROWSTRIDE (width) * (height - 1));
last[1] =
base[1] + (I420_U_ROWSTRIDE (width) * ((GST_ROUND_UP_2 (height) / 2) -
1));
last[2] =
base[2] + (I420_V_ROWSTRIDE (width) * ((GST_ROUND_UP_2 (height) / 2) -
1));
GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)", GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
dec->cinfo.rec_outbuf_height); dec->cinfo.rec_outbuf_height);
@ -1497,11 +1487,10 @@ again:
|| dec->cinfo.comp_info[2].h_samp_factor != 1)) { || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec, GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
"indirect decoding using extra buffer copy"); "indirect decoding using extra buffer copy");
gst_jpeg_dec_decode_indirect (dec, base, last, width, height, r_v, r_h, gst_jpeg_dec_decode_indirect (dec, &frame, r_v, r_h,
dec->cinfo.num_components); dec->cinfo.num_components);
} else { } else {
ret = gst_jpeg_dec_decode_direct (dec, base, last, width, height); ret = gst_jpeg_dec_decode_direct (dec, &frame);
if (G_UNLIKELY (ret != GST_FLOW_OK)) if (G_UNLIKELY (ret != GST_FLOW_OK))
goto decode_direct_failed; goto decode_direct_failed;
} }
@ -1510,9 +1499,11 @@ again:
GST_LOG_OBJECT (dec, "decompressing finished"); GST_LOG_OBJECT (dec, "decompressing finished");
jpeg_finish_decompress (&dec->cinfo); jpeg_finish_decompress (&dec->cinfo);
gst_video_frame_unmap (&frame);
/* Clipping */ /* Clipping */
if (dec->segment.format == GST_FORMAT_TIME) { if (dec->segment.format == GST_FORMAT_TIME) {
gint64 start, stop, clip_start, clip_stop; guint64 start, stop, clip_start, clip_stop;
GST_LOG_OBJECT (dec, "Attempting clipping"); GST_LOG_OBJECT (dec, "Attempting clipping");
@ -1618,6 +1609,13 @@ alloc_failed:
} }
goto exit; goto exit;
} }
invalid_frame:
{
jpeg_abort_decompress (&dec->cinfo);
gst_buffer_unref (outbuf);
ret = GST_FLOW_OK;
goto exit;
}
drop_buffer: drop_buffer:
{ {
GST_WARNING_OBJECT (dec, "Outgoing buffer is outside configured segment"); GST_WARNING_OBJECT (dec, "Outgoing buffer is outside configured segment");
@ -1663,11 +1661,12 @@ gst_jpeg_dec_src_event (GstPad * pad, GstEvent * event)
switch (GST_EVENT_TYPE (event)) { switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_QOS:{ case GST_EVENT_QOS:{
GstQOSType type;
GstClockTimeDiff diff; GstClockTimeDiff diff;
GstClockTime timestamp; GstClockTime timestamp;
gdouble proportion; gdouble proportion;
gst_event_parse_qos (event, &proportion, &diff, &timestamp); gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
gst_jpeg_dec_update_qos (dec, proportion, diff, timestamp); gst_jpeg_dec_update_qos (dec, proportion, diff, timestamp);
break; break;
} }
@ -1684,7 +1683,7 @@ gst_jpeg_dec_src_event (GstPad * pad, GstEvent * event)
static gboolean static gboolean
gst_jpeg_dec_sink_event (GstPad * pad, GstEvent * event) gst_jpeg_dec_sink_event (GstPad * pad, GstEvent * event)
{ {
gboolean ret = TRUE; gboolean ret = TRUE, forward = TRUE;
GstJpegDec *dec = GST_JPEG_DEC (GST_OBJECT_PARENT (pad)); GstJpegDec *dec = GST_JPEG_DEC (GST_OBJECT_PARENT (pad));
GST_DEBUG_OBJECT (dec, "event : %s", GST_EVENT_TYPE_NAME (event)); GST_DEBUG_OBJECT (dec, "event : %s", GST_EVENT_TYPE_NAME (event));
@ -1702,30 +1701,28 @@ gst_jpeg_dec_sink_event (GstPad * pad, GstEvent * event)
dec->parse_resync = FALSE; dec->parse_resync = FALSE;
gst_jpeg_dec_reset_qos (dec); gst_jpeg_dec_reset_qos (dec);
break; break;
case GST_EVENT_NEWSEGMENT:{ case GST_EVENT_SEGMENT:
gboolean update; gst_event_copy_segment (event, &dec->segment);
gdouble rate, applied_rate; GST_DEBUG_OBJECT (dec, "Got NEWSEGMENT %" GST_SEGMENT_FORMAT,
GstFormat format; &dec->segment);
gint64 start, stop, position; break;
case GST_EVENT_CAPS:
gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate, {
&format, &start, &stop, &position); GstCaps *caps;
GST_DEBUG_OBJECT (dec, "Got NEWSEGMENT [%" GST_TIME_FORMAT
" - %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "]",
GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
GST_TIME_ARGS (position));
gst_segment_set_newsegment_full (&dec->segment, update, rate,
applied_rate, format, start, stop, position);
gst_event_parse_caps (event, &caps);
ret = gst_jpeg_dec_setcaps (dec, caps);
forward = FALSE;
break; break;
} }
default: default:
break; break;
} }
if (forward)
ret = gst_pad_push_event (dec->srcpad, event); ret = gst_pad_push_event (dec->srcpad, event);
else
gst_event_unref (event);
return ret; return ret;
} }
@ -1786,11 +1783,9 @@ gst_jpeg_dec_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_READY_TO_PAUSED: case GST_STATE_CHANGE_READY_TO_PAUSED:
dec->error_count = 0; dec->error_count = 0;
dec->good_count = 0; dec->good_count = 0;
dec->framerate_numerator = 0; dec->in_fps_n = 0;
dec->framerate_denominator = 1; dec->in_fps_d = 1;
dec->caps_framerate_numerator = dec->caps_framerate_denominator = 0; gst_video_info_init (&dec->info);
dec->caps_width = -1;
dec->caps_height = -1;
dec->clrspc = -1; dec->clrspc = -1;
dec->packetized = FALSE; dec->packetized = FALSE;
dec->next_ts = 0; dec->next_ts = 0;
@ -1815,6 +1810,11 @@ gst_jpeg_dec_change_state (GstElement * element, GstStateChange transition)
g_free (dec->cur_buf); g_free (dec->cur_buf);
dec->cur_buf = NULL; dec->cur_buf = NULL;
gst_jpeg_dec_free_buffers (dec); gst_jpeg_dec_free_buffers (dec);
if (dec->pool) {
gst_buffer_pool_set_active (dec->pool, FALSE);
gst_object_unref (dec->pool);
}
dec->pool = NULL;
break; break;
default: default:
break; break;

View file

@ -90,22 +90,17 @@ struct _GstJpegDec {
GstClockTime earliest_time; GstClockTime earliest_time;
GstClockTime qos_duration; GstClockTime qos_duration;
/* video state */ /* input state */
gint framerate_numerator; gint in_fps_n;
gint framerate_denominator; gint in_fps_d;
/* negotiated output state */
GstBufferPool *pool;
GstVideoInfo info;
GstClockTime duration;
/* negotiated state */
gint caps_framerate_numerator;
gint caps_framerate_denominator;
gint caps_width;
gint caps_height;
gint outsize;
gint clrspc; gint clrspc;
gint offset[3];
gint stride;
gint inc;
/* parse state */ /* parse state */
gint parse_offset; gint parse_offset;
gint parse_entropy_len; gint parse_entropy_len;

View file

@ -54,7 +54,6 @@ GST_DEBUG_CATEGORY_STATIC (jpegenc_debug);
/* JpegEnc signals and args */ /* JpegEnc signals and args */
enum enum
{ {
FRAME_ENCODED,
/* FILL ME */ /* FILL ME */
LAST_SIGNAL LAST_SIGNAL
}; };
@ -71,8 +70,8 @@ static void gst_jpegenc_reset (GstJpegEnc * enc);
static void gst_jpegenc_finalize (GObject * object); static void gst_jpegenc_finalize (GObject * object);
static GstFlowReturn gst_jpegenc_chain (GstPad * pad, GstBuffer * buf); static GstFlowReturn gst_jpegenc_chain (GstPad * pad, GstBuffer * buf);
static gboolean gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps); static gboolean gst_jpegenc_sink_event (GstPad * pad, GstEvent * event);
static GstCaps *gst_jpegenc_getcaps (GstPad * pad); static GstCaps *gst_jpegenc_getcaps (GstPad * pad, GstCaps * filter);
static void gst_jpegenc_resync (GstJpegEnc * jpegenc); static void gst_jpegenc_resync (GstJpegEnc * jpegenc);
static void gst_jpegenc_set_property (GObject * object, guint prop_id, static void gst_jpegenc_set_property (GObject * object, guint prop_id,
@ -82,8 +81,6 @@ static void gst_jpegenc_get_property (GObject * object, guint prop_id,
static GstStateChangeReturn gst_jpegenc_change_state (GstElement * element, static GstStateChangeReturn gst_jpegenc_change_state (GstElement * element,
GstStateChange transition); GstStateChange transition);
static guint gst_jpegenc_signals[LAST_SIGNAL] = { 0 };
#define gst_jpegenc_parent_class parent_class #define gst_jpegenc_parent_class parent_class
G_DEFINE_TYPE (GstJpegEnc, gst_jpegenc, GST_TYPE_ELEMENT); G_DEFINE_TYPE (GstJpegEnc, gst_jpegenc, GST_TYPE_ELEMENT);
@ -121,11 +118,6 @@ gst_jpegenc_class_init (GstJpegEncClass * klass)
gobject_class->set_property = gst_jpegenc_set_property; gobject_class->set_property = gst_jpegenc_set_property;
gobject_class->get_property = gst_jpegenc_get_property; gobject_class->get_property = gst_jpegenc_get_property;
gst_jpegenc_signals[FRAME_ENCODED] =
g_signal_new ("frame-encoded", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstJpegEncClass, frame_encoded), NULL,
NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
g_object_class_install_property (gobject_class, PROP_QUALITY, g_object_class_install_property (gobject_class, PROP_QUALITY,
g_param_spec_int ("quality", "Quality", "Quality of encoding", g_param_spec_int ("quality", "Quality", "Quality of encoding",
0, 100, JPEG_DEFAULT_QUALITY, 0, 100, JPEG_DEFAULT_QUALITY,
@ -159,6 +151,42 @@ gst_jpegenc_class_init (GstJpegEncClass * klass)
"JPEG encoding element"); "JPEG encoding element");
} }
static void
ensure_memory (GstJpegEnc * jpegenc)
{
GstMemory *new_memory;
gsize old_size, desired_size, new_size;
guint8 *new_data;
old_size = jpegenc->output_size;
if (old_size == 0)
desired_size = jpegenc->bufsize;
else
desired_size = old_size * 2;
/* Our output memory wasn't big enough.
* Make a new memory that's twice the size, */
new_memory = gst_allocator_alloc (NULL, desired_size, 3);
new_data = gst_memory_map (new_memory, &new_size, NULL, GST_MAP_READWRITE);
/* copy previous data if any */
if (jpegenc->output_mem) {
memcpy (new_data, jpegenc->output_data, old_size);
gst_memory_unmap (jpegenc->output_mem, jpegenc->output_data,
jpegenc->output_size);
gst_memory_unref (jpegenc->output_mem);
}
/* drop it into place, */
jpegenc->output_mem = new_memory;
jpegenc->output_data = new_data;
jpegenc->output_size = new_size;
/* and last, update libjpeg on where to work. */
jpegenc->jdest.next_output_byte = new_data + old_size;
jpegenc->jdest.free_in_buffer = new_size - old_size;
}
static void static void
gst_jpegenc_init_destination (j_compress_ptr cinfo) gst_jpegenc_init_destination (j_compress_ptr cinfo)
{ {
@ -168,34 +196,12 @@ gst_jpegenc_init_destination (j_compress_ptr cinfo)
static boolean static boolean
gst_jpegenc_flush_destination (j_compress_ptr cinfo) gst_jpegenc_flush_destination (j_compress_ptr cinfo)
{ {
GstBuffer *overflow_buffer;
guint32 old_buffer_size;
GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data); GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data);
GST_DEBUG_OBJECT (jpegenc, GST_DEBUG_OBJECT (jpegenc,
"gst_jpegenc_chain: flush_destination: buffer too small"); "gst_jpegenc_chain: flush_destination: buffer too small");
/* Our output buffer wasn't big enough. ensure_memory (jpegenc);
* Make a new buffer that's twice the size, */
old_buffer_size = GST_BUFFER_SIZE (jpegenc->output_buffer);
gst_pad_alloc_buffer_and_set_caps (jpegenc->srcpad,
GST_BUFFER_OFFSET_NONE, old_buffer_size * 2,
GST_PAD_CAPS (jpegenc->srcpad), &overflow_buffer);
memcpy (GST_BUFFER_DATA (overflow_buffer),
GST_BUFFER_DATA (jpegenc->output_buffer), old_buffer_size);
gst_buffer_copy_metadata (overflow_buffer, jpegenc->output_buffer,
GST_BUFFER_COPY_TIMESTAMPS);
/* drop it into place, */
gst_buffer_unref (jpegenc->output_buffer);
jpegenc->output_buffer = overflow_buffer;
/* and last, update libjpeg on where to work. */
jpegenc->jdest.next_output_byte =
GST_BUFFER_DATA (jpegenc->output_buffer) + old_buffer_size;
jpegenc->jdest.free_in_buffer =
GST_BUFFER_SIZE (jpegenc->output_buffer) - old_buffer_size;
return TRUE; return TRUE;
} }
@ -206,14 +212,11 @@ gst_jpegenc_term_destination (j_compress_ptr cinfo)
GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data); GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data);
GST_DEBUG_OBJECT (jpegenc, "gst_jpegenc_chain: term_source"); GST_DEBUG_OBJECT (jpegenc, "gst_jpegenc_chain: term_source");
/* Trim the buffer size and push it. */ /* Trim the buffer size. we will push it in the chain function */
GST_BUFFER_SIZE (jpegenc->output_buffer) = gst_memory_unmap (jpegenc->output_mem, jpegenc->output_data,
GST_BUFFER_SIZE (jpegenc->output_buffer) - jpegenc->jdest.free_in_buffer; jpegenc->output_size - jpegenc->jdest.free_in_buffer);
jpegenc->output_data = NULL;
g_signal_emit (G_OBJECT (jpegenc), gst_jpegenc_signals[FRAME_ENCODED], 0); jpegenc->output_size = 0;
jpegenc->last_ret = gst_pad_push (jpegenc->srcpad, jpegenc->output_buffer);
jpegenc->output_buffer = NULL;
} }
static void static void
@ -226,8 +229,8 @@ gst_jpegenc_init (GstJpegEnc * jpegenc)
GST_DEBUG_FUNCPTR (gst_jpegenc_chain)); GST_DEBUG_FUNCPTR (gst_jpegenc_chain));
gst_pad_set_getcaps_function (jpegenc->sinkpad, gst_pad_set_getcaps_function (jpegenc->sinkpad,
GST_DEBUG_FUNCPTR (gst_jpegenc_getcaps)); GST_DEBUG_FUNCPTR (gst_jpegenc_getcaps));
gst_pad_set_setcaps_function (jpegenc->sinkpad, gst_pad_set_event_function (jpegenc->sinkpad,
GST_DEBUG_FUNCPTR (gst_jpegenc_setcaps)); GST_DEBUG_FUNCPTR (gst_jpegenc_sink_event));
gst_element_add_pad (GST_ELEMENT (jpegenc), jpegenc->sinkpad); gst_element_add_pad (GST_ELEMENT (jpegenc), jpegenc->sinkpad);
jpegenc->srcpad = jpegenc->srcpad =
@ -236,8 +239,7 @@ gst_jpegenc_init (GstJpegEnc * jpegenc)
gst_element_add_pad (GST_ELEMENT (jpegenc), jpegenc->srcpad); gst_element_add_pad (GST_ELEMENT (jpegenc), jpegenc->srcpad);
/* reset the initial video state */ /* reset the initial video state */
jpegenc->width = -1; gst_video_info_init (&jpegenc->info);
jpegenc->height = -1;
/* setup jpeglib */ /* setup jpeglib */
memset (&jpegenc->cinfo, 0, sizeof (jpegenc->cinfo)); memset (&jpegenc->cinfo, 0, sizeof (jpegenc->cinfo));
@ -277,11 +279,7 @@ gst_jpegenc_reset (GstJpegEnc * enc)
} }
} }
enc->width = -1; gst_video_info_init (&enc->info);
enc->height = -1;
enc->format = GST_VIDEO_FORMAT_UNKNOWN;
enc->fps_den = enc->par_den = 0;
enc->height = enc->width = 0;
} }
static void static void
@ -295,7 +293,7 @@ gst_jpegenc_finalize (GObject * object)
} }
static GstCaps * static GstCaps *
gst_jpegenc_getcaps (GstPad * pad) gst_jpegenc_getcaps (GstPad * pad, GstCaps * filter)
{ {
GstJpegEnc *jpegenc = GST_JPEGENC (gst_pad_get_parent (pad)); GstJpegEnc *jpegenc = GST_JPEGENC (gst_pad_get_parent (pad));
GstCaps *caps, *othercaps; GstCaps *caps, *othercaps;
@ -306,7 +304,7 @@ gst_jpegenc_getcaps (GstPad * pad)
/* we want to proxy properties like width, height and framerate from the /* we want to proxy properties like width, height and framerate from the
other end of the element */ other end of the element */
othercaps = gst_pad_peer_get_caps_reffed (jpegenc->srcpad); othercaps = gst_pad_peer_get_caps (jpegenc->srcpad, filter);
if (othercaps == NULL || if (othercaps == NULL ||
gst_caps_is_empty (othercaps) || gst_caps_is_any (othercaps)) { gst_caps_is_empty (othercaps) || gst_caps_is_any (othercaps)) {
caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad)); caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
@ -343,59 +341,41 @@ done:
} }
static gboolean static gboolean
gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps) gst_jpegenc_setcaps (GstJpegEnc * enc, GstCaps * caps)
{ {
GstJpegEnc *enc = GST_JPEGENC (gst_pad_get_parent (pad)); GstVideoInfo info;
GstVideoFormat format;
gint width, height;
gint fps_num, fps_den;
gint par_num, par_den;
gint i; gint i;
GstCaps *othercaps; GstCaps *othercaps;
gboolean ret; gboolean ret;
const GstVideoFormatInfo *vinfo;
/* get info from caps */ /* get info from caps */
if (!gst_video_format_parse_caps (caps, &format, &width, &height)) if (!gst_video_info_from_caps (&info, caps))
goto refuse_caps; goto refuse_caps;
/* optional; pass along if present */
fps_num = fps_den = -1;
par_num = par_den = -1;
gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);
if (width == enc->width && height == enc->height && enc->format == format
&& fps_num == enc->fps_num && fps_den == enc->fps_den
&& par_num == enc->par_num && par_den == enc->par_den)
return TRUE;
/* store input description */ /* store input description */
enc->format = format; enc->info = info;
enc->width = width;
enc->height = height; vinfo = info.finfo;
enc->fps_num = fps_num;
enc->fps_den = fps_den;
enc->par_num = par_num;
enc->par_den = par_den;
/* prepare a cached image description */ /* prepare a cached image description */
enc->channels = 3 + (gst_video_format_has_alpha (format) ? 1 : 0); enc->channels = 3 + (GST_VIDEO_FORMAT_INFO_HAS_ALPHA (vinfo) ? 1 : 0);
/* ... but any alpha is disregarded in encoding */ /* ... but any alpha is disregarded in encoding */
if (gst_video_format_is_gray (format)) if (GST_VIDEO_FORMAT_INFO_IS_GRAY (vinfo))
enc->channels = 1; enc->channels = 1;
else else
enc->channels = 3; enc->channels = 3;
enc->h_max_samp = 0; enc->h_max_samp = 0;
enc->v_max_samp = 0; enc->v_max_samp = 0;
for (i = 0; i < enc->channels; ++i) { for (i = 0; i < enc->channels; ++i) {
enc->cwidth[i] = gst_video_format_get_component_width (format, i, width); enc->cwidth[i] = GST_VIDEO_INFO_COMP_WIDTH (&info, i);
enc->cheight[i] = gst_video_format_get_component_height (format, i, height); enc->cheight[i] = GST_VIDEO_INFO_COMP_HEIGHT (&info, i);
enc->offset[i] = gst_video_format_get_component_offset (format, i, width, enc->inc[i] = GST_VIDEO_INFO_COMP_PSTRIDE (&info, i);
height);
enc->stride[i] = gst_video_format_get_row_stride (format, i, width); enc->h_samp[i] = GST_ROUND_UP_4 (info.width) / enc->cwidth[i];
enc->inc[i] = gst_video_format_get_pixel_stride (format, i);
enc->h_samp[i] = GST_ROUND_UP_4 (width) / enc->cwidth[i];
enc->h_max_samp = MAX (enc->h_max_samp, enc->h_samp[i]); enc->h_max_samp = MAX (enc->h_max_samp, enc->h_samp[i]);
enc->v_samp[i] = GST_ROUND_UP_4 (height) / enc->cheight[i]; enc->v_samp[i] = GST_ROUND_UP_4 (info.height) / enc->cheight[i];
enc->v_max_samp = MAX (enc->v_max_samp, enc->v_samp[i]); enc->v_max_samp = MAX (enc->v_max_samp, enc->v_samp[i]);
} }
/* samp should only be 1, 2 or 4 */ /* samp should only be 1, 2 or 4 */
@ -411,14 +391,13 @@ gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps)
othercaps = gst_caps_copy (gst_pad_get_pad_template_caps (enc->srcpad)); othercaps = gst_caps_copy (gst_pad_get_pad_template_caps (enc->srcpad));
gst_caps_set_simple (othercaps, gst_caps_set_simple (othercaps,
"width", G_TYPE_INT, enc->width, "height", G_TYPE_INT, enc->height, NULL); "width", G_TYPE_INT, info.width, "height", G_TYPE_INT, info.height, NULL);
if (enc->fps_den > 0) if (info.fps_d > 0)
gst_caps_set_simple (othercaps, gst_caps_set_simple (othercaps,
"framerate", GST_TYPE_FRACTION, enc->fps_num, enc->fps_den, NULL); "framerate", GST_TYPE_FRACTION, info.fps_n, info.fps_d, NULL);
if (enc->par_den > 0) if (info.par_d > 0)
gst_caps_set_simple (othercaps, gst_caps_set_simple (othercaps,
"pixel-aspect-ratio", GST_TYPE_FRACTION, enc->par_num, enc->par_den, "pixel-aspect-ratio", GST_TYPE_FRACTION, info.par_n, info.par_d, NULL);
NULL);
ret = gst_pad_set_caps (enc->srcpad, othercaps); ret = gst_pad_set_caps (enc->srcpad, othercaps);
gst_caps_unref (othercaps); gst_caps_unref (othercaps);
@ -426,38 +405,64 @@ gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps)
if (ret) if (ret)
gst_jpegenc_resync (enc); gst_jpegenc_resync (enc);
gst_object_unref (enc);
return ret; return ret;
/* ERRORS */ /* ERRORS */
refuse_caps: refuse_caps:
{ {
GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps); GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps);
gst_object_unref (enc);
return FALSE; return FALSE;
} }
} }
static gboolean
gst_jpegenc_sink_event (GstPad * pad, GstEvent * event)
{
gboolean res;
GstJpegEnc *enc = GST_JPEGENC (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
res = gst_jpegenc_setcaps (enc, caps);
break;
}
default:
res = gst_pad_event_default (pad, event);
break;
}
gst_object_unref (enc);
return res;
}
static void static void
gst_jpegenc_resync (GstJpegEnc * jpegenc) gst_jpegenc_resync (GstJpegEnc * jpegenc)
{ {
gint width, height; gint width, height;
gint i, j; gint i, j;
const GstVideoFormatInfo *finfo;
GST_DEBUG_OBJECT (jpegenc, "resync"); GST_DEBUG_OBJECT (jpegenc, "resync");
jpegenc->cinfo.image_width = width = jpegenc->width; finfo = jpegenc->info.finfo;
jpegenc->cinfo.image_height = height = jpegenc->height;
jpegenc->cinfo.image_width = width = GST_VIDEO_INFO_WIDTH (&jpegenc->info);
jpegenc->cinfo.image_height = height = GST_VIDEO_INFO_HEIGHT (&jpegenc->info);
jpegenc->cinfo.input_components = jpegenc->channels; jpegenc->cinfo.input_components = jpegenc->channels;
GST_DEBUG_OBJECT (jpegenc, "width %d, height %d", width, height); GST_DEBUG_OBJECT (jpegenc, "width %d, height %d", width, height);
GST_DEBUG_OBJECT (jpegenc, "format %d", jpegenc->format); GST_DEBUG_OBJECT (jpegenc, "format %d",
GST_VIDEO_INFO_FORMAT (&jpegenc->info));
if (gst_video_format_is_rgb (jpegenc->format)) { if (GST_VIDEO_FORMAT_INFO_IS_RGB (finfo)) {
GST_DEBUG_OBJECT (jpegenc, "RGB"); GST_DEBUG_OBJECT (jpegenc, "RGB");
jpegenc->cinfo.in_color_space = JCS_RGB; jpegenc->cinfo.in_color_space = JCS_RGB;
} else if (gst_video_format_is_gray (jpegenc->format)) { } else if (GST_VIDEO_FORMAT_INFO_IS_GRAY (finfo)) {
GST_DEBUG_OBJECT (jpegenc, "gray"); GST_DEBUG_OBJECT (jpegenc, "gray");
jpegenc->cinfo.in_color_space = JCS_GRAYSCALE; jpegenc->cinfo.in_color_space = JCS_GRAYSCALE;
} else { } else {
@ -466,7 +471,7 @@ gst_jpegenc_resync (GstJpegEnc * jpegenc)
} }
/* input buffer size as max output */ /* input buffer size as max output */
jpegenc->bufsize = gst_video_format_get_size (jpegenc->format, width, height); jpegenc->bufsize = GST_VIDEO_INFO_SIZE (&jpegenc->info);
jpeg_set_defaults (&jpegenc->cinfo); jpeg_set_defaults (&jpegenc->cinfo);
jpegenc->cinfo.raw_data_in = TRUE; jpegenc->cinfo.raw_data_in = TRUE;
/* duh, libjpeg maps RGB to YUV ... and don't expect some conversion */ /* duh, libjpeg maps RGB to YUV ... and don't expect some conversion */
@ -506,42 +511,41 @@ gst_jpegenc_chain (GstPad * pad, GstBuffer * buf)
{ {
GstFlowReturn ret; GstFlowReturn ret;
GstJpegEnc *jpegenc; GstJpegEnc *jpegenc;
guchar *data; guint height, width;
gulong size;
guint height;
guchar *base[3], *end[3]; guchar *base[3], *end[3];
guint stride[3];
gint i, j, k; gint i, j, k;
GstBuffer *outbuf;
GstVideoFrame frame;
jpegenc = GST_JPEGENC (GST_OBJECT_PARENT (pad)); jpegenc = GST_JPEGENC (GST_OBJECT_PARENT (pad));
if (G_UNLIKELY (jpegenc->width <= 0 || jpegenc->height <= 0)) if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&jpegenc->info) ==
GST_VIDEO_FORMAT_UNKNOWN))
goto not_negotiated; goto not_negotiated;
data = GST_BUFFER_DATA (buf); if (!gst_video_frame_map (&frame, &jpegenc->info, buf, GST_MAP_READ))
size = GST_BUFFER_SIZE (buf); goto invalid_frame;
GST_LOG_OBJECT (jpegenc, "got buffer of %lu bytes", size); height = GST_VIDEO_FRAME_HEIGHT (&frame);
width = GST_VIDEO_FRAME_WIDTH (&frame);
ret = GST_LOG_OBJECT (jpegenc, "got buffer of %lu bytes",
gst_pad_alloc_buffer_and_set_caps (jpegenc->srcpad, gst_buffer_get_size (buf));
GST_BUFFER_OFFSET_NONE, jpegenc->bufsize, GST_PAD_CAPS (jpegenc->srcpad),
&jpegenc->output_buffer);
if (ret != GST_FLOW_OK)
goto done;
gst_buffer_copy_metadata (jpegenc->output_buffer, buf,
GST_BUFFER_COPY_TIMESTAMPS);
height = jpegenc->height;
for (i = 0; i < jpegenc->channels; i++) { for (i = 0; i < jpegenc->channels; i++) {
base[i] = data + jpegenc->offset[i]; base[i] = GST_VIDEO_FRAME_COMP_DATA (&frame, i);
end[i] = base[i] + jpegenc->cheight[i] * jpegenc->stride[i]; stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, i);
end[i] = base[i] + GST_VIDEO_FRAME_COMP_HEIGHT (&frame, i) * stride[i];
} }
jpegenc->jdest.next_output_byte = GST_BUFFER_DATA (jpegenc->output_buffer); jpegenc->output_mem = gst_allocator_alloc (NULL, jpegenc->bufsize, 3);
jpegenc->jdest.free_in_buffer = GST_BUFFER_SIZE (jpegenc->output_buffer); jpegenc->output_data =
gst_memory_map (jpegenc->output_mem, &jpegenc->output_size, NULL,
GST_MAP_READWRITE);
jpegenc->jdest.next_output_byte = jpegenc->output_data;
jpegenc->jdest.free_in_buffer = jpegenc->output_size;
/* prepare for raw input */ /* prepare for raw input */
#if JPEG_LIB_VERSION >= 70 #if JPEG_LIB_VERSION >= 70
@ -559,8 +563,8 @@ gst_jpegenc_chain (GstPad * pad, GstBuffer * buf)
for (k = 0; k < jpegenc->channels; k++) { for (k = 0; k < jpegenc->channels; k++) {
for (j = 0; j < jpegenc->v_samp[k] * DCTSIZE; j++) { for (j = 0; j < jpegenc->v_samp[k] * DCTSIZE; j++) {
jpegenc->line[k][j] = base[k]; jpegenc->line[k][j] = base[k];
if (base[k] + jpegenc->stride[k] < end[k]) if (base[k] + stride[k] < end[k])
base[k] += jpegenc->stride[k]; base[k] += stride[k];
} }
} }
jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line, jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line,
@ -581,8 +585,8 @@ gst_jpegenc_chain (GstPad * pad, GstBuffer * buf)
src += jpegenc->inc[k]; src += jpegenc->inc[k];
dst++; dst++;
} }
if (base[k] + jpegenc->stride[k] < end[k]) if (base[k] + stride[k] < end[k])
base[k] += jpegenc->stride[k]; base[k] += stride[k];
} }
} }
jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line, jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line,
@ -590,12 +594,18 @@ gst_jpegenc_chain (GstPad * pad, GstBuffer * buf)
} }
} }
/* This will ensure that gst_jpegenc_term_destination is called; we push /* This will ensure that gst_jpegenc_term_destination is called */
the final output buffer from there */
jpeg_finish_compress (&jpegenc->cinfo); jpeg_finish_compress (&jpegenc->cinfo);
GST_LOG_OBJECT (jpegenc, "compressing done"); GST_LOG_OBJECT (jpegenc, "compressing done");
done: outbuf = gst_buffer_new ();
gst_buffer_copy_into (outbuf, buf, GST_BUFFER_COPY_METADATA, 0, -1);
gst_buffer_take_memory (outbuf, -1, jpegenc->output_mem);
jpegenc->output_mem = NULL;
ret = gst_pad_push (jpegenc->srcpad, outbuf);
gst_video_frame_unmap (&frame);
gst_buffer_unref (buf); gst_buffer_unref (buf);
return ret; return ret;
@ -604,8 +614,14 @@ done:
not_negotiated: not_negotiated:
{ {
GST_WARNING_OBJECT (jpegenc, "no input format set (no caps on buffer)"); GST_WARNING_OBJECT (jpegenc, "no input format set (no caps on buffer)");
ret = GST_FLOW_NOT_NEGOTIATED; gst_buffer_unref (buf);
goto done; return GST_FLOW_NOT_NEGOTIATED;
}
invalid_frame:
{
GST_WARNING_OBJECT (jpegenc, "invalid frame received");
gst_buffer_unref (buf);
return GST_FLOW_OK;
} }
} }

View file

@ -46,8 +46,6 @@ G_BEGIN_DECLS
typedef struct _GstJpegEnc GstJpegEnc; typedef struct _GstJpegEnc GstJpegEnc;
typedef struct _GstJpegEncClass GstJpegEncClass; typedef struct _GstJpegEncClass GstJpegEncClass;
#define GST_JPEG_ENC_MAX_COMPONENT 4
struct _GstJpegEnc struct _GstJpegEnc
{ {
GstElement element; GstElement element;
@ -56,20 +54,15 @@ struct _GstJpegEnc
GstPad *sinkpad, *srcpad; GstPad *sinkpad, *srcpad;
/* stream/image properties */ /* stream/image properties */
GstVideoFormat format; GstVideoInfo info;
gint width;
gint height;
gint channels; gint channels;
gint fps_num, fps_den;
gint par_num, par_den;
/* standard video_format indexed */ /* standard video_format indexed */
gint stride[GST_JPEG_ENC_MAX_COMPONENT]; gint inc[GST_VIDEO_MAX_COMPONENTS];
gint offset[GST_JPEG_ENC_MAX_COMPONENT]; gint cwidth[GST_VIDEO_MAX_COMPONENTS];
gint inc[GST_JPEG_ENC_MAX_COMPONENT]; gint cheight[GST_VIDEO_MAX_COMPONENTS];
gint cwidth[GST_JPEG_ENC_MAX_COMPONENT]; gint h_samp[GST_VIDEO_MAX_COMPONENTS];
gint cheight[GST_JPEG_ENC_MAX_COMPONENT]; gint v_samp[GST_VIDEO_MAX_COMPONENTS];
gint h_samp[GST_JPEG_ENC_MAX_COMPONENT];
gint v_samp[GST_JPEG_ENC_MAX_COMPONENT];
gint h_max_samp; gint h_max_samp;
gint v_max_samp; gint v_max_samp;
gboolean planar; gboolean planar;
@ -92,15 +85,14 @@ struct _GstJpegEnc
/* cached return state for any problems that may occur in callbacks */ /* cached return state for any problems that may occur in callbacks */
GstFlowReturn last_ret; GstFlowReturn last_ret;
GstBuffer *output_buffer; GstMemory *output_mem;
gpointer output_data;
gsize output_size;
}; };
struct _GstJpegEncClass struct _GstJpegEncClass
{ {
GstElementClass parent_class; GstElementClass parent_class;
/* signals */
void (*frame_encoded) (GstElement * element);
}; };
GType gst_jpegenc_get_type (void); GType gst_jpegenc_get_type (void);