jpeg: Port to 0.11 again
This commit is contained in:
parent 8e9eb77816
commit 62580b14f6

4 changed files with 162 additions and 155 deletions
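The port below moves the plugin from the 0.10 buffer accessors (GST_BUFFER_DATA / GST_BUFFER_SIZE) to the 0.11/1.0 mapping API, where buffer memory is reached through an explicitly mapped GstMapInfo. A minimal sketch of that pattern, assuming only a GstBuffer to read; the helper name read_jpeg_size is illustrative and not taken from the commit:

#include <gst/gst.h>

/* Sketch of the 1.0-style access pattern: map the buffer for reading and use
 * map.data / map.size where 0.10 code used GST_BUFFER_DATA / GST_BUFFER_SIZE. */
static gsize
read_jpeg_size (GstBuffer * buf)
{
  GstMapInfo map;
  gsize size = 0;

  if (gst_buffer_map (buf, &map, GST_MAP_READ)) {
    size = map.size;            /* compressed bytes start at map.data */
    gst_buffer_unmap (buf, &map);
  }
  return size;
}

The decoder hunks below apply the same idea to the jpeglib source manager: the input buffer is mapped once per frame into dec->current_frame_map and cinfo->src is pointed at map.data / map.size.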
@@ -42,6 +42,8 @@
#include "gstjpegdec.h"
#include "gstjpeg.h"
#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>
#include <gst/video/gstvideopool.h>
#include "gst/gst-i18n-plugin.h"
#include <jerror.h>
@@ -68,11 +70,8 @@ static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420") "; "
        GST_VIDEO_CAPS_RGB "; " GST_VIDEO_CAPS_BGR "; "
        GST_VIDEO_CAPS_RGBx "; " GST_VIDEO_CAPS_xRGB "; "
        GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_xBGR "; "
        GST_VIDEO_CAPS_GRAY8)
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
        ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
    );
/* *INDENT-ON* */
@@ -92,18 +91,6 @@ GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
#define GST_CAT_DEFAULT jpeg_dec_debug
GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);

/* These macros are adapted from videotestsrc.c
 * and/or gst-plugins/gst/games/gstvideoimage.c */
#define I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
#define I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
#define I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(I420_Y_ROWSTRIDE(width)))/2)

#define I420_Y_OFFSET(w,h) (0)
#define I420_U_OFFSET(w,h) (I420_Y_OFFSET(w,h)+(I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
#define I420_V_OFFSET(w,h) (I420_U_OFFSET(w,h)+(I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))

#define I420_SIZE(w,h) (I420_V_OFFSET(w,h)+(I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))

static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
@@ -118,9 +105,11 @@ static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame);
static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
    GstQuery * query);

GST_BOILERPLATE (GstJpegDec, gst_jpeg_dec, GstVideoDecoder,
    GST_TYPE_VIDEO_DECODER);
#define gst_jpeg_dec_parent_class parent_class
G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);

static void
gst_jpeg_dec_finalize (GObject * object)
@@ -132,27 +121,15 @@ gst_jpeg_dec_finalize (GObject * object)
  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_jpeg_dec_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_add_static_pad_template (element_class,
      &gst_jpeg_dec_src_pad_template);
  gst_element_class_add_static_pad_template (element_class,
      &gst_jpeg_dec_sink_pad_template);
  gst_element_class_set_details_simple (element_class, "JPEG image decoder",
      "Codec/Decoder/Image",
      "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
}

static void
gst_jpeg_dec_class_init (GstJpegDecClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *element_class;
  GstVideoDecoderClass *vdec_class;

  gobject_class = (GObjectClass *) klass;
  element_class = (GstElementClass *) klass;
  vdec_class = (GstVideoDecoderClass *) klass;

  parent_class = g_type_class_peek_parent (klass);
@@ -182,12 +159,21 @@ gst_jpeg_dec_class_init (GstJpegDecClass * klass)
          -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
  gst_element_class_set_details_simple (element_class, "JPEG image decoder",
      "Codec/Decoder/Image",
      "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");

  vdec_class->start = gst_jpeg_dec_start;
  vdec_class->stop = gst_jpeg_dec_stop;
  vdec_class->reset = gst_jpeg_dec_reset;
  vdec_class->parse = gst_jpeg_dec_parse;
  vdec_class->set_format = gst_jpeg_dec_set_format;
  vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
  vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;

  GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
  GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
@@ -276,11 +262,10 @@ gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
  dec = CINFO_GET_JPEGDEC (cinfo);
  g_return_val_if_fail (dec != NULL, FALSE);
  g_return_val_if_fail (dec->current_frame != NULL, FALSE);
  g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);

  cinfo->src->next_input_byte =
      GST_BUFFER_DATA (dec->current_frame->input_buffer);
  cinfo->src->bytes_in_buffer =
      GST_BUFFER_SIZE (dec->current_frame->input_buffer);
  cinfo->src->next_input_byte = dec->current_frame_map.data;
  cinfo->src->bytes_in_buffer = dec->current_frame_map.size;

  return TRUE;
}
@@ -361,7 +346,7 @@ METHODDEF (void)
}

static void
gst_jpeg_dec_init (GstJpegDec * dec, GstJpegDecClass * class)
gst_jpeg_dec_init (GstJpegDec * dec)
{
  GST_DEBUG ("initializing");
@@ -809,19 +794,28 @@ gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
}

static void
gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, guchar * base[1],
    guint width, guint height, guint pstride, guint rstride)
gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
{
  guchar *rows[16];
  guchar **scanarray[1] = { rows };
  gint i, j, k;
  gint lines;
  guint8 *base[1];
  gint width, height;
  gint pstride, rstride;

  GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));

  i = 0;
@@ -845,19 +839,30 @@ gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, guchar * base[1],
}

static void
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, guchar * base[3],
    guint width, guint height, guint pstride, guint rstride)
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
{
  guchar *r_rows[16], *g_rows[16], *b_rows[16];
  guchar **scanarray[3] = { r_rows, g_rows, b_rows };
  gint i, j, k;
  gint lines;
  guint8 *base[3];
  guint pstride, rstride;
  gint width, height;

  GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  for (i = 0; i < 3; i++)
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
@@ -887,20 +892,35 @@ gst_jpeg_dec_decode_rgb (GstJpegDec * dec, guchar * base[3],
}

static void
gst_jpeg_dec_decode_indirect (GstJpegDec * dec, guchar * base[3],
    guchar * last[3], guint width, guint height, gint r_v, gint r_h, gint comp)
gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
    gint r_h, gint comp)
{
  guchar *y_rows[16], *u_rows[16], *v_rows[16];
  guchar **scanarray[3] = { y_rows, u_rows, v_rows };
  gint i, j, k;
  gint lines;
  guchar *base[3], *last[3];
  gint stride[3];
  gint width, height;

  GST_DEBUG_OBJECT (dec,
      "unadvantageous width or r_h, taking slow route involving memcpy");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
        (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
  }

  memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
@@ -919,30 +939,30 @@ gst_jpeg_dec_decode_indirect (GstJpegDec * dec, guchar * base[3],
    if (G_LIKELY (lines > 0)) {
      for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
        if (G_LIKELY (base[0] <= last[0])) {
          memcpy (base[0], y_rows[j], I420_Y_ROWSTRIDE (width));
          base[0] += I420_Y_ROWSTRIDE (width);
          memcpy (base[0], y_rows[j], stride[0]);
          base[0] += stride[0];
        }
        if (r_v == 2) {
          if (G_LIKELY (base[0] <= last[0])) {
            memcpy (base[0], y_rows[j + 1], I420_Y_ROWSTRIDE (width));
            base[0] += I420_Y_ROWSTRIDE (width);
            memcpy (base[0], y_rows[j + 1], stride[0]);
            base[0] += stride[0];
          }
        }
        if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
          if (r_h == 2) {
            memcpy (base[1], u_rows[k], I420_U_ROWSTRIDE (width));
            memcpy (base[2], v_rows[k], I420_V_ROWSTRIDE (width));
            memcpy (base[1], u_rows[k], stride[1]);
            memcpy (base[2], v_rows[k], stride[2]);
          } else if (r_h == 1) {
            hresamplecpy1 (base[1], u_rows[k], I420_U_ROWSTRIDE (width));
            hresamplecpy1 (base[2], v_rows[k], I420_V_ROWSTRIDE (width));
            hresamplecpy1 (base[1], u_rows[k], stride[1]);
            hresamplecpy1 (base[2], v_rows[k], stride[2]);
          } else {
            /* FIXME: implement (at least we avoid crashing by doing nothing) */
          }
        }

        if (r_v == 2 || (k & 1) != 0) {
          base[1] += I420_U_ROWSTRIDE (width);
          base[2] += I420_V_ROWSTRIDE (width);
          base[1] += stride[1];
          base[2] += stride[2];
        }
      }
    } else {
@@ -951,27 +971,8 @@ gst_jpeg_dec_decode_indirect (GstJpegDec * dec, guchar * base[3],
  }
}

#ifndef GST_DISABLE_GST_DEBUG
static inline void
dump_lines (guchar * base[3], guchar ** line[3], int v_samp0, int width)
{
  int j;

  for (j = 0; j < (v_samp0 * DCTSIZE); ++j) {
    GST_LOG ("[%02d] %5d %5d %5d", j,
        (line[0][j] >= base[0]) ?
        (int) (line[0][j] - base[0]) / I420_Y_ROWSTRIDE (width) : -1,
        (line[1][j] >= base[1]) ?
        (int) (line[1][j] - base[1]) / I420_U_ROWSTRIDE (width) : -1,
        (line[2][j] >= base[2]) ?
        (int) (line[2][j] - base[2]) / I420_V_ROWSTRIDE (width) : -1);
  }
}
#endif

static GstFlowReturn
gst_jpeg_dec_decode_direct (GstJpegDec * dec, guchar * base[3],
    guchar * last[3], guint width, guint height)
gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
{
  guchar **line[3];             /* the jpeg line buffer */
  guchar *y[4 * DCTSIZE] = { NULL, };   /* alloc enough for the lines */
@@ -979,6 +980,9 @@ gst_jpeg_dec_decode_direct (GstJpegDec * dec, guchar * base[3],
  guchar *v[4 * DCTSIZE] = { NULL, };
  gint i, j;
  gint lines, v_samp[3];
  guchar *base[3], *last[3];
  gint stride[3];
  guint height;

  line[0] = y;
  line[1] = u;
@@ -991,35 +995,44 @@ gst_jpeg_dec_decode_direct (GstJpegDec * dec, guchar * base[3],
  if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
    goto format_not_supported;

  height = GST_VIDEO_FRAME_HEIGHT (frame);

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
        (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
  }

  /* let jpeglib decode directly into our final buffer */
  GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");

  for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
    for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
      /* Y */
      line[0][j] = base[0] + (i + j) * I420_Y_ROWSTRIDE (width);
      line[0][j] = base[0] + (i + j) * stride[0];
      if (G_UNLIKELY (line[0][j] > last[0]))
        line[0][j] = last[0];
      /* U */
      if (v_samp[1] == v_samp[0]) {
        line[1][j] = base[1] + ((i + j) / 2) * I420_U_ROWSTRIDE (width);
        line[1][j] = base[1] + ((i + j) / 2) * stride[1];
      } else if (j < (v_samp[1] * DCTSIZE)) {
        line[1][j] = base[1] + ((i / 2) + j) * I420_U_ROWSTRIDE (width);
        line[1][j] = base[1] + ((i / 2) + j) * stride[1];
      }
      if (G_UNLIKELY (line[1][j] > last[1]))
        line[1][j] = last[1];
      /* V */
      if (v_samp[2] == v_samp[0]) {
        line[2][j] = base[2] + ((i + j) / 2) * I420_V_ROWSTRIDE (width);
        line[2][j] = base[2] + ((i + j) / 2) * stride[2];
      } else if (j < (v_samp[2] * DCTSIZE)) {
        line[2][j] = base[2] + ((i / 2) + j) * I420_V_ROWSTRIDE (width);
        line[2][j] = base[2] + ((i / 2) + j) * stride[2];
      }
      if (G_UNLIKELY (line[2][j] > last[2]))
        line[2][j] = last[2];
    }

    /* dump_lines (base, line, v_samp[0], width); */

    lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
    if (G_UNLIKELY (!lines)) {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
@@ -1075,25 +1088,6 @@ gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)

  gst_video_codec_state_unref (outstate);

  if (dec->cinfo.jpeg_color_space == JCS_RGB) {
    /* some format info */
    dec->offset[0] =
        gst_video_format_get_component_offset (format, 0, width, height);
    dec->offset[1] =
        gst_video_format_get_component_offset (format, 1, width, height);
    dec->offset[2] =
        gst_video_format_get_component_offset (format, 2, width, height);
    /* equal for all components */
    dec->stride = gst_video_format_get_row_stride (format, 0, width);
    dec->inc = gst_video_format_get_pixel_stride (format, 0);
  } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
    /* TODO is anything else then 8bit supported in jpeg? */
    dec->offset[0] =
        gst_video_format_get_component_offset (format, 0, width, height);
    dec->stride = gst_video_format_get_row_stride (format, 0, width);
    dec->inc = gst_video_format_get_pixel_stride (format, 0);
  }

  GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
  GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
}
@@ -1103,11 +1097,12 @@ gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstJpegDec *dec = (GstJpegDec *) bdec;
  guchar *outdata;
  guchar *base[3], *last[3];
  GstVideoFrame vframe;
  gint width, height;
  gint r_h, r_v;
  guint code, hdr_ok;
  gboolean need_unmap = TRUE;
  GstVideoCodecState *state = NULL;

#if 0
again:
@@ -1135,6 +1130,7 @@ again:
#endif

  dec->current_frame = frame;
  gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
  gst_jpeg_dec_fill_input_buffer (&dec->cinfo);

  if (setjmp (dec->jerr.setjmp_buffer)) {
@@ -1232,42 +1228,22 @@ again:

  gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);

  state = gst_video_decoder_get_output_state (bdec);
  ret = gst_video_decoder_alloc_output_frame (bdec, frame);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto alloc_failed;

  outdata = GST_BUFFER_DATA (frame->output_buffer);
  if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
          GST_MAP_READWRITE))
    goto alloc_failed;

  GST_LOG_OBJECT (dec, "width %d, height %d", width, height);

  if (dec->cinfo.jpeg_color_space == JCS_RGB) {
    base[0] = outdata + dec->offset[0];
    base[1] = outdata + dec->offset[1];
    base[2] = outdata + dec->offset[2];
    gst_jpeg_dec_decode_rgb (dec, base, width, height, dec->inc, dec->stride);
    gst_jpeg_dec_decode_rgb (dec, &vframe);
  } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
    base[0] = outdata + dec->offset[0];
    gst_jpeg_dec_decode_grayscale (dec, base, width, height, dec->inc,
        dec->stride);
    gst_jpeg_dec_decode_grayscale (dec, &vframe);
  } else {
    /* mind the swap, jpeglib outputs blue chroma first
     * ensonic: I see no swap?
     */
    base[0] = outdata + I420_Y_OFFSET (width, height);
    base[1] = outdata + I420_U_OFFSET (width, height);
    base[2] = outdata + I420_V_OFFSET (width, height);

    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[0] = base[0] + (I420_Y_ROWSTRIDE (width) * (height - 1));
    last[1] =
        base[1] + (I420_U_ROWSTRIDE (width) * ((GST_ROUND_UP_2 (height) / 2) -
            1));
    last[2] =
        base[2] + (I420_V_ROWSTRIDE (width) * ((GST_ROUND_UP_2 (height) / 2) -
            1));

    GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
        dec->cinfo.rec_outbuf_height);
@@ -1282,23 +1258,27 @@ again:
        || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
      GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
          "indirect decoding using extra buffer copy");
      gst_jpeg_dec_decode_indirect (dec, base, last, width, height, r_v, r_h,
      gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
          dec->cinfo.num_components);
    } else {
      ret = gst_jpeg_dec_decode_direct (dec, base, last, width, height);
      ret = gst_jpeg_dec_decode_direct (dec, &vframe);

      if (G_UNLIKELY (ret != GST_FLOW_OK))
        goto decode_direct_failed;
    }
  }

  gst_video_frame_unmap (&vframe);

  GST_LOG_OBJECT (dec, "decompressing finished");
  jpeg_finish_decompress (&dec->cinfo);

  /* reset error count on successful decode */
  dec->error_count = 0;

  gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
  ret = gst_video_decoder_finish_frame (bdec, frame);
  need_unmap = FALSE;

done:
@@ -1309,6 +1289,12 @@ exit:
    ret = gst_jpeg_dec_post_error_or_warning (dec);
  }

  if (need_unmap)
    gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);

  if (state)
    gst_video_codec_state_unref (state);

  return ret;

  /* special cases */
@@ -1335,7 +1321,9 @@ decode_error:
    gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
        "Decode error #%u: %s", code, err_msg);

    gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
    gst_video_decoder_drop_frame (bdec, frame);
    need_unmap = FALSE;

    ret = GST_FLOW_ERROR;
    goto done;
@@ -1355,7 +1343,7 @@ alloc_failed:
    GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
    /* Reset for next time */
    jpeg_abort_decompress (&dec->cinfo);
    if (ret != GST_FLOW_UNEXPECTED && ret != GST_FLOW_WRONG_STATE &&
    if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
        ret != GST_FLOW_NOT_LINKED) {
      gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
          "Buffer allocation failed, reason: %s", reason);
@@ -1386,14 +1374,36 @@ invalid_yuvrgbgrayscale:
  }
}

static gboolean
gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
{
  GstBufferPool *pool;
  GstStructure *config;

  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
    return FALSE;

  g_assert (gst_query_get_n_allocation_pools (query) > 0);
  gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
  g_assert (pool != NULL);

  config = gst_buffer_pool_get_config (pool);
  if (gst_query_has_allocation_meta (query, GST_VIDEO_META_API_TYPE)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }
  gst_buffer_pool_set_config (pool, config);
  gst_object_unref (pool);

  return TRUE;
}

static gboolean
gst_jpeg_dec_reset (GstVideoDecoder * bdec, gboolean hard)
{
  GstJpegDec *dec = (GstJpegDec *) bdec;

  jpeg_abort_decompress (&dec->cinfo);
  g_free (dec->cur_buf);
  dec->cur_buf = NULL;
  dec->parse_offset = 0;
  dec->parse_entropy_len = 0;
  dec->parse_resync = FALSE;
@@ -1454,7 +1464,6 @@ gst_jpeg_dec_start (GstVideoDecoder * bdec)
  dec->parse_offset = 0;
  dec->parse_entropy_len = 0;
  dec->parse_resync = FALSE;
  dec->cur_buf = NULL;

  return TRUE;
}
@@ -1464,8 +1473,6 @@ gst_jpeg_dec_stop (GstVideoDecoder * bdec)
{
  GstJpegDec *dec = (GstJpegDec *) bdec;

  g_free (dec->cur_buf);
  dec->cur_buf = NULL;
  gst_jpeg_dec_free_buffers (dec);

  return TRUE;
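The decoder hunks above also replace the hand-rolled I420_* offset and rowstride macros with GstVideoFrame accessors, so component pointers and strides come from the negotiated video info (and any GstVideoMeta provided by the buffer pool) instead of being recomputed per format. A compressed sketch of that access pattern, reusing the `state` (negotiated output state) and `frame` (GstVideoCodecFrame) names from gst_jpeg_dec_handle_frame above; it is an illustration, not code from the commit:

GstVideoFrame vframe;

/* Map the output buffer through the negotiated GstVideoInfo; per-component
 * base pointers and strides then come from the mapped frame rather than
 * from I420_* arithmetic. */
if (gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
        GST_MAP_READWRITE)) {
  guint8 *y_base = GST_VIDEO_FRAME_COMP_DATA (&vframe, 0);
  gint y_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0);
  gint y_height = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, 0);

  /* decoded luma scanline n is written at y_base + n * y_stride, n < y_height;
   * components 1 and 2 are accessed the same way */

  gst_video_frame_unmap (&vframe);
}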
@@ -69,15 +69,10 @@ struct GstJpegDecSourceMgr {
struct _GstJpegDec {
  GstVideoDecoder decoder;

  guint8 *cur_buf;

  /* negotiated state */
  GstVideoCodecState *input_state;
  GstVideoCodecFrame *current_frame;

  gint offset[3];
  gint stride;
  gint inc;
  GstMapInfo current_frame_map;

  /* parse state */
  gboolean saw_header;
@@ -226,7 +226,9 @@ gst_jpegenc_flush_destination (j_compress_ptr cinfo)
static void
gst_jpegenc_term_destination (j_compress_ptr cinfo)
{
  GstBuffer *outbuf;
  GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data);

  GST_DEBUG_OBJECT (jpegenc, "gst_jpegenc_chain: term_source");

  gst_memory_unmap (jpegenc->output_mem, &jpegenc->output_map);
@@ -236,6 +238,14 @@ gst_jpegenc_term_destination (j_compress_ptr cinfo)
  jpegenc->output_map.data = NULL;
  jpegenc->output_map.size = 0;

  outbuf = gst_buffer_new ();
  gst_buffer_copy_into (outbuf, jpegenc->current_frame->input_buffer,
      GST_BUFFER_COPY_METADATA, 0, -1);
  gst_buffer_append_memory (outbuf, jpegenc->output_mem);
  jpegenc->output_mem = NULL;

  jpegenc->current_frame->output_buffer = outbuf;

  gst_video_frame_unmap (&jpegenc->current_vframe);

  gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (jpegenc),
@@ -49,24 +49,21 @@ G_BEGIN_DECLS
typedef struct _GstJpegEnc GstJpegEnc;
typedef struct _GstJpegEncClass GstJpegEncClass;

#define GST_JPEG_ENC_MAX_COMPONENT 4

struct _GstJpegEnc
{
  GstVideoEncoder encoder;

  GstVideoCodecState *input_state;
  GstVideoFrame current_vframe;
  GstVideoCodecFrame *current_frame;

  guint channels;

  gint stride[GST_JPEG_ENC_MAX_COMPONENT];
  gint offset[GST_JPEG_ENC_MAX_COMPONENT];
  gint inc[GST_JPEG_ENC_MAX_COMPONENT];
  gint cwidth[GST_JPEG_ENC_MAX_COMPONENT];
  gint cheight[GST_JPEG_ENC_MAX_COMPONENT];
  gint h_samp[GST_JPEG_ENC_MAX_COMPONENT];
  gint v_samp[GST_JPEG_ENC_MAX_COMPONENT];
  gint inc[GST_VIDEO_MAX_COMPONENTS];
  gint cwidth[GST_VIDEO_MAX_COMPONENTS];
  gint cheight[GST_VIDEO_MAX_COMPONENTS];
  gint h_samp[GST_VIDEO_MAX_COMPONENTS];
  gint v_samp[GST_VIDEO_MAX_COMPONENTS];
  gint h_max_samp;
  gint v_max_samp;
  gboolean planar;
@@ -86,15 +83,13 @@ struct _GstJpegEnc
  gint smoothing;
  gint idct_method;

  GstBuffer *output_buffer;
  GstMemory *output_mem;
  GstMapInfo output_map;
};

struct _GstJpegEncClass
{
  GstVideoEncoderClass parent_class;

  /* signals */
  void (*frame_encoded) (GstElement * element);
};

GType gst_jpegenc_get_type (void);