openjpegenc: take subsampling into account when calculating stripe height

We calculate the minimum of (stripe height * vertical subsampling) across all
components to ensure that all component dimensions are consistent with the subsampling.
The last stripe for each component is simply the remaining height.

Also limit the number of wavelet resolutions for "thin" stripes.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1800>
Aaron Boxer 2020-03-24 09:15:30 -04:00 committed by GStreamer Merge Bot
parent 4f6b609558
commit af87da86e2
2 changed files with 76 additions and 23 deletions
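
To make the stripe-height rule from the commit message concrete, here is a small standalone C sketch that walks through the same arithmetic on a hypothetical 1920x1080 4:2:0 frame split into 4 stripes. All names (stripe_height_for, comp_h, and so on) and the example numbers are illustrative only; this is not the element's actual code or API:

/* Standalone sketch of the stripe-height logic described in the commit
 * message. All names and the 4:2:0 example values are hypothetical.
 * Compile with: cc sketch.c -lm */
#include <stdio.h>
#include <limits.h>
#include <math.h>

/* Per-component stripe height for stripe `n` out of `num_stripes`:
 * every stripe but the last gets the nominal height derived from the
 * common minimum; the last stripe takes whatever height remains. */
static int
stripe_height_for (int n, int num_stripes, int comp_height, int dy,
    int min_height_luma)
{
  int nominal = min_height_luma / dy;

  if (n < num_stripes - 1)
    return nominal;
  return comp_height - (num_stripes - 1) * nominal;
}

int
main (void)
{
  /* Example: a 1920x1080 4:2:0 frame split into 4 stripes. */
  int frame_h = 1080, num_stripes = 4, ncomps = 3;
  int comp_h[3] = { 1080, 540, 540 };   /* Y, U, V heights */
  int dy[3], min_height = INT_MAX;
  int c, n;

  for (c = 0; c < ncomps; c++) {
    /* Vertical subsampling factor, rounded as in the patch. */
    dy[c] = (int) ((float) frame_h / comp_h[c] + 0.5f);
    /* Stripe height expressed in luma (full-resolution) rows. */
    int h_luma = (comp_h[c] / num_stripes) * dy[c];
    if (h_luma < min_height)
      min_height = h_luma;
  }

  for (n = 0; n < num_stripes; n++)
    for (c = 0; c < ncomps; c++)
      printf ("stripe %d comp %d height %d\n", n, c,
          stripe_height_for (n, num_stripes, comp_h[c], dy[c], min_height));

  /* Cap the number of wavelet resolutions for thin stripes, roughly
   * log(smallest stripe height) + 1, mirroring the numresolution clamp. */
  int smallest = stripe_height_for (num_stripes - 1, num_stripes,
      comp_h[0], dy[0], min_height);
  int first = stripe_height_for (0, num_stripes, comp_h[0], dy[0], min_height);
  if (first < smallest)
    smallest = first;
  int max_res = smallest > 1 ? (int) log ((double) smallest) + 1 : 1;
  printf ("max usable wavelet resolutions ~= %d\n", max_res + 1);

  return 0;
}

For this example the common minimum is 270 luma rows, so every stripe covers 270 rows of Y and 135 rows of U/V, with the last stripe absorbing any remainder; the resolution cap then follows from the log of the smallest stripe height.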


@@ -28,6 +28,7 @@
 #include <gst/codecparsers/gstjpeg2000sampling.h>
 #include <string.h>
+#include <math.h>
 
 GST_DEBUG_CATEGORY_STATIC (gst_openjpeg_enc_debug);
 #define GST_CAT_DEFAULT gst_openjpeg_enc_debug
@@ -504,12 +505,15 @@ fill_image_planar16_3 (opj_image_t * image, GstVideoFrame * frame)
   gint sstride;
 
   for (c = 0; c < 3; c++) {
+    opj_image_comp_t *comp = image->comps + c;
+
     w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
-    h = image->comps[c].h;
+    h = comp->h;
     sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, c) / 2;
     data_in =
-        (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame, c) + image->y0 * sstride;
-    data_out = image->comps[c].data;
+        (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame,
+        c) + (image->y0 / comp->dy) * sstride;
+    data_out = comp->data;
 
     for (y = 0; y < h; y++) {
       tmp = data_in;
@@ -534,12 +538,15 @@ fill_image_planar8_3 (opj_image_t * image, GstVideoFrame * frame)
   gint sstride;
 
   for (c = 0; c < 3; c++) {
+    opj_image_comp_t *comp = image->comps + c;
+
     w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
-    h = image->comps[c].h;
+    h = comp->h;
     sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, c);
     data_in =
-        (guint8 *) GST_VIDEO_FRAME_COMP_DATA (frame, c) + image->y0 * sstride;
-    data_out = image->comps[c].data;
+        (guint8 *) GST_VIDEO_FRAME_COMP_DATA (frame,
+        c) + (image->y0 / comp->dy) * sstride;
+    data_out = comp->data;
 
     for (y = 0; y < h; y++) {
       tmp = data_in;
@@ -562,12 +569,14 @@ fill_image_planar8_1 (opj_image_t * image, GstVideoFrame * frame)
   const guint8 *data_in, *tmp;
   gint *data_out;
   gint sstride;
+  opj_image_comp_t *comp = image->comps;
 
   w = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
-  h = image->comps[0].h;
+  h = comp->h;
   sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
   data_in =
-      (guint8 *) GST_VIDEO_FRAME_COMP_DATA (frame, 0) + image->y0 * sstride;
+      (guint8 *) GST_VIDEO_FRAME_COMP_DATA (frame,
+      0) + (image->y0 / comp->dy) * sstride;
   data_out = image->comps[0].data;
 
   for (y = 0; y < h; y++) {
@@ -590,13 +599,15 @@ fill_image_planar16_1 (opj_image_t * image, GstVideoFrame * frame)
   const guint16 *data_in, *tmp;
   gint *data_out;
   gint sstride;
+  opj_image_comp_t *comp = image->comps;
 
   w = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
-  h = image->comps[0].h;
+  h = comp->h;
   sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
   data_in =
-      (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame, 0) + image->y0 * sstride;
-  data_out = image->comps[0].data;
+      (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame,
+      0) + (image->y0 / comp->dy) * sstride;
+  data_out = comp->data;
 
   for (y = 0; y < h; y++) {
     tmp = data_in;
@@ -778,12 +789,10 @@ static opj_image_t *
 gst_openjpeg_enc_fill_image (GstOpenJPEGEnc * self, GstVideoFrame * frame,
     guint slice_num)
 {
-  gint i, ncomps;
+  gint i, ncomps, temp, min_height = INT_MAX;
   opj_image_cmptparm_t *comps;
   OPJ_COLOR_SPACE colorspace;
   opj_image_t *image;
-  guint nominal_stripe_height =
-      GST_VIDEO_FRAME_HEIGHT (frame) / self->num_stripes;
 
   ncomps = GST_VIDEO_FRAME_N_COMPONENTS (frame);
   comps = g_new0 (opj_image_cmptparm_t, ncomps);
@@ -793,13 +802,27 @@ gst_openjpeg_enc_fill_image (GstOpenJPEGEnc * self, GstVideoFrame * frame,
     comps[i].bpp = GST_VIDEO_FRAME_COMP_DEPTH (frame, i);
     comps[i].sgnd = 0;
     comps[i].w = GST_VIDEO_FRAME_COMP_WIDTH (frame, i);
-    comps[i].h =
-        get_stripe_height (self, slice_num, GST_VIDEO_FRAME_COMP_HEIGHT (frame,
-            i));
     comps[i].dx =
-        GST_VIDEO_FRAME_WIDTH (frame) / GST_VIDEO_FRAME_COMP_WIDTH (frame, i);
+        (guint) ((float) GST_VIDEO_FRAME_WIDTH (frame) /
+        GST_VIDEO_FRAME_COMP_WIDTH (frame, i) + 0.5f);
     comps[i].dy =
-        GST_VIDEO_FRAME_HEIGHT (frame) / GST_VIDEO_FRAME_COMP_HEIGHT (frame, i);
+        (guint) ((float) GST_VIDEO_FRAME_HEIGHT (frame) /
+        GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) + 0.5f);
+    temp =
+        (GST_VIDEO_FRAME_COMP_HEIGHT (frame,
+            i) / self->num_stripes) * comps[i].dy;
+    if (temp < min_height)
+      min_height = temp;
+  }
+
+  for (i = 0; i < ncomps; i++) {
+    gint nominal_height = min_height / comps[i].dy;
+
+    comps[i].h = (slice_num < self->num_stripes - 1) ?
+        nominal_height
+        : GST_VIDEO_FRAME_COMP_HEIGHT (frame,
+        i) - (self->num_stripes - 1) * nominal_height;
   }
 
   if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_YUV))
@@ -812,14 +835,22 @@ gst_openjpeg_enc_fill_image (GstOpenJPEGEnc * self, GstVideoFrame * frame,
     g_return_val_if_reached (NULL);
 
   image = opj_image_create (ncomps, comps, colorspace);
+  if (!image) {
+    GST_WARNING_OBJECT (self,
+        "Unable to create a JPEG image. first component height=%d",
+        ncomps ? comps[0].h : 0);
+    return NULL;
+  }
+
   g_free (comps);
 
   image->x0 = 0;
   image->x1 = GST_VIDEO_FRAME_WIDTH (frame);
-  image->y0 = slice_num * nominal_stripe_height;
+  image->y0 = slice_num * min_height;
   image->y1 =
-      image->y0 + get_stripe_height (self, slice_num,
-      GST_VIDEO_FRAME_HEIGHT (frame));
+      (slice_num <
+      self->num_stripes - 1) ? image->y0 +
+      min_height : GST_VIDEO_FRAME_HEIGHT (frame);
 
   self->fill_image (image, frame);
   return image;
@@ -942,6 +973,8 @@ gst_openjpeg_enc_handle_frame (GstVideoEncoder * encoder,
 
   if (stripe_mode) {
     const gchar *str = gst_structure_get_string (s, "alignment");
+    gint min_res;
+
     if (g_strcmp0 (str, "stripe") != 0) {
       GST_ERROR_OBJECT (self,
           "Number of stripes set to %d, but alignment=stripe not supported downstream",
@@ -950,6 +983,26 @@ gst_openjpeg_enc_handle_frame (GstVideoEncoder * encoder,
       ret = GST_FLOW_NOT_NEGOTIATED;
       goto done;
     }
+
+    /* due to limitations in openjpeg library,
+     * number of wavelet resolutions must not exceed floor(log(stripe height)) + 1 */
+    if (!gst_video_frame_map (&vframe, &self->input_state->info,
+            frame->input_buffer, GST_MAP_READ)) {
+      gst_video_codec_frame_unref (frame);
+      GST_ELEMENT_ERROR (self, CORE, FAILED,
+          ("Failed to map input buffer"), (NULL));
+      return GST_FLOW_ERROR;
+    }
+    /* find stripe with least height */
+    min_res =
+        get_stripe_height (self, self->num_stripes - 1,
+        GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, 0));
+    min_res = MIN (min_res, get_stripe_height (self, 0,
+            GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, 0)));
+    /* take log to find correct number of wavelet resolutions */
+    min_res = min_res > 1 ? (gint) log (min_res) + 1 : 1;
+    self->params.numresolution = MIN (min_res + 1, self->params.numresolution);
+    gst_video_frame_unmap (&vframe);
   }


@@ -21,7 +21,7 @@ if openjpeg_dep.found()
     link_args : noseh_link_args,
     include_directories : [configinc],
     dependencies : [gst_dep, gstvideo_dep, openjpeg_dep,
-      gstcodecparsers_dep],
+      gstcodecparsers_dep, libm],
     install : true,
     install_dir : plugins_install_dir,
   )