dvbsuboverlay: use video overlay composition helper

... rather than custom home-made blending.

Conflicts:

	gst/dvbsuboverlay/gstdvbsuboverlay.c
Mark Nauwelaerts 2012-07-13 12:27:57 +02:00
parent 76aedf5641
commit 2d0866ec28
2 changed files with 125 additions and 239 deletions
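
For orientation, here is a minimal sketch (not part of the commit) of the video overlay composition helper flow this patch switches to, using the same calls that appear in the diff below: pre-rendered ARGB pixels are wrapped in a GstVideoOverlayRectangle, collected into a GstVideoOverlayComposition, and blended onto a mapped video frame by the library. The blend_single_rect wrapper and its buffer/frame arguments are illustrative assumptions, not code from the patch.

#include <gst/video/video-overlay-composition.h>

/* Illustrative helper (hypothetical, not from the patch): blend one w x h
 * ARGB rectangle onto an already mapped video frame at position (x, y). */
static void
blend_single_rect (GstBuffer * argb_pixels, gint w, gint h, gint x, gint y,
    GstVideoFrame * frame)
{
  GstVideoOverlayRectangle *rect;
  GstVideoOverlayComposition *comp;

  /* the pixel buffer is w x h ARGB with a stride of 4 * w bytes; render it
   * unscaled at (x, y) in the video frame */
  rect = gst_video_overlay_rectangle_new_argb (argb_pixels, w, h, 4 * w,
      x, y, w, h, 0);
  comp = gst_video_overlay_composition_new (rect);
  gst_video_overlay_rectangle_unref (rect);

  /* the helper performs the alpha blending and handles conversion to the
   * frame's pixel format (e.g. I420), replacing hand-written blit code */
  gst_video_overlay_composition_blend (comp, frame);
  gst_video_overlay_composition_unref (comp);
}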

gst/dvbsuboverlay/gstdvbsuboverlay.c

@@ -175,6 +175,10 @@ gst_dvbsub_overlay_flush_subtitles (GstDVBSubOverlay * render)
  dvb_subtitles_free (render->current_subtitle);
  render->current_subtitle = NULL;
  if (render->current_comp)
    gst_video_overlay_composition_unref (render->current_comp);
  render->current_comp = NULL;
  if (render->dvb_sub)
    dvb_sub_free (render->dvb_sub);
@@ -256,6 +260,10 @@ gst_dvbsub_overlay_finalize (GObject * object)
  dvb_subtitles_free (overlay->current_subtitle);
  overlay->current_subtitle = NULL;
  if (overlay->current_comp)
    gst_video_overlay_composition_unref (overlay->current_comp);
  overlay->current_comp = NULL;
  if (overlay->dvb_sub)
    dvb_sub_free (overlay->dvb_sub);
@@ -448,243 +456,6 @@ gst_dvbsub_overlay_getcaps (GstPad * pad, GstCaps * filter)
  return caps;
}

static void
blit_i420 (GstDVBSubOverlay * overlay, DVBSubtitles * subs,
    GstVideoFrame * frame)
{
  guint counter;
  DVBSubtitleRect *sub_region;
  gint a1, a2, a3, a4;
  gint y1, y2, y3, y4;
  gint u1, u2, u3, u4;
  gint v1, v2, v3, v4;
  guint32 color;
  const guint8 *src;
  guint8 *dst_y, *dst_y2, *dst_u, *dst_v;
  gint x, y;
  gint w2;
  gint width;
  gint height;
  gint src_stride;
  guint8 *y_data, *u_data, *v_data;
  gint y_stride, u_stride, v_stride;
  gint scale = 0;
  gint scale_x = 0, scale_y = 0;        /* 16.16 fixed point */

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  y_data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  u_data = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
  v_data = GST_VIDEO_FRAME_COMP_DATA (frame, 2);

  y_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  u_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);
  v_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 2);

  if (width != subs->display_def.display_width &&
      height != subs->display_def.display_height) {
    scale = 1;
    if (subs->display_def.window_flag) {
      scale_x = (width << 16) / subs->display_def.window_width;
      scale_y = (height << 16) / subs->display_def.window_height;
    } else {
      scale_x = (width << 16) / subs->display_def.display_width;
      scale_y = (height << 16) / subs->display_def.display_height;
    }
  }

  for (counter = 0; counter < subs->num_rects; counter++) {
    gint dw, dh, dx, dy;
    gint32 sx = 0, sy;          /* 16.16 fixed point */
    gint32 xstep, ystep;        /* 16.16 fixed point */

    sub_region = &subs->rects[counter];
    if (sub_region->y > height || sub_region->x > width)
      continue;

    /* blend subtitles onto the video frame */
    dx = sub_region->x;
    dy = sub_region->y;
    dw = sub_region->w;
    dh = sub_region->h;

    if (scale) {
      dx = (dx * scale_x) >> 16;
      dy = (dy * scale_y) >> 16;
      dw = (dw * scale_x) >> 16;
      dh = (dh * scale_y) >> 16;
      /* apply subtitle window offsets after scaling */
      if (subs->display_def.window_flag) {
        dx += subs->display_def.window_x;
        dy += subs->display_def.window_y;
      }
    }

    dw = MIN (dw, width - dx);
    dh = MIN (dh, height - dx);

    xstep = (sub_region->w << 16) / dw;
    ystep = (sub_region->h << 16) / dh;

    w2 = (dw + 1) / 2;

    src_stride = sub_region->pict.rowstride;

    src = sub_region->pict.data;
    dst_y = y_data + dy * y_stride + dx;
    dst_y2 = y_data + (dy + 1) * y_stride + dx;
    dst_u = u_data + ((dy + 1) / 2) * u_stride + (dx + 1) / 2;
    dst_v = v_data + ((dy + 1) / 2) * v_stride + (dx + 1) / 2;

    sy = 0;
    for (y = 0; y < dh - 1; y += 2) {
      sx = 0;
      for (x = 0; x < dw - 1; x += 2) {
        color =
            sub_region->pict.palette[src[(sy >> 16) * src_stride + (sx >> 16)]];
        a1 = (color >> 24) & 0xff;
        y1 = (color >> 16) & 0xff;
        u1 = ((color >> 8) & 0xff) * a1;
        v1 = (color & 0xff) * a1;

        color =
            sub_region->pict.palette[src[(sy >> 16) * src_stride + ((sx +
                        xstep) >> 16)]];
        a2 = (color >> 24) & 0xff;
        y2 = (color >> 16) & 0xff;
        u2 = ((color >> 8) & 0xff) * a2;
        v2 = (color & 0xff) * a2;

        color =
            sub_region->pict.palette[src[((sy + ystep) >> 16) * src_stride +
                (sx >> 16)]];
        a3 = (color >> 24) & 0xff;
        y3 = (color >> 16) & 0xff;
        u3 = ((color >> 8) & 0xff) * a3;
        v3 = (color & 0xff) * a3;

        color =
            sub_region->pict.palette[src[((sy + ystep) >> 16) * src_stride +
                ((sx + xstep) >> 16)]];
        a4 = (color >> 24) & 0xff;
        y4 = (color >> 16) & 0xff;
        u4 = ((color >> 8) & 0xff) * a4;
        v4 = (color & 0xff) * a4;

        dst_y[0] = (a1 * y1 + (255 - a1) * dst_y[0]) / 255;
        dst_y[1] = (a2 * y2 + (255 - a2) * dst_y[1]) / 255;
        dst_y2[0] = (a3 * y3 + (255 - a3) * dst_y2[0]) / 255;
        dst_y2[1] = (a4 * y4 + (255 - a4) * dst_y2[1]) / 255;

        a1 = (a1 + a2 + a3 + a4) / 4;
        dst_u[0] = ((u1 + u2 + u3 + u4) / 4 + (255 - a1) * dst_u[0]) / 255;
        dst_v[0] = ((v1 + v2 + v3 + v4) / 4 + (255 - a1) * dst_v[0]) / 255;

        dst_y += 2;
        dst_y2 += 2;
        dst_u += 1;
        dst_v += 1;
        sx += 2 * xstep;
      }

      /* Odd width */
      if (x < dw) {
        color =
            sub_region->pict.palette[src[(sy >> 16) * src_stride + (sx >> 16)]];
        a1 = (color >> 24) & 0xff;
        y1 = (color >> 16) & 0xff;
        u1 = ((color >> 8) & 0xff) * a1;
        v1 = (color & 0xff) * a1;

        color =
            sub_region->pict.palette[src[((sy + ystep) >> 16) * src_stride +
                (sx >> 16)]];
        a3 = (color >> 24) & 0xff;
        y3 = (color >> 16) & 0xff;
        u3 = ((color >> 8) & 0xff) * a3;
        v3 = (color & 0xff) * a3;

        dst_y[0] = (a1 * y1 + (255 - a1) * dst_y[0]) / 255;
        dst_y2[0] = (a3 * y3 + (255 - a3) * dst_y2[0]) / 255;

        a1 = (a1 + a3) / 2;
        dst_u[0] = ((u1 + u3) / 2 + (255 - a1) * dst_u[0]) / 255;
        dst_v[0] = ((v1 + v3) / 2 + (255 - a1) * dst_v[0]) / 255;

        dst_y += 1;
        dst_y2 += 1;
        dst_u += 1;
        dst_v += 1;
        sx += xstep;
      }

      sy += 2 * ystep;

      dst_y += y_stride + (y_stride - dw);
      dst_y2 += y_stride + (y_stride - dw);
      dst_u += u_stride - w2;
      dst_v += v_stride - w2;
    }

    /* Odd height */
    if (y < dh) {
      sx = 0;
      for (x = 0; x < dw - 1; x += 2) {
        color =
            sub_region->pict.palette[src[(sy >> 16) * src_stride + (sx >> 16)]];
        a1 = (color >> 24) & 0xff;
        y1 = (color >> 16) & 0xff;
        u1 = ((color >> 8) & 0xff) * a1;
        v1 = (color & 0xff) * a1;

        color =
            sub_region->pict.palette[src[(sy >> 16) * src_stride + ((sx +
                        xstep) >> 16)]];
        a2 = (color >> 24) & 0xff;
        y2 = (color >> 16) & 0xff;
        u2 = ((color >> 8) & 0xff) * a2;
        v2 = (color & 0xff) * a2;

        dst_y[0] = (a1 * y1 + (255 - a1) * dst_y[0]) / 255;
        dst_y[1] = (a2 * y2 + (255 - a2) * dst_y[1]) / 255;

        a1 = (a1 + a2) / 2;
        dst_u[0] = ((u1 + u2) / 2 + (255 - a1) * dst_u[0]) / 255;
        dst_v[0] = ((v1 + v2) / 2 + (255 - a1) * dst_v[0]) / 255;

        dst_y += 2;
        dst_u += 1;
        dst_v += 1;
        sx += 2 * xstep;
      }

      /* Odd height and width */
      if (x < dw) {
        color =
            sub_region->pict.palette[src[(sy >> 16) * src_stride + (sx >> 16)]];
        a1 = (color >> 24) & 0xff;
        y1 = (color >> 16) & 0xff;
        u1 = ((color >> 8) & 0xff) * a1;
        v1 = (color & 0xff) * a1;

        dst_y[0] = (a1 * y1 + (255 - a1) * dst_y[0]) / 255;
        dst_u[0] = (u1 + (255 - a1) * dst_u[0]) / 255;
        dst_v[0] = (v1 + (255 - a1) * dst_v[0]) / 255;

        dst_y += 1;
        dst_u += 1;
        dst_v += 1;
        sx += xstep;
      }
    }
  }

  GST_LOG_OBJECT (overlay, "amount of rendered DVBSubtitleRect: %u", counter);
}

static gboolean
gst_dvbsub_overlay_setcaps_video (GstPad * pad, GstCaps * caps)
{
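
For reference, the removed blit_i420 above performed an integer source-over blend per sample, with chroma additionally averaged over 2x2 blocks because of I420 subsampling. A minimal sketch of the per-sample formula it used (blend_over is an illustrative name, not from the original code):

/* Source-over blend of one 8-bit sample, as in the removed blit_i420:
 * out = (alpha * src + (255 - alpha) * dst) / 255 */
static inline guint8
blend_over (guint alpha, guint src, guint dst)
{
  return (alpha * src + (255 - alpha) * dst) / 255;
}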
@@ -851,6 +622,114 @@ gst_dvbsub_overlay_chain_text (GstPad * pad, GstObject * parent,
  return GST_FLOW_OK;
}

static GstVideoOverlayComposition *
gst_dvbsub_overlay_subs_to_comp (GstDVBSubOverlay * overlay,
    DVBSubtitles * subs)
{
  GstVideoOverlayComposition *comp = NULL;
  GstVideoOverlayRectangle *rect;
  gint width, height, dw, dh, wx, wy;
  gint i;

  g_return_val_if_fail (subs != NULL && subs->num_rects > 0, NULL);

  width = GST_VIDEO_INFO_WIDTH (&overlay->info);
  height = GST_VIDEO_INFO_HEIGHT (&overlay->info);
  dw = subs->display_def.display_width;
  dh = subs->display_def.display_height;

  GST_LOG_OBJECT (overlay,
      "converting %d rectangles for display %dx%d -> video %dx%d",
      subs->num_rects, dw, dh, width, height);

  if (subs->display_def.window_flag) {
    wx = subs->display_def.window_x;
    wy = subs->display_def.window_y;
    GST_LOG_OBJECT (overlay, "display window %dx%d @ (%d, %d)",
        subs->display_def.window_width, subs->display_def.window_height,
        wx, wy);
  } else {
    wx = 0;
    wy = 0;
  }

  for (i = 0; i < subs->num_rects; i++) {
    DVBSubtitleRect *srect = &subs->rects[i];
    GstBuffer *buf;
    gint w, h;
    guint8 *in_data;
    guint32 *palette, *data;
    gint rx, ry, rw, rh, stride;
    gint k, l;
    GstMapInfo map;

    GST_LOG_OBJECT (overlay, "rectangle %d: %dx%d @ (%d, %d)", i,
        srect->w, srect->h, srect->x, srect->y);

    w = srect->w;
    h = srect->h;

    buf = gst_buffer_new_and_alloc (w * h * 4);
    gst_buffer_map (buf, &map, GST_MAP_WRITE);
    data = (guint32 *) map.data;

    in_data = srect->pict.data;
    palette = srect->pict.palette;
    stride = srect->pict.rowstride;

    for (k = 0; k < h; k++) {
      for (l = 0; l < w; l++) {
        guint32 ayuv;
        gint a, y, u, v, r, g, b;

        /* convert ayuv to argb */
        ayuv = palette[*in_data];
        a = ayuv >> 24;
        y = (ayuv >> 16) & 0xff;
        u = (ayuv >> 8) & 0xff;
        v = (ayuv & 0xff);

        r = (298 * y + 459 * v - 63514) >> 8;
        g = (298 * y - 55 * u - 136 * v + 19681) >> 8;
        b = (298 * y + 541 * u - 73988) >> 8;

        r = CLAMP (r, 0, 255);
        g = CLAMP (g, 0, 255);
        b = CLAMP (b, 0, 255);

        *data = ((a << 24) | (r << 16) | (g << 8) | b);

        in_data++;
        data++;
      }
      in_data += stride - w;
    }
    gst_buffer_unmap (buf, &map);

    /* this is assuming the subtitle rectangle coordinates are relative
     * to the window (if there is one) within a display of specified dimension.
     * Coordinate wrt the latter is then scaled to the actual dimension of
     * the video we are dealing with here. */
    rx = gst_util_uint64_scale (wx + srect->x, width, dw);
    ry = gst_util_uint64_scale (wy + srect->y, height, dh);
    rw = gst_util_uint64_scale (srect->w, width, dw);
    rh = gst_util_uint64_scale (srect->h, height, dh);

    GST_LOG_OBJECT (overlay, "rectangle %d rendered: %dx%d @ (%d, %d)", i,
        rw, rh, rx, ry);

    rect = gst_video_overlay_rectangle_new_argb (buf, w, h, 4 * w,
        rx, ry, rw, rh, 0);
    g_assert (rect);
    if (comp) {
      gst_video_overlay_composition_add_rectangle (comp, rect);
    } else {
      comp = gst_video_overlay_composition_new (rect);
    }
  }

  return comp;
}

static GstFlowReturn
gst_dvbsub_overlay_chain_video (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
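
To make the rectangle scaling in gst_dvbsub_overlay_subs_to_comp above concrete (numbers are illustrative, not from the patch): with a 720x576 subtitle display definition and 1920x1080 video, a region at (100, 50) of size 200x100 is rendered at rx = 100 * 1920 / 720 = 266, ry = 50 * 1080 / 576 = 93, with rw = 200 * 1920 / 720 = 533 and rh = 100 * 1080 / 576 = 187 (gst_util_uint64_scale truncates).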
@@ -955,7 +834,10 @@ gst_dvbsub_overlay_chain_video (GstPad * pad, GstObject * parent,
          candidate->num_rects);
      dvb_subtitles_free (overlay->current_subtitle);
      overlay->current_subtitle = candidate;
      /* FIXME: Pre-convert current_subtitle to a quick-blend format, num_rects=0 means that there are no regions, e.g, a subtitle "clear" happened */
      if (overlay->current_comp)
        gst_video_overlay_composition_unref (overlay->current_comp);
      overlay->current_comp =
          gst_dvbsub_overlay_subs_to_comp (overlay, overlay->current_subtitle);
    }
  }
@@ -978,7 +860,9 @@ gst_dvbsub_overlay_chain_video (GstPad * pad, GstObject * parent,
    buffer = gst_buffer_make_writable (buffer);
    gst_video_frame_map (&frame, &overlay->info, buffer, GST_MAP_WRITE);
    blit_i420 (overlay, overlay->current_subtitle, &frame);
    g_assert (overlay->current_comp);
    buffer = gst_buffer_make_writable (buffer);
    gst_video_overlay_composition_blend (overlay->current_comp, &frame);
    gst_video_frame_unmap (&frame);
  }

  g_mutex_unlock (&overlay->dvbsub_mutex);

gst/dvbsuboverlay/gstdvbsuboverlay.h

@@ -22,6 +22,7 @@
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/video-overlay-composition.h>
#include "dvb-sub.h"
@@ -54,6 +55,7 @@ struct _GstDVBSubOverlay
  GstVideoInfo info;

  DVBSubtitles *current_subtitle;       /* The currently active set of subtitle regions, if any */
  GstVideoOverlayComposition *current_comp;
  GQueue *pending_subtitles;    /* A queue of raw subtitle region sets with
                                 * metadata that are waiting their running time */