plugins: cope with new GstVaapiVideoMeta API.

Update plugin elements with the new GstVaapiVideoMeta API.

This also fixes support for subpictures/overlay: GstVideoDecoder generates
a sub-buffer from the GstVaapiVideoBuffer, and that sub-buffer is marked as
read-only. When it reaches the textoverlay element, for example, that element
checks whether the input buffer is writable. Since the buffer is read-only,
a new GstBuffer is created through gst_buffer_copy(). Because gst_buffer_copy()
does not preserve the parent field, the buffer generated in textoverlay is
unusable: all VA-specific information is lost.

Now, with the GstVaapiVideoMeta information attached to a standard GstBuffer,
all information is preserved through gst_buffer_copy(), since the latter does
copy metadata (qdata in this case).
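
As an illustration, here is a minimal sketch of a hypothetical overlay-style
chain function (GStreamer 0.10 style, not part of this commit) showing that
the VA information remains reachable after the element makes a writable copy
of its input buffer:

    /* Hypothetical element code, sketch only. */
    static GstFlowReturn
    overlay_chain(GstPad *pad, GstBuffer *buf)
    {
        GstVaapiVideoMeta *meta;

        /* Copies the buffer when it is not writable, as textoverlay does. */
        buf = gst_buffer_make_writable(buf);

        /* Before this commit the copy lost its GstVaapiVideoBuffer parent;
         * now the GstVaapiVideoMeta is carried over as qdata by the copy. */
        meta = gst_buffer_get_vaapi_video_meta(buf);
        if (!meta)
            return GST_FLOW_UNEXPECTED;

        /* ... blend the overlay into the VA surface, push buf downstream ... */
        return GST_FLOW_OK;
    }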
Gwenole Beauchesne 2013-01-05 17:55:47 +01:00
parent d6bbc652b4
commit a00ae0918a
8 changed files with 82 additions and 148 deletions


@ -30,7 +30,7 @@
#include "gst/vaapi/sysdeps.h"
#include <gst/vaapi/gstvaapidisplay.h>
#include <gst/vaapi/gstvaapivideobuffer.h>
#include <gst/vaapi/gstvaapivideometa.h>
#include <gst/video/videocontext.h>
#include "gstvaapidecode.h"
@ -224,13 +224,11 @@ gst_vaapidecode_handle_frame(GstVideoDecoder *vdec, GstVideoCodecFrame *frame)
gst_vaapi_surface_proxy_set_user_data(proxy,
decode, (GDestroyNotify)gst_vaapidecode_release);
out_frame->output_buffer = gst_vaapi_video_buffer_new(decode->display);
out_frame->output_buffer =
gst_vaapi_video_buffer_new_with_surface_proxy(proxy);
if (!out_frame->output_buffer)
goto error_create_buffer;
gst_vaapi_video_buffer_set_surface_proxy(
GST_VAAPI_VIDEO_BUFFER(out_frame->output_buffer), proxy);
ret = gst_video_decoder_finish_frame(vdec, out_frame);
if (ret != GST_FLOW_OK)
goto error_commit_buffer;
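
With the meta-based API, vaapidecode no longer needs a display-bound video
buffer plus a separate call to attach the proxy; one constructor does both.
A minimal sketch of the new tail of handle_frame, with the error labels
assumed from the surrounding function:

    /* Sketch only; the release hook and error labels exist in the real code. */
    gst_vaapi_surface_proxy_set_user_data(proxy,
        decode, (GDestroyNotify)gst_vaapidecode_release);

    out_frame->output_buffer =
        gst_vaapi_video_buffer_new_with_surface_proxy(proxy);
    if (!out_frame->output_buffer)
        goto error_create_buffer;

    ret = gst_video_decoder_finish_frame(vdec, out_frame);
    if (ret != GST_FLOW_OK)
        goto error_commit_buffer;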


@ -31,7 +31,7 @@
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/videocontext.h>
#include <gst/vaapi/gstvaapivideobuffer.h>
#include <gst/vaapi/gstvaapivideometa.h>
#include "gstvaapidownload.h"
#include "gstvaapipluginutil.h"
@ -333,14 +333,14 @@ get_surface_format(GstVaapiSurface *surface)
static gboolean
gst_vaapidownload_update_src_caps(GstVaapiDownload *download, GstBuffer *buffer)
{
GstVaapiVideoBuffer *vbuffer;
GstVaapiVideoMeta *meta;
GstVaapiSurface *surface;
GstVaapiImageFormat format;
GstPad *srcpad;
GstCaps *in_caps, *out_caps;
vbuffer = GST_VAAPI_VIDEO_BUFFER(buffer);
surface = gst_vaapi_video_buffer_get_surface(vbuffer);
meta = gst_buffer_get_vaapi_video_meta(buffer);
surface = gst_vaapi_video_meta_get_surface(meta);
if (!surface) {
GST_WARNING("failed to retrieve VA surface from buffer");
return FALSE;
@ -395,13 +395,13 @@ gst_vaapidownload_transform(
)
{
GstVaapiDownload * const download = GST_VAAPIDOWNLOAD(trans);
GstVaapiVideoBuffer *vbuffer;
GstVaapiVideoMeta *meta;
GstVaapiSurface *surface;
GstVaapiImage *image = NULL;
gboolean success;
vbuffer = GST_VAAPI_VIDEO_BUFFER(inbuf);
surface = gst_vaapi_video_buffer_get_surface(vbuffer);
meta = gst_buffer_get_vaapi_video_meta(inbuf);
surface = gst_vaapi_video_meta_get_surface(meta);
if (!surface)
return GST_FLOW_UNEXPECTED;
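
Both vaapidownload hunks apply the same replacement pattern: instead of
casting the GstBuffer to GstVaapiVideoBuffer, the element asks the buffer for
its GstVaapiVideoMeta and takes the surface from there. A hedged sketch of
that lookup, with the failure path simplified:

    /* Sketch of the meta-based surface lookup used throughout this commit. */
    GstVaapiVideoMeta *meta;
    GstVaapiSurface *surface;

    meta = gst_buffer_get_vaapi_video_meta(inbuf);
    surface = meta ? gst_vaapi_video_meta_get_surface(meta) : NULL;
    if (!surface)
        return GST_FLOW_UNEXPECTED;  /* not a VA-backed buffer */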


@ -23,115 +23,73 @@
# include "config.h"
#endif
#include <gst/vaapi/gstvaapivideometa.h>
#include <gst/vaapi/gstvaapivideobuffer.h>
#include <gst/vaapi/gstvaapivideobuffer_priv.h>
#if USE_GLX
# include <gst/vaapi/gstvaapivideobuffer_glx.h>
# include <gst/vaapi/gstvaapivideoconverter_glx.h>
#endif
#include "gstvaapipluginbuffer.h"
static GType
get_type(GstVaapiDisplay *display)
static GFunc
get_surface_converter(GstVaapiDisplay *display)
{
GType type;
GFunc func;
switch (gst_vaapi_display_get_display_type(display)) {
#if USE_GLX
case GST_VAAPI_DISPLAY_TYPE_GLX:
type = GST_VAAPI_TYPE_VIDEO_BUFFER_GLX;
func = (GFunc)gst_vaapi_video_converter_glx_new;
break;
#endif
default:
type = GST_VAAPI_TYPE_VIDEO_BUFFER;
func = NULL;
break;
}
return type;
return func;
}
GstBuffer *
gst_vaapi_video_buffer_new(GstVaapiDisplay *display)
static GstBuffer *
get_buffer(GstVaapiVideoMeta *meta)
{
g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
GstBuffer *buffer;
return gst_vaapi_video_buffer_typed_new(get_type(display), display);
if (!meta)
return NULL;
gst_vaapi_video_meta_set_surface_converter(meta,
get_surface_converter(gst_vaapi_video_meta_get_display(meta)));
buffer = gst_vaapi_video_buffer_new(meta);
gst_vaapi_video_meta_unref(meta);
return buffer;
}
GstBuffer *
gst_vaapi_video_buffer_new_from_pool(GstVaapiVideoPool *pool)
{
GstVaapiDisplay *display;
g_return_val_if_fail(GST_VAAPI_IS_VIDEO_POOL(pool), NULL);
display = gst_vaapi_video_pool_get_display(pool);
if (!display)
return NULL;
return gst_vaapi_video_buffer_typed_new_from_pool(get_type(display), pool);
return get_buffer(gst_vaapi_video_meta_new_from_pool(pool));
}
GstBuffer *
gst_vaapi_video_buffer_new_from_buffer(GstBuffer *buffer)
{
GstVaapiVideoBuffer *vbuffer;
GstVaapiDisplay *display;
g_return_val_if_fail(GST_VAAPI_IS_VIDEO_BUFFER(buffer), NULL);
vbuffer = GST_VAAPI_VIDEO_BUFFER(buffer);
display = gst_vaapi_video_buffer_get_display(vbuffer);
if (!display)
return NULL;
return gst_vaapi_video_buffer_typed_new_from_buffer(
get_type(display), buffer);
return get_buffer(gst_buffer_get_vaapi_video_meta(buffer));
}
GstBuffer *
gst_vaapi_video_buffer_new_with_image(GstVaapiImage *image)
{
GstVaapiDisplay *display;
g_return_val_if_fail(GST_VAAPI_IS_IMAGE(image), NULL);
display = gst_vaapi_object_get_display(GST_VAAPI_OBJECT(image));
if (!display)
return NULL;
return gst_vaapi_video_buffer_typed_new_with_image(
get_type(display), image);
return get_buffer(gst_vaapi_video_meta_new_with_image(image));
}
GstBuffer *
gst_vaapi_video_buffer_new_with_surface(GstVaapiSurface *surface)
{
GstVaapiDisplay *display;
g_return_val_if_fail(GST_VAAPI_IS_SURFACE(surface), NULL);
display = gst_vaapi_object_get_display(GST_VAAPI_OBJECT(surface));
if (!display)
return NULL;
return gst_vaapi_video_buffer_typed_new_with_surface(
get_type(display), surface);
return get_buffer(gst_vaapi_video_meta_new_with_surface(surface));
}
GstBuffer *
gst_vaapi_video_buffer_new_with_surface_proxy(GstVaapiSurfaceProxy *proxy)
{
GstVaapiDisplay *display;
GstVaapiSurface *surface;
g_return_val_if_fail(proxy != NULL, NULL);
surface = gst_vaapi_surface_proxy_get_surface(proxy);
if (!surface)
return NULL;
display = gst_vaapi_object_get_display(GST_VAAPI_OBJECT(surface));
if (!display)
return NULL;
return gst_vaapi_video_buffer_typed_new_with_surface_proxy(
get_type(display), proxy);
return get_buffer(gst_vaapi_video_meta_new_with_surface_proxy(proxy));
}
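
gstvaapipluginbuffer.c now acts as a thin bridge: each helper creates a
GstVaapiVideoMeta, installs the GLX surface converter when the display is
GLX-backed, and wraps the meta into a plain GstBuffer through
gst_vaapi_video_buffer_new(). A sketch of how an element would use the
pool-based helper; the surface pool ("surface_pool") is an assumption here:

    /* Sketch: allocate a VA-backed GstBuffer from an existing surface pool. */
    GstBuffer *outbuf;
    GstVaapiVideoMeta *meta;
    GstVaapiSurface *surface;

    outbuf = gst_vaapi_video_buffer_new_from_pool(surface_pool);
    if (!outbuf)
        return GST_FLOW_UNEXPECTED;

    /* From here on, the meta (not the buffer type) carries the VA state. */
    meta = gst_buffer_get_vaapi_video_meta(outbuf);
    surface = gst_vaapi_video_meta_get_surface(meta);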


@ -22,10 +22,6 @@
#ifndef GST_VAAPI_PLUGIN_BUFFER_H
#define GST_VAAPI_PLUGIN_BUFFER_H
G_GNUC_INTERNAL
GstBuffer *
gst_vaapi_video_buffer_new(GstVaapiDisplay *display);
G_GNUC_INTERNAL
GstBuffer *
gst_vaapi_video_buffer_new_from_pool(GstVaapiVideoPool *pool);


@ -31,7 +31,7 @@
#include "gst/vaapi/sysdeps.h"
#include <gst/video/video.h>
#include <gst/video/videocontext.h>
#include <gst/vaapi/gstvaapivideobuffer.h>
#include <gst/vaapi/gstvaapivideometa.h>
#include "gstvaapipostproc.h"
#include "gstvaapipluginutil.h"
@ -255,7 +255,7 @@ gst_vaapipostproc_stop(GstVaapiPostproc *postproc)
static GstFlowReturn
gst_vaapipostproc_process(GstVaapiPostproc *postproc, GstBuffer *buf)
{
GstVaapiVideoBuffer *vbuf;
GstVaapiVideoMeta *meta;
GstVaapiSurfaceProxy *proxy;
GstClockTime timestamp;
GstFlowReturn ret;
@ -263,18 +263,15 @@ gst_vaapipostproc_process(GstVaapiPostproc *postproc, GstBuffer *buf)
guint outbuf_flags, flags;
gboolean tff;
if (GST_VAAPI_IS_VIDEO_BUFFER(buf))
vbuf = GST_VAAPI_VIDEO_BUFFER(buf);
else if (GST_VAAPI_IS_VIDEO_BUFFER(buf->parent))
vbuf = GST_VAAPI_VIDEO_BUFFER(buf->parent);
else
meta = gst_buffer_get_vaapi_video_meta(buf);
if (!meta)
goto error_invalid_buffer;
flags = gst_vaapi_video_buffer_get_render_flags(vbuf);
flags = gst_vaapi_video_meta_get_render_flags(meta);
/* Deinterlacing disabled, push frame */
if (!postproc->deinterlace) {
gst_vaapi_video_buffer_set_render_flags(vbuf, flags);
gst_vaapi_video_meta_set_render_flags(meta, flags);
ret = gst_pad_push(postproc->srcpad, buf);
if (ret != GST_FLOW_OK)
goto error_push_buffer;
@ -282,7 +279,7 @@ gst_vaapipostproc_process(GstVaapiPostproc *postproc, GstBuffer *buf)
}
timestamp = GST_BUFFER_TIMESTAMP(buf);
proxy = gst_vaapi_video_buffer_get_surface_proxy(vbuf);
proxy = gst_vaapi_video_meta_get_surface_proxy(meta);
tff = GST_BUFFER_FLAG_IS_SET(buf, GST_VIDEO_BUFFER_TFF);
flags &= ~(GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD|
@ -293,14 +290,14 @@ gst_vaapipostproc_process(GstVaapiPostproc *postproc, GstBuffer *buf)
if (!outbuf)
goto error_create_buffer;
vbuf = GST_VAAPI_VIDEO_BUFFER(outbuf);
meta = gst_buffer_get_vaapi_video_meta(outbuf);
outbuf_flags = flags;
outbuf_flags |= postproc->deinterlace ? (
tff ?
GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD :
GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD) :
GST_VAAPI_PICTURE_STRUCTURE_FRAME;
gst_vaapi_video_buffer_set_render_flags(vbuf, outbuf_flags);
gst_vaapi_video_meta_set_render_flags(meta, outbuf_flags);
GST_BUFFER_TIMESTAMP(outbuf) = timestamp;
GST_BUFFER_DURATION(outbuf) = postproc->field_duration;
@ -314,14 +311,14 @@ gst_vaapipostproc_process(GstVaapiPostproc *postproc, GstBuffer *buf)
if (!outbuf)
goto error_create_buffer;
vbuf = GST_VAAPI_VIDEO_BUFFER(outbuf);
meta = gst_buffer_get_vaapi_video_meta(outbuf);
outbuf_flags = flags;
outbuf_flags |= postproc->deinterlace ? (
tff ?
GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD :
GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD) :
GST_VAAPI_PICTURE_STRUCTURE_FRAME;
gst_vaapi_video_buffer_set_render_flags(vbuf, outbuf_flags);
gst_vaapi_video_meta_set_render_flags(meta, outbuf_flags);
GST_BUFFER_TIMESTAMP(outbuf) = timestamp + postproc->field_duration;
GST_BUFFER_DURATION(outbuf) = postproc->field_duration;
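
In vaapipostproc, the render flags and the surface proxy now travel through
the metas of the input and output buffers. A condensed sketch of the entry of
gst_vaapipostproc_process(), with declarations as in the hunks above:

    /* Sketch: read flags from the input meta; push unchanged when not
     * deinterlacing, otherwise fetch the proxy for the field buffers. */
    meta = gst_buffer_get_vaapi_video_meta(buf);
    if (!meta)
        goto error_invalid_buffer;

    flags = gst_vaapi_video_meta_get_render_flags(meta);
    if (!postproc->deinterlace) {
        gst_vaapi_video_meta_set_render_flags(meta, flags);
        return gst_pad_push(postproc->srcpad, buf);
    }
    proxy = gst_vaapi_video_meta_get_surface_proxy(meta);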


@ -34,7 +34,7 @@
#include <gst/video/video.h>
#include <gst/video/videocontext.h>
#include <gst/vaapi/gstvaapivalue.h>
#include <gst/vaapi/gstvaapivideobuffer.h>
#include <gst/vaapi/gstvaapivideometa.h>
#if USE_DRM
# include <gst/vaapi/gstvaapidisplay_drm.h>
#endif
@ -858,33 +858,31 @@ gst_vaapisink_show_frame(GstBaseSink *base_sink, GstBuffer *src_buffer)
{
GstVaapiSink * const sink = GST_VAAPISINK(base_sink);
GstVideoOverlayComposition *composition;
GstVaapiVideoBuffer *vbuffer;
GstVaapiVideoMeta *meta;
GstVaapiSurface *surface;
GstBuffer *buffer;
guint flags;
gboolean success;
if (GST_VAAPI_IS_VIDEO_BUFFER(src_buffer))
meta = gst_buffer_get_vaapi_video_meta(src_buffer);
if (meta)
buffer = gst_buffer_ref(src_buffer);
else if (GST_VAAPI_IS_VIDEO_BUFFER(src_buffer->parent))
buffer = gst_buffer_ref(src_buffer->parent);
else if (sink->use_video_raw)
else if (sink->use_video_raw) {
buffer = gst_vaapi_uploader_get_buffer(sink->uploader);
if (!buffer)
return GST_FLOW_UNEXPECTED;
if (!gst_vaapi_uploader_process(sink->uploader, src_buffer, buffer))
goto error;
meta = gst_buffer_get_vaapi_video_meta(buffer);
if (!meta)
goto error;
}
else
buffer = NULL;
if (!buffer)
return GST_FLOW_UNEXPECTED;
if (sink->use_video_raw && !gst_vaapi_uploader_process(sink->uploader,
src_buffer, buffer))
goto error;
vbuffer = GST_VAAPI_VIDEO_BUFFER(buffer);
g_return_val_if_fail(vbuffer != NULL, GST_FLOW_UNEXPECTED);
if (sink->display != gst_vaapi_video_buffer_get_display (vbuffer)) {
g_clear_object(&sink->display);
sink->display = g_object_ref (gst_vaapi_video_buffer_get_display (vbuffer));
if (sink->display != gst_vaapi_video_meta_get_display(meta)) {
g_clear_object(&sink->display);
sink->display = g_object_ref(gst_vaapi_video_meta_get_display(meta));
}
if (!sink->window)
@ -892,14 +890,14 @@ gst_vaapisink_show_frame(GstBaseSink *base_sink, GstBuffer *src_buffer)
gst_vaapisink_ensure_rotation(sink, TRUE);
surface = gst_vaapi_video_buffer_get_surface(vbuffer);
surface = gst_vaapi_video_meta_get_surface(meta);
if (!surface)
goto error;
GST_DEBUG("render surface %" GST_VAAPI_ID_FORMAT,
GST_VAAPI_ID_ARGS(gst_vaapi_surface_get_id(surface)));
flags = gst_vaapi_video_buffer_get_render_flags(vbuffer);
flags = gst_vaapi_video_meta_get_render_flags(meta);
composition = gst_video_buffer_get_overlay_composition(src_buffer);
if (!gst_vaapi_surface_set_subpictures_from_composition(surface,
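
vaapisink follows the same scheme: the display, the surface and the render
flags are all read from the meta, and raw YUV input first goes through the
uploader, which hands back a buffer that already carries a meta. A trimmed
sketch of the raw-video branch of show_frame:

    /* Sketch of the use_video_raw path; error handling shortened. */
    buffer = gst_vaapi_uploader_get_buffer(sink->uploader);
    if (!buffer)
        return GST_FLOW_UNEXPECTED;
    if (!gst_vaapi_uploader_process(sink->uploader, src_buffer, buffer))
        goto error;

    meta = gst_buffer_get_vaapi_video_meta(buffer);
    if (!meta)
        goto error;

    surface = gst_vaapi_video_meta_get_surface(meta);
    flags = gst_vaapi_video_meta_get_render_flags(meta);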


@ -32,7 +32,7 @@
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/videocontext.h>
#include <gst/vaapi/gstvaapivideobuffer.h>
#include <gst/vaapi/gstvaapivideometa.h>
#include "gstvaapiupload.h"
#include "gstvaapipluginutil.h"
@ -465,12 +465,8 @@ gst_vaapiupload_prepare_output_buffer(
if (!gst_vaapi_uploader_has_direct_rendering(upload->uploader))
buffer = gst_vaapi_uploader_get_buffer(upload->uploader);
else if (GST_VAAPI_IS_VIDEO_BUFFER(inbuf))
buffer = gst_vaapi_video_buffer_new_from_buffer(inbuf);
else if (GST_VAAPI_IS_VIDEO_BUFFER(inbuf->parent))
buffer = gst_vaapi_video_buffer_new_from_buffer(inbuf->parent);
else
buffer = NULL;
buffer = gst_vaapi_video_buffer_new_from_buffer(inbuf);
if (!buffer)
return GST_FLOW_UNEXPECTED;
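
Since gst_vaapi_video_buffer_new_from_buffer() now only relies on the meta
attached to the input buffer, vaapiupload no longer needs to peek at
inbuf->parent for sub-buffers. A short sketch of the resulting branch in
prepare_output_buffer:

    /* Sketch: output buffer selection after this commit. */
    if (!gst_vaapi_uploader_has_direct_rendering(upload->uploader))
        buffer = gst_vaapi_uploader_get_buffer(upload->uploader);
    else
        buffer = gst_vaapi_video_buffer_new_from_buffer(inbuf);
    if (!buffer)
        return GST_FLOW_UNEXPECTED;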


@ -26,7 +26,7 @@
#include <gst/vaapi/gstvaapisurface.h>
#include <gst/vaapi/gstvaapiimagepool.h>
#include <gst/vaapi/gstvaapisurfacepool.h>
#include <gst/vaapi/gstvaapivideobuffer.h>
#include <gst/vaapi/gstvaapivideometa.h>
#include "gstvaapiuploader.h"
#include "gstvaapipluginbuffer.h"
@ -370,46 +370,36 @@ gst_vaapi_uploader_process(
GstBuffer *out_buffer
)
{
GstVaapiVideoBuffer *out_vbuffer;
GstVaapiVideoMeta *src_meta, *out_meta;
GstVaapiSurface *surface;
GstVaapiImage *image;
g_return_val_if_fail(GST_VAAPI_IS_UPLOADER(uploader), FALSE);
if (GST_VAAPI_IS_VIDEO_BUFFER(out_buffer))
out_vbuffer = GST_VAAPI_VIDEO_BUFFER(out_buffer);
else if (GST_VAAPI_IS_VIDEO_BUFFER(out_buffer->parent))
out_vbuffer = GST_VAAPI_VIDEO_BUFFER(out_buffer->parent);
else {
out_meta = gst_buffer_get_vaapi_video_meta(out_buffer);
if (!out_meta) {
GST_WARNING("expected an output video buffer");
return FALSE;
}
surface = gst_vaapi_video_buffer_get_surface(out_vbuffer);
surface = gst_vaapi_video_meta_get_surface(out_meta);
g_return_val_if_fail(surface != NULL, FALSE);
if (GST_VAAPI_IS_VIDEO_BUFFER(src_buffer)) {
src_meta = gst_buffer_get_vaapi_video_meta(src_buffer);
if (src_meta) {
/* GstVaapiVideoBuffer with mapped VA image */
image = gst_vaapi_video_buffer_get_image(
GST_VAAPI_VIDEO_BUFFER(src_buffer));
if (!image || !gst_vaapi_image_unmap(image))
return FALSE;
}
else if (GST_VAAPI_IS_VIDEO_BUFFER(src_buffer->parent)) {
/* Sub-buffer from GstVaapiVideoBuffer with mapped VA image */
image = gst_vaapi_video_buffer_get_image(
GST_VAAPI_VIDEO_BUFFER(src_buffer->parent));
image = gst_vaapi_video_meta_get_image(src_meta);
if (!image || !gst_vaapi_image_unmap(image))
return FALSE;
}
else {
/* Regular GstBuffer that needs to be uploaded to a VA image */
image = gst_vaapi_video_buffer_get_image(out_vbuffer);
image = gst_vaapi_video_meta_get_image(out_meta);
if (!image) {
image = gst_vaapi_video_pool_get_object(uploader->priv->images);
if (!image)
return FALSE;
gst_vaapi_video_buffer_set_image(out_vbuffer, image);
gst_vaapi_video_meta_set_image(out_meta, image);
}
if (!gst_vaapi_image_update_from_buffer(image, src_buffer, NULL))
return FALSE;
@ -443,8 +433,8 @@ gst_vaapi_uploader_get_buffer(GstVaapiUploader *uploader)
GstVaapiUploaderPrivate *priv;
GstVaapiSurface *surface;
GstVaapiImage *image;
GstVaapiVideoBuffer *vbuffer;
GstBuffer *buffer = NULL;
GstVaapiVideoMeta *meta;
GstBuffer *buffer;
g_return_val_if_fail(GST_VAAPI_IS_UPLOADER(uploader), NULL);
@ -455,7 +445,8 @@ gst_vaapi_uploader_get_buffer(GstVaapiUploader *uploader)
GST_WARNING("failed to allocate video buffer");
goto error;
}
vbuffer = GST_VAAPI_VIDEO_BUFFER(buffer);
meta = gst_buffer_get_vaapi_video_meta(buffer);
surface = gst_vaapi_video_pool_get_object(priv->surfaces);
if (!surface) {
@ -463,9 +454,9 @@ gst_vaapi_uploader_get_buffer(GstVaapiUploader *uploader)
goto error;
}
gst_vaapi_video_buffer_set_surface(vbuffer, surface);
gst_vaapi_video_meta_set_surface(meta, surface);
image = gst_vaapi_video_buffer_get_image(vbuffer);
image = gst_vaapi_video_meta_get_image(meta);
if (!gst_vaapi_image_map(image)) {
GST_WARNING("failed to map VA image");
goto error;
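
Finally, gst_vaapi_uploader_get_buffer() configures the buffer it allocates
through the meta as well: it fetches the meta from the freshly created
buffer, attaches a surface taken from the pool, then maps the meta's image
for CPU writes. A hedged sketch of that sequence, with cleanup paths omitted:

    /* Sketch of the new uploader buffer setup. */
    meta = gst_buffer_get_vaapi_video_meta(buffer);

    surface = gst_vaapi_video_pool_get_object(priv->surfaces);
    if (!surface)
        goto error;
    gst_vaapi_video_meta_set_surface(meta, surface);

    image = gst_vaapi_video_meta_get_image(meta);
    if (!gst_vaapi_image_map(image)) {
        GST_WARNING("failed to map VA image");
        goto error;
    }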