plugins: use new video buffer pools.

Use the new GstVaapiVideoBufferPool to maintain video buffers. Implement
GstBaseSink::propose_allocation() in vaapisink to expose that pool to
upstream elements, and implement GstVideoDecoder::decide_allocation() in
vaapidecode to actually use the pool proposed by downstream, if any, or
create a new one.

Signed-off-by: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
Sreerenj Balachandran, 2012-11-08 16:41:22 +02:00 (committed by Gwenole Beauchesne)
parent 5b11b83321
commit 551ac4c5b3
3 changed files with 209 additions and 3 deletions
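
The commit message refers to the GStreamer 1.0 allocation negotiation: an upstream element sends a GST_QUERY_ALLOCATION downstream, and the sink answers it from its propose_allocation() vfunc. As orientation only, here is a minimal sketch of the querying side; "srcpad" and "caps" are assumed placeholders, and GstVideoDecoder performs the equivalent step internally before invoking decide_allocation(), so nothing like this appears in the patch itself.

/* Minimal illustrative sketch (not part of this commit): how an element
 * upstream of vaapisink would discover the proposed pool through a
 * GST_QUERY_ALLOCATION. */
#include <gst/gst.h>

static GstBufferPool *
query_downstream_pool(GstPad *srcpad, GstCaps *caps)
{
    GstQuery *query;
    GstBufferPool *pool = NULL;
    guint size, min_buffers, max_buffers;

    query = gst_query_new_allocation(caps, TRUE);
    if (gst_pad_peer_query(srcpad, query) &&
        gst_query_get_n_allocation_pools(query) > 0) {
        /* Take the first proposed pool; the caller receives a reference */
        gst_query_parse_nth_allocation_pool(query, 0,
            &pool, &size, &min_buffers, &max_buffers);
    }
    gst_query_unref(query);
    return pool;
}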


@@ -35,6 +35,8 @@
#include "gstvaapidecode.h"
#include "gstvaapipluginutil.h"
#include "gstvaapivideobuffer.h"
#include "gstvaapivideobufferpool.h"
#include "gstvaapivideomemory.h"
#include <gst/vaapi/gstvaapidecoder_h264.h>
#include <gst/vaapi/gstvaapidecoder_jpeg.h>
@@ -154,7 +156,7 @@ gst_vaapidecode_update_src_caps(GstVaapiDecode *decode,
{
GstVideoDecoder * const vdec = GST_VIDEO_DECODER(decode);
GstVideoCodecState *state;
-GstVideoInfo *vi;
+GstVideoInfo *vi, vis;
state = gst_video_decoder_set_output_state(vdec,
GST_VIDEO_INFO_FORMAT(&ref_state->info),
@@ -163,6 +165,12 @@ gst_vaapidecode_update_src_caps(GstVaapiDecode *decode,
return FALSE;
vi = &state->info;
if (GST_VIDEO_INFO_FORMAT(vi) == GST_VIDEO_FORMAT_ENCODED) {
gst_video_info_init(&vis);
gst_video_info_set_format(&vis, GST_VIDEO_FORMAT_NV12,
GST_VIDEO_INFO_WIDTH(vi), GST_VIDEO_INFO_HEIGHT(vi));
vi->size = vis.size;
}
gst_video_codec_state_unref(state);
/* XXX: gst_video_info_to_caps() from GStreamer 0.10 does not
@@ -263,6 +271,7 @@ gst_vaapidecode_push_decoded_frames(GstVideoDecoder *vdec)
GstVaapiDecode * const decode = GST_VAAPIDECODE(vdec);
GstVaapiSurfaceProxy *proxy;
GstVaapiDecoderStatus status;
GstVaapiVideoMeta *meta;
GstVideoCodecFrame *out_frame;
GstFlowReturn ret;
@@ -278,10 +287,21 @@ gst_vaapidecode_push_decoded_frames(GstVideoDecoder *vdec)
gst_vaapi_surface_proxy_set_user_data(proxy,
decode, (GDestroyNotify)gst_vaapidecode_release);
#if GST_CHECK_VERSION(1,0,0)
ret = gst_video_decoder_allocate_output_frame(vdec, out_frame);
if (ret != GST_FLOW_OK)
goto error_create_buffer;
meta = gst_buffer_get_vaapi_video_meta(out_frame->output_buffer);
if (!meta)
goto error_get_meta;
gst_vaapi_video_meta_set_surface_proxy(meta, proxy);
#else
out_frame->output_buffer =
gst_vaapi_video_buffer_new_with_surface_proxy(proxy);
if (!out_frame->output_buffer)
goto error_create_buffer;
#endif
}
ret = gst_video_decoder_finish_frame(vdec, out_frame);
@@ -307,6 +327,13 @@ error_create_buffer:
gst_video_codec_frame_unref(out_frame);
return GST_FLOW_EOS;
}
error_get_meta:
{
GST_ERROR("failed to get vaapi video meta attached to video buffer");
gst_video_decoder_drop_frame(vdec, out_frame);
gst_video_codec_frame_unref(out_frame);
return GST_FLOW_EOS;
}
error_commit_buffer:
{
GST_DEBUG("video sink rejected the video buffer (error %d)", ret);
@@ -345,6 +372,78 @@ error_flush:
}
}
static gboolean
gst_vaapidecode_decide_allocation(GstVideoDecoder *vdec, GstQuery *query)
{
GstVaapiDecode * const decode = GST_VAAPIDECODE(vdec);
GstCaps *caps = NULL;
GstBufferPool *pool;
GstStructure *config;
GstVideoInfo vi;
guint size, min, max;
gboolean need_pool, update_pool;
gst_query_parse_allocation(query, &caps, &need_pool);
if (!caps)
goto error_no_caps;
gst_video_info_init(&vi);
gst_video_info_from_caps(&vi, caps);
if (GST_VIDEO_INFO_FORMAT(&vi) == GST_VIDEO_FORMAT_ENCODED)
gst_video_info_set_format(&vi, GST_VIDEO_FORMAT_NV12,
GST_VIDEO_INFO_WIDTH(&vi), GST_VIDEO_INFO_HEIGHT(&vi));
g_return_val_if_fail(decode->display != NULL, FALSE);
if (gst_query_get_n_allocation_pools(query) > 0) {
gst_query_parse_nth_allocation_pool(query, 0, &pool, &size, &min, &max);
size = MAX(size, vi.size);
update_pool = TRUE;
}
else {
pool = NULL;
size = vi.size;
min = max = 0;
update_pool = FALSE;
}
if (!pool || !gst_buffer_pool_has_option(pool,
GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META)) {
GST_INFO("no pool or doesn't support GstVaapiVideoMeta, "
"making new pool");
pool = gst_vaapi_video_buffer_pool_new(decode->display);
if (!pool)
goto error_create_pool;
config = gst_buffer_pool_get_config(pool);
gst_buffer_pool_config_set_params(config, caps, size, min, max);
gst_buffer_pool_config_add_option(config,
GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META);
gst_buffer_pool_set_config(pool, config);
}
if (update_pool)
gst_query_set_nth_allocation_pool(query, 0, pool, size, min, max);
else
gst_query_add_allocation_pool(query, pool, size, min, max);
if (pool)
g_object_unref(pool);
return TRUE;
/* ERRORS */
error_no_caps:
{
GST_ERROR("no caps specified");
return FALSE;
}
error_create_pool:
{
GST_ERROR("failed to create buffer pool");
return FALSE;
}
}
static inline gboolean
gst_vaapidecode_ensure_display(GstVaapiDecode *decode)
{
@@ -566,6 +665,9 @@ gst_vaapidecode_class_init(GstVaapiDecodeClass *klass)
vdec_class->handle_frame = GST_DEBUG_FUNCPTR(gst_vaapidecode_handle_frame);
vdec_class->finish = GST_DEBUG_FUNCPTR(gst_vaapidecode_finish);
vdec_class->decide_allocation =
GST_DEBUG_FUNCPTR(gst_vaapidecode_decide_allocation);
gst_element_class_set_static_metadata(element_class,
"VA-API decoder",
"Codec/Decoder/Video",

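The decide_allocation() hunk above keys off gst_buffer_pool_has_option() to decide whether the downstream pool can carry GstVaapiVideoMeta. That check only succeeds if the pool subclass advertises the option from its GstBufferPoolClass::get_options() vfunc. Below is a hedged sketch of that contract; the function name is hypothetical, while GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META comes from gstvaapivideobufferpool.h (per the includes above) and get_options() is standard GstBufferPool API.

/* Hypothetical sketch (not from this commit): a pool subclass advertising the
 * VAAPI video meta option so the gst_buffer_pool_has_option() test succeeds.
 * This function would be assigned to GstBufferPoolClass::get_options in the
 * subclass' class_init(). */
#include <gst/video/video.h>
#include "gstvaapivideobufferpool.h"

static const gchar **
example_pool_get_options(GstBufferPool *pool)
{
    static const gchar *options[] = {
        GST_BUFFER_POOL_OPTION_VIDEO_META,       /* standard video meta */
        GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META, /* VA-API surface meta */
        NULL
    };
    return options;
}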

@@ -70,6 +70,8 @@
#include "gstvaapisink.h"
#include "gstvaapipluginutil.h"
#include "gstvaapivideometa.h"
#include "gstvaapivideobufferpool.h"
#include "gstvaapivideomemory.h"
#define GST_PLUGIN_NAME "vaapisink"
#define GST_PLUGIN_DESC "A VA-API based videosink"
@@ -588,6 +590,66 @@ end:
return success;
}
static gboolean
gst_vaapisink_ensure_video_buffer_pool(GstVaapiSink *sink, GstCaps *caps)
{
GstBufferPool *pool;
GstCaps *pool_caps;
GstStructure *config;
GstVideoInfo vi;
gboolean need_pool;
if (!gst_vaapisink_ensure_display(sink))
return FALSE;
if (sink->video_buffer_pool) {
config = gst_buffer_pool_get_config(sink->video_buffer_pool);
gst_buffer_pool_config_get_params(config, &pool_caps, NULL, NULL, NULL);
need_pool = !gst_caps_is_equal(caps, pool_caps);
gst_structure_free(config);
if (!need_pool)
return TRUE;
g_clear_object(&sink->video_buffer_pool);
sink->video_buffer_size = 0;
}
pool = gst_vaapi_video_buffer_pool_new(sink->display);
if (!pool)
goto error_create_pool;
gst_video_info_init(&vi);
gst_video_info_from_caps(&vi, caps);
if (GST_VIDEO_INFO_FORMAT(&vi) == GST_VIDEO_FORMAT_ENCODED) {
GST_DEBUG("assume video buffer pool format is NV12");
gst_video_info_set_format(&vi, GST_VIDEO_FORMAT_NV12,
GST_VIDEO_INFO_WIDTH(&vi), GST_VIDEO_INFO_HEIGHT(&vi));
}
sink->video_buffer_size = vi.size;
config = gst_buffer_pool_get_config(pool);
gst_buffer_pool_config_set_params(config, caps, sink->video_buffer_size,
0, 0);
gst_buffer_pool_config_add_option(config,
GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META);
if (!gst_buffer_pool_set_config(pool, config))
goto error_pool_config;
sink->video_buffer_pool = pool;
return TRUE;
/* ERRORS */
error_create_pool:
{
GST_ERROR("failed to create buffer pool");
return FALSE;
}
error_pool_config:
{
GST_ERROR("failed to reset buffer pool config");
g_object_unref(pool);
return FALSE;
}
}
static gboolean
gst_vaapisink_start(GstBaseSink *base_sink)
{
@@ -608,6 +670,7 @@ gst_vaapisink_stop(GstBaseSink *base_sink)
GstVaapiSink * const sink = GST_VAAPISINK(base_sink);
gst_buffer_replace(&sink->video_buffer, NULL);
g_clear_object(&sink->video_buffer_pool);
g_clear_object(&sink->window);
g_clear_object(&sink->display);
g_clear_object(&sink->uploader);
@@ -645,6 +708,9 @@ gst_vaapisink_set_caps(GstBaseSink *base_sink, GstCaps *caps)
return TRUE;
#endif
if (!gst_vaapisink_ensure_video_buffer_pool(sink, caps))
return FALSE;
if (!gst_video_info_from_caps(&vi, caps))
return FALSE;
sink->use_video_raw = GST_VIDEO_INFO_IS_YUV(&vi);
@@ -954,7 +1020,41 @@ error:
return GST_FLOW_EOS;
}
-#if !GST_CHECK_VERSION(1,0,0)
+#if GST_CHECK_VERSION(1,0,0)
static gboolean
gst_vaapisink_propose_allocation(GstBaseSink *base_sink, GstQuery *query)
{
GstVaapiSink * const sink = GST_VAAPISINK(base_sink);
GstCaps *caps = NULL;
gboolean need_pool;
gst_query_parse_allocation(query, &caps, &need_pool);
if (need_pool) {
if (!caps)
goto error_no_caps;
if (!gst_vaapisink_ensure_video_buffer_pool(sink, caps))
return FALSE;
gst_query_add_allocation_pool(query, sink->video_buffer_pool,
sink->video_buffer_size, 0, 0);
}
gst_query_add_allocation_meta(query,
GST_VAAPI_VIDEO_META_API_TYPE, NULL);
gst_query_add_allocation_meta(query,
GST_VIDEO_META_API_TYPE, NULL);
gst_query_add_allocation_meta(query,
GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL);
return TRUE;
/* ERRORS */
error_no_caps:
{
GST_ERROR("no caps specified");
return FALSE;
}
}
#else
static GstFlowReturn
gst_vaapisink_buffer_alloc(
GstBaseSink *base_sink,
@@ -1104,7 +1204,9 @@ gst_vaapisink_class_init(GstVaapiSinkClass *klass)
basesink_class->preroll = gst_vaapisink_show_frame;
basesink_class->render = gst_vaapisink_show_frame;
basesink_class->query = gst_vaapisink_query;
-#if !GST_CHECK_VERSION(1,0,0)
+#if GST_CHECK_VERSION(1,0,0)
basesink_class->propose_allocation = gst_vaapisink_propose_allocation;
#else
basesink_class->buffer_alloc = gst_vaapisink_buffer_alloc;
#endif

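For context on the propose_allocation() hunk above: the metas registered with gst_query_add_allocation_meta() are what an upstream element probes with gst_query_find_allocation_meta(), and the proposed pool must be activated before buffers can be pulled from it. A rough sketch of that consumer side follows, under the assumption that "pool" and "query" come from an allocation query like the one shown earlier; this is not code from the patch.

/* Illustrative sketch (not from this commit): consuming the sink's
 * allocation proposal. */
#include <gst/gst.h>
#include <gst/video/video.h>

static GstBuffer *
acquire_from_proposed_pool(GstBufferPool *pool, GstQuery *query)
{
    GstBuffer *buffer = NULL;

    /* Check whether downstream accepts GstVideoMeta before relying on it */
    if (gst_query_find_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL))
        GST_DEBUG("downstream accepts GstVideoMeta");

    /* A pool must be activated before buffers can be acquired from it */
    if (!gst_buffer_pool_set_active(pool, TRUE))
        return NULL;

    if (gst_buffer_pool_acquire_buffer(pool, &buffer, NULL) != GST_FLOW_OK)
        return NULL;

    return buffer;
}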

@@ -76,6 +76,8 @@ struct _GstVaapiSink {
guint window_width;
guint window_height;
GstVaapiTexture *texture;
GstBufferPool *video_buffer_pool;
guint video_buffer_size;
GstBuffer *video_buffer;
guint video_width;
guint video_height;
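
The two new fields above (video_buffer_pool, video_buffer_size) hold the pool created in gst_vaapisink_ensure_video_buffer_pool() and are cleared in gst_vaapisink_stop(). As a minimal sketch of that teardown, assuming the common GStreamer convention of deactivating a pool before dropping the last reference (the patch itself only calls g_clear_object()):

/* Hedged sketch, not from this commit: releasing the pool stored in the new
 * video_buffer_pool field. Deactivating first lets the pool release its
 * buffers before the reference is dropped. */
#include "gstvaapisink.h"

static void
release_video_buffer_pool(GstVaapiSink *sink)
{
    if (sink->video_buffer_pool) {
        gst_buffer_pool_set_active(sink->video_buffer_pool, FALSE);
        g_clear_object(&sink->video_buffer_pool);
        sink->video_buffer_size = 0;
    }
}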