/*
 * gstreamer/gst-libs/gst/vaapi/gstvaapidecoder.c
 *
 * Gwenole Beauchesne ea9703362c decoder: add {start,end}_frame() hooks.
 * The start_frame() hook is called prior to traversing all decode-units
 * for decoding. The unit argument represents the first slice in the frame.
 * Some codecs (e.g. H.264) need to wait for the first slice in order to
 * determine the actual VA context parameters.
 * 2012-12-18 15:31:51 +01:00
 */

/*
* gstvaapidecoder.c - VA decoder abstraction
*
* Copyright (C) 2010-2011 Splitted-Desktop Systems
* Copyright (C) 2011-2012 Intel Corporation
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1
* of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
/**
* SECTION:gstvaapidecoder
* @short_description: VA decoder abstraction
*/
#include "sysdeps.h"
#include "gstvaapicompat.h"
#include "gstvaapidecoder.h"
#include "gstvaapidecoder_priv.h"
#include "gstvaapiutils.h"
#include "gstvaapi_priv.h"
#define DEBUG 1
#include "gstvaapidebug.h"
G_DEFINE_TYPE(GstVaapiDecoder, gst_vaapi_decoder, G_TYPE_OBJECT)
enum {
PROP_0,
PROP_DISPLAY,
PROP_CAPS,
N_PROPERTIES
};
static GParamSpec *g_properties[N_PROPERTIES] = { NULL, };
/* Releases the input/output adapters held by the parser state, clearing
 * any pending data first. Safe to call on a partially initialized state. */
static void
parser_state_finalize(GstVaapiParserState *ps)
{
    GstAdapter **adapters[2];
    guint i;

    adapters[0] = &ps->input_adapter;
    adapters[1] = &ps->output_adapter;

    for (i = 0; i < G_N_ELEMENTS(adapters); i++) {
        GstAdapter ** const adapter_ptr = adapters[i];

        if (!*adapter_ptr)
            continue;
        gst_adapter_clear(*adapter_ptr);
        g_object_unref(*adapter_ptr);
        *adapter_ptr = NULL;
    }
}
/* Allocates the input/output adapters used for parsing.
 * Returns TRUE on success; on failure, parser_state_finalize() cleans up
 * whatever was created. */
static gboolean
parser_state_init(GstVaapiParserState *ps)
{
    if (!(ps->input_adapter = gst_adapter_new()))
        return FALSE;
    if (!(ps->output_adapter = gst_adapter_new()))
        return FALSE;
    return TRUE;
}
/* Detaches and returns the unit stashed by a previous parse call, or NULL.
 * The @adapter argument is currently unused but kept for API symmetry. */
static inline GstVaapiDecoderUnit *
parser_state_get_pending_unit(GstVaapiParserState *ps, GstAdapter *adapter)
{
    GstVaapiDecoderUnit *pending;

    pending = ps->pending_unit;
    ps->pending_unit = NULL;
    return pending;
}
/* Stashes @unit so the next do_parse() call consumes it before parsing new
 * data. The @adapter argument is currently unused but kept for API symmetry. */
static inline void
parser_state_set_pending_unit(GstVaapiParserState *ps,
    GstAdapter *adapter, GstVaapiDecoderUnit *unit)
{
    ps->pending_unit = unit;
}
/* Resets per-adapter parser bookkeeping whenever the active adapter changes. */
static void
parser_state_prepare(GstVaapiParserState *ps, GstAdapter *adapter)
{
    /* XXX: check we really have a continuity from the previous call */
    if (ps->current_adapter == adapter)
        return;

    ps->current_adapter = adapter;
    ps->input_offset2 = -1;
}
/* GDestroyNotify helper: drops one reference on a queued encoded buffer. */
static void
destroy_buffer(GstBuffer *buffer)
{
    gst_buffer_unref(buffer);
}
/* Appends @buffer to the decoder's encoded-data queue, taking ownership.
 * A NULL @buffer signals End-Of-Stream: an empty buffer flagged with
 * GST_BUFFER_FLAG_EOS is queued instead.
 * Returns FALSE only if the EOS placeholder could not be allocated. */
static gboolean
push_buffer(GstVaapiDecoder *decoder, GstBuffer *buffer)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;

    if (!buffer) {
        if (!(buffer = gst_buffer_new()))
            return FALSE;
        GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_EOS);
    }

    GST_DEBUG("queue encoded data buffer %p (%d bytes)",
        buffer, GST_BUFFER_SIZE(buffer));
    g_queue_push_tail(priv->buffers, buffer);
    return TRUE;
}
/* Dequeues the next encoded buffer for decoding, or returns NULL when the
 * queue is empty. Ownership transfers to the caller. */
static GstBuffer *
pop_buffer(GstVaapiDecoder *decoder)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;
    GstBuffer * const buffer = g_queue_pop_head(priv->buffers);

    if (buffer)
        GST_DEBUG("dequeue buffer %p for decoding (%d bytes)",
            buffer, GST_BUFFER_SIZE(buffer));
    return buffer;
}
/* Parses one decode unit out of @adapter and appends it to the
 * GstVaapiDecoderFrame attached to @base_frame.
 *
 * On return, *got_unit_size_ptr holds the number of bytes the caller must
 * move from the input adapter to the output adapter (0 if none), and
 * *got_frame_ptr is TRUE when a complete frame has been assembled.
 *
 * Note the control flow: "goto got_frame" jumps into the body of the final
 * if statement, closing the current frame WITHOUT appending the new unit
 * (it is stashed as pending for the next call instead). */
static GstVaapiDecoderStatus
do_parse(GstVaapiDecoder *decoder,
    GstVideoCodecFrame *base_frame, GstAdapter *adapter, gboolean at_eos,
    guint *got_unit_size_ptr, gboolean *got_frame_ptr)
{
    GstVaapiParserState * const ps = &decoder->priv->parser_state;
    GstVaapiDecoderFrame *frame;
    GstVaapiDecoderUnit *unit;
    GstVaapiDecoderStatus status;

    *got_unit_size_ptr = 0;
    *got_frame_ptr = FALSE;

    /* Lazily attach the decoder-frame bookkeeping to the codec frame;
     * it is released together with the codec frame via the destroy notify */
    frame = gst_video_codec_frame_get_user_data(base_frame);
    if (!frame) {
        frame = gst_vaapi_decoder_frame_new();
        if (!frame)
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        gst_video_codec_frame_set_user_data(base_frame,
            frame, (GDestroyNotify)gst_vaapi_mini_object_unref);
    }

    parser_state_prepare(ps, adapter);

    /* Consume a unit held over from the previous call (a frame-start unit
     * detected while the previous frame was still open) before parsing */
    unit = parser_state_get_pending_unit(ps, adapter);
    if (unit)
        goto got_unit;

    ps->current_frame = base_frame;

    /* Delegate actual bitstream parsing to the codec-specific subclass */
    status = GST_VAAPI_DECODER_GET_CLASS(decoder)->parse(decoder,
        adapter, at_eos, &unit);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        if (unit)
            gst_vaapi_decoder_unit_unref(unit);
        return status;
    }

    /* A new frame starts while the current one already holds slices:
     * stash the unit for the next call and close the current frame */
    if (GST_VAAPI_DECODER_UNIT_IS_FRAME_START(unit) && frame->prev_slice) {
        parser_state_set_pending_unit(ps, adapter, unit);
        goto got_frame;
    }

got_unit:
    unit->offset = frame->output_offset;
    /* Units are prepended for O(1) insertion and reversed when the frame
     * is complete (see got_frame below) */
    frame->units = g_slist_prepend(frame->units, unit);
    frame->output_offset += unit->size;
    if (GST_VAAPI_DECODER_UNIT_IS_SLICE(unit))
        frame->prev_slice = unit;

    *got_unit_size_ptr = unit->size;
    if (GST_VAAPI_DECODER_UNIT_IS_FRAME_END(unit)) {
    got_frame:
        /* Restore decode order of the prepended unit list */
        frame->units = g_slist_reverse(frame->units);
        *got_frame_ptr = TRUE;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Submits all units of a fully parsed frame to the codec-specific decoder.
 *
 * Calls the optional start_frame() hook with the FIRST slice unit (some
 * codecs, e.g. H.264, derive VA context parameters from it), then decode()
 * for every non-skipped unit, and finally the optional end_frame() hook.
 * Stops and propagates the first non-success status. */
static GstVaapiDecoderStatus
do_decode(GstVaapiDecoder *decoder, GstVideoCodecFrame *base_frame)
{
    GstVaapiDecoderClass * const klass = GST_VAAPI_DECODER_GET_CLASS(decoder);
    GstVaapiParserState * const ps = &decoder->priv->parser_state;
    GstVaapiDecoderFrame * const frame = base_frame->user_data;
    GstVaapiDecoderStatus status;
    GSList *node;

    ps->current_frame = base_frame;

    if (klass->start_frame) {
        for (node = frame->units; node; node = node->next) {
            GstVaapiDecoderUnit * const unit = node->data;

            if (!GST_VAAPI_DECODER_UNIT_IS_SLICE(unit))
                continue;
            status = klass->start_frame(decoder, unit);
            if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
                return status;
            break;
        }
    }

    for (node = frame->units; node; node = node->next) {
        GstVaapiDecoderUnit * const unit = node->data;

        if (GST_VAAPI_DECODER_UNIT_IS_SKIPPED(unit))
            continue;
        status = klass->decode(decoder, unit);
        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
            return status;
    }

    if (!klass->end_frame)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
    return klass->end_frame(decoder);
}
/* Runs one decoding step: drains queued encoded buffers through the parser
 * and decodes each completed frame.
 *
 * Outer loop: pops encoded buffers until data is available; inner loop:
 * parses units out of the input adapter, staging their bytes in the output
 * adapter, and decodes whenever a full frame is assembled.
 *
 * Returns GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA when the buffer queue is
 * exhausted, or the first non-success parse/decode status. */
static GstVaapiDecoderStatus
decode_step(GstVaapiDecoder *decoder)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;
    GstVaapiParserState * const ps = &priv->parser_state;
    GstVaapiDecoderStatus status;
    GstBuffer *buffer;
    gboolean at_eos, got_frame;
    guint got_unit_size;

    /* Bail out early, e.g. when no free VA surface is available */
    status = gst_vaapi_decoder_check_status(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    do {
        buffer = pop_buffer(decoder);
        if (!buffer)
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

        at_eos = GST_BUFFER_IS_EOS(buffer);
        /* The EOS placeholder carries no payload; don't push it */
        if (!at_eos)
            gst_adapter_push(ps->input_adapter, buffer);

        do {
            /* Lazily allocate the codec frame currently being assembled */
            if (!ps->current_frame) {
                ps->current_frame = g_slice_new0(GstVideoCodecFrame);
                if (!ps->current_frame)
                    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
                ps->current_frame->ref_count = 1;
            }

            status = do_parse(decoder, ps->current_frame,
                ps->input_adapter, at_eos, &got_unit_size, &got_frame);
            GST_DEBUG("parse frame (status = %d)", status);
            if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
                break;

            if (got_unit_size > 0) {
                /* Move the parsed unit's bytes from input to output adapter */
                buffer = gst_adapter_take_buffer(ps->input_adapter,
                    got_unit_size);
                /* First unit of the frame: capture its timestamp as PTS */
                if (gst_adapter_available(ps->output_adapter) == 0) {
                    ps->current_frame->pts =
                        gst_adapter_prev_timestamp(ps->input_adapter, NULL);
                }
                gst_adapter_push(ps->output_adapter, buffer);
            }

            if (got_frame) {
                /* Hand the complete frame's bytes to the decoder, then
                 * release the frame and start assembling the next one */
                ps->current_frame->input_buffer = gst_adapter_take_buffer(
                    ps->output_adapter,
                    gst_adapter_available(ps->output_adapter));

                status = do_decode(decoder, ps->current_frame);
                GST_DEBUG("decode frame (status = %d)", status);

                gst_video_codec_frame_unref(ps->current_frame);
                ps->current_frame = NULL;
            }
        } while (status == GST_VAAPI_DECODER_STATUS_SUCCESS &&
                 gst_adapter_available(ps->input_adapter) > 0);
    } while (status == GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA);
    return status;
}
/* Queues a decoded surface proxy for retrieval, stamping its duration from
 * the configured framerate when one is known. Takes ownership of @proxy. */
static inline void
push_surface(GstVaapiDecoder *decoder, GstVaapiSurfaceProxy *proxy)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;

    GST_DEBUG("queue decoded surface %" GST_VAAPI_ID_FORMAT,
        GST_VAAPI_ID_ARGS(gst_vaapi_surface_proxy_get_surface_id(proxy)));

    if (priv->fps_n && priv->fps_d) {
        /* Actual field duration is computed in vaapipostproc */
        const GstClockTime duration =
            gst_util_uint64_scale(GST_SECOND, priv->fps_d, priv->fps_n);
        gst_vaapi_surface_proxy_set_duration(proxy, duration);
    }
    g_queue_push_tail(priv->surfaces, proxy);
}
/* Dequeues the next decoded surface proxy, or returns NULL when none is
 * pending. Ownership transfers to the caller. */
static inline GstVaapiSurfaceProxy *
pop_surface(GstVaapiDecoder *decoder)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;

    return g_queue_pop_head(priv->surfaces);
}
/* Replaces the stored codec_data buffer with a new reference to @codec_data
 * (may be NULL to simply clear it).
 *
 * Fix: take the new reference BEFORE dropping the old one. The previous
 * code unreffed the stored buffer first, so passing in the very buffer
 * already stored (with a single remaining reference) would destroy it and
 * then ref freed memory. */
static inline void
set_codec_data(GstVaapiDecoder *decoder, GstBuffer *codec_data)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;
    GstBuffer * const old_codec_data = priv->codec_data;

    priv->codec_data = codec_data ? gst_buffer_ref(codec_data) : NULL;
    if (old_codec_data)
        gst_buffer_unref(old_codec_data);
}
/* Extracts decoder configuration (codec, dimensions, framerate, PAR,
 * interlacing, codec_data) from @caps into the private state.
 * Returns silently if no VA profile can be derived from @caps.
 *
 * Fix: release any previously stored caps before copying the new ones;
 * the old code overwrote priv->caps and leaked the prior reference when
 * called more than once. */
static void
set_caps(GstVaapiDecoder *decoder, GstCaps *caps)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;
    GstStructure * const structure = gst_caps_get_structure(caps, 0);
    GstVaapiProfile profile;
    const GValue *v_codec_data;
    gint v1, v2;
    gboolean b;

    profile = gst_vaapi_profile_from_caps(caps);
    if (!profile)
        return;

    if (priv->caps)
        gst_caps_unref(priv->caps);
    priv->caps = gst_caps_copy(caps);

    priv->codec = gst_vaapi_profile_get_codec(profile);
    if (!priv->codec)
        return;

    if (gst_structure_get_int(structure, "width", &v1))
        priv->width = v1;
    if (gst_structure_get_int(structure, "height", &v2))
        priv->height = v2;

    if (gst_structure_get_fraction(structure, "framerate", &v1, &v2)) {
        priv->fps_n = v1;
        priv->fps_d = v2;
    }

    if (gst_structure_get_fraction(structure, "pixel-aspect-ratio", &v1, &v2)) {
        priv->par_n = v1;
        priv->par_d = v2;
    }

    if (gst_structure_get_boolean(structure, "interlaced", &b))
        priv->is_interlaced = b;

    v_codec_data = gst_structure_get_value(structure, "codec_data");
    if (v_codec_data)
        set_codec_data(decoder, gst_value_get_buffer(v_codec_data));
}
/* Empties @q, invoking @destroy on every element. The queue itself is not
 * freed. */
static void
clear_queue(GQueue *q, GDestroyNotify destroy)
{
    for (;;) {
        if (g_queue_is_empty(q))
            break;
        destroy(g_queue_pop_head(q));
    }
}
/* GObject finalizer: releases all decoder-owned resources in dependency
 * order (codec data, parser adapters, caps, VA context, queued buffers and
 * surfaces, and finally the display), then chains up to the parent class. */
static void
gst_vaapi_decoder_finalize(GObject *object)
{
    GstVaapiDecoder * const decoder = GST_VAAPI_DECODER(object);
    GstVaapiDecoderPrivate * const priv = decoder->priv;

    /* Passing NULL clears the stored codec_data reference */
    set_codec_data(decoder, NULL);

    parser_state_finalize(&priv->parser_state);

    if (priv->caps) {
        gst_caps_unref(priv->caps);
        priv->caps = NULL;
    }

    if (priv->context) {
        g_object_unref(priv->context);
        priv->context = NULL;
        priv->va_context = VA_INVALID_ID;
    }

    if (priv->buffers) {
        clear_queue(priv->buffers, (GDestroyNotify)destroy_buffer);
        g_queue_free(priv->buffers);
        priv->buffers = NULL;
    }

    if (priv->surfaces) {
        clear_queue(priv->surfaces, (GDestroyNotify)
            gst_vaapi_surface_proxy_unref);
        g_queue_free(priv->surfaces);
        priv->surfaces = NULL;
    }

    /* Release the display last: the context and surfaces above depend on it */
    if (priv->display) {
        g_object_unref(priv->display);
        priv->display = NULL;
        priv->va_display = NULL;
    }
    G_OBJECT_CLASS(gst_vaapi_decoder_parent_class)->finalize(object);
}
/* GObject property setter for the construct-only "display" and "caps"
 * properties.
 *
 * Fix: use g_value_dup_object() for PROP_DISPLAY. The old code called
 * g_object_ref(g_value_get_object(value)) unconditionally, which emits a
 * GLib critical when the value holds NULL — and made the subsequent NULL
 * check on priv->display dead. g_value_dup_object() refs the object or
 * returns NULL safely. */
static void
gst_vaapi_decoder_set_property(
    GObject      *object,
    guint         prop_id,
    const GValue *value,
    GParamSpec   *pspec
)
{
    GstVaapiDecoder * const decoder = GST_VAAPI_DECODER(object);
    GstVaapiDecoderPrivate * const priv = decoder->priv;

    switch (prop_id) {
    case PROP_DISPLAY:
        priv->display = g_value_dup_object(value);
        if (priv->display)
            priv->va_display = gst_vaapi_display_get_display(priv->display);
        else
            priv->va_display = NULL;
        break;
    case PROP_CAPS:
        set_caps(decoder, g_value_get_pointer(value));
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
}
/* GObject property getter for "display" and "caps". */
static void
gst_vaapi_decoder_get_property(
    GObject    *object,
    guint       prop_id,
    GValue     *value,
    GParamSpec *pspec
)
{
    GstVaapiDecoderPrivate * const priv = GST_VAAPI_DECODER(object)->priv;

    switch (prop_id) {
    case PROP_DISPLAY:
        g_value_set_object(value, priv->display);
        break;
    case PROP_CAPS:
        gst_value_set_caps(value, priv->caps);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
}
/* GObject class initializer: registers the private struct, installs the
 * finalize/set_property/get_property vfuncs, and declares the two
 * construct-only properties ("display" and "caps"). */
static void
gst_vaapi_decoder_class_init(GstVaapiDecoderClass *klass)
{
    GObjectClass * const object_class = G_OBJECT_CLASS(klass);

    g_type_class_add_private(klass, sizeof(GstVaapiDecoderPrivate));

    object_class->finalize     = gst_vaapi_decoder_finalize;
    object_class->set_property = gst_vaapi_decoder_set_property;
    object_class->get_property = gst_vaapi_decoder_get_property;

    /**
     * GstVaapiDecoder:display:
     *
     * The #GstVaapiDisplay this decoder is bound to.
     */
    g_properties[PROP_DISPLAY] =
        g_param_spec_object("display",
                            "Display",
                            "The GstVaapiDisplay this decoder is bound to",
                            GST_VAAPI_TYPE_DISPLAY,
                            G_PARAM_READWRITE|G_PARAM_CONSTRUCT_ONLY);

    /* "caps" is a plain pointer property (not boxed); see set_caps() */
    g_properties[PROP_CAPS] =
        g_param_spec_pointer("caps",
                             "Decoder caps",
                             "The decoder caps",
                             G_PARAM_READWRITE|G_PARAM_CONSTRUCT_ONLY);

    g_object_class_install_properties(object_class, N_PROPERTIES, g_properties);
}
/* GObject instance initializer: binds the private struct, creates the
 * parser adapters and the encoded-buffer/decoded-surface queues, and sets
 * every remaining field to its explicit default. */
static void
gst_vaapi_decoder_init(GstVaapiDecoder *decoder)
{
    GstVaapiDecoderPrivate * const priv =
        GST_VAAPI_DECODER_GET_PRIVATE(decoder);

    parser_state_init(&priv->parser_state);
    decoder->priv = priv;

    priv->display       = NULL;
    priv->va_display    = NULL;
    priv->context       = NULL;
    priv->va_context    = VA_INVALID_ID;
    priv->caps          = NULL;
    priv->codec         = 0;
    priv->codec_data    = NULL;
    priv->width         = 0;
    priv->height        = 0;
    priv->fps_n         = 0;
    priv->fps_d         = 0;
    priv->par_n         = 0;
    priv->par_d         = 0;
    priv->buffers       = g_queue_new();
    priv->surfaces      = g_queue_new();
    priv->is_interlaced = FALSE;
}
/**
 * gst_vaapi_decoder_get_codec:
 * @decoder: a #GstVaapiDecoder
 *
 * Returns the codec type this @decoder was configured for, as derived
 * from the caps at construction time.
 *
 * Return value: the #GstVaapiCodec type for @decoder, or 0 if @decoder
 *   is not a valid decoder instance
 */
GstVaapiCodec
gst_vaapi_decoder_get_codec(GstVaapiDecoder *decoder)
{
    g_return_val_if_fail(GST_VAAPI_IS_DECODER(decoder), (GstVaapiCodec)0);

    return decoder->priv->codec;
}
/**
 * gst_vaapi_decoder_get_caps:
 * @decoder: a #GstVaapiDecoder
 *
 * Retrieves the @decoder caps. The decoder owns the returned caps, so
 * use gst_caps_ref() whenever necessary.
 *
 * Return value: the @decoder caps, or %NULL if @decoder is invalid
 */
GstCaps *
gst_vaapi_decoder_get_caps(GstVaapiDecoder *decoder)
{
    /* Fix: validate the instance like the other public accessors do
     * (gst_vaapi_decoder_get_codec) instead of dereferencing blindly */
    g_return_val_if_fail(GST_VAAPI_IS_DECODER(decoder), NULL);

    return decoder->priv->caps;
}
/**
 * gst_vaapi_decoder_put_buffer:
 * @decoder: a #GstVaapiDecoder
 * @buf: a #GstBuffer
 *
 * Queues a #GstBuffer to the HW decoder. The decoder holds a
 * reference to @buf.
 *
 * Caller can notify an End-Of-Stream with @buf set to %NULL. However,
 * if an empty buffer is passed, i.e. a buffer with %NULL data pointer
 * or size equals to zero, then the function ignores this buffer and
 * returns %TRUE.
 *
 * Return value: %TRUE on success
 */
gboolean
gst_vaapi_decoder_put_buffer(GstVaapiDecoder *decoder, GstBuffer *buf)
{
    g_return_val_if_fail(GST_VAAPI_IS_DECODER(decoder), FALSE);

    if (!buf)
        return push_buffer(decoder, NULL);

    /* Silently accept (and drop) empty buffers */
    if (!GST_BUFFER_DATA(buf) || GST_BUFFER_SIZE(buf) <= 0)
        return TRUE;
    return push_buffer(decoder, gst_buffer_ref(buf));
}
/**
 * gst_vaapi_decoder_get_surface:
 * @decoder: a #GstVaapiDecoder
 * @pstatus: return location for the decoder status, or %NULL
 *
 * Flushes encoded buffers to the decoder and returns a decoded
 * surface, if any.
 *
 * Return value: a #GstVaapiSurfaceProxy holding the decoded surface,
 * or %NULL if none is available (e.g. an error). Caller owns the
 * returned object. g_object_unref() after usage.
 */
GstVaapiSurfaceProxy *
gst_vaapi_decoder_get_surface(
    GstVaapiDecoder *decoder,
    GstVaapiDecoderStatus *pstatus
)
{
    GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    GstVaapiSurfaceProxy *proxy;

    if (pstatus)
        *pstatus = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    g_return_val_if_fail(GST_VAAPI_IS_DECODER(decoder), NULL);

    proxy = pop_surface(decoder);
    if (!proxy) {
        /* Drain queued encoded data until the decoder stalls or errors */
        while ((status = decode_step(decoder)) ==
               GST_VAAPI_DECODER_STATUS_SUCCESS)
            ;
        proxy = pop_surface(decoder);
    }

    if (proxy)
        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
    if (pstatus)
        *pstatus = status;
    return proxy;
}
/* Updates the cached picture size and the stored caps; emits a "caps"
 * property notification when either dimension actually changed. */
void
gst_vaapi_decoder_set_picture_size(
    GstVaapiDecoder *decoder,
    guint            width,
    guint            height
)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;
    const gboolean width_changed  = priv->width != width;
    const gboolean height_changed = priv->height != height;

    if (width_changed) {
        GST_DEBUG("picture width changed to %d", width);
        priv->width = width;
        gst_caps_set_simple(priv->caps, "width", G_TYPE_INT, width, NULL);
    }

    if (height_changed) {
        GST_DEBUG("picture height changed to %d", height);
        priv->height = height;
        gst_caps_set_simple(priv->caps, "height", G_TYPE_INT, height, NULL);
    }

    if (width_changed || height_changed)
        g_object_notify_by_pspec(G_OBJECT(decoder), g_properties[PROP_CAPS]);
}
/* Updates the cached framerate and the stored caps; emits a "caps"
 * property notification on change. Zero numerator or denominator is
 * ignored. */
void
gst_vaapi_decoder_set_framerate(
    GstVaapiDecoder *decoder,
    guint            fps_n,
    guint            fps_d
)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;

    if (!fps_n || !fps_d)
        return;
    if (priv->fps_n == fps_n && priv->fps_d == fps_d)
        return;

    GST_DEBUG("framerate changed to %u/%u", fps_n, fps_d);
    priv->fps_n = fps_n;
    priv->fps_d = fps_d;
    gst_caps_set_simple(priv->caps,
        "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
    g_object_notify_by_pspec(G_OBJECT(decoder), g_properties[PROP_CAPS]);
}
/* Updates the cached pixel-aspect-ratio and the stored caps; emits a
 * "caps" property notification on change. Zero numerator or denominator
 * is ignored. */
void
gst_vaapi_decoder_set_pixel_aspect_ratio(
    GstVaapiDecoder *decoder,
    guint            par_n,
    guint            par_d
)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;

    if (!par_n || !par_d)
        return;
    if (priv->par_n == par_n && priv->par_d == par_d)
        return;

    GST_DEBUG("pixel-aspect-ratio changed to %u/%u", par_n, par_d);
    priv->par_n = par_n;
    priv->par_d = par_d;
    gst_caps_set_simple(priv->caps,
        "pixel-aspect-ratio", GST_TYPE_FRACTION, par_n, par_d, NULL);
    g_object_notify_by_pspec(G_OBJECT(decoder), g_properties[PROP_CAPS]);
}
/* Updates the cached interlacing flag and the stored caps; emits a "caps"
 * property notification on change. */
void
gst_vaapi_decoder_set_interlaced(GstVaapiDecoder *decoder, gboolean interlaced)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;

    if (priv->is_interlaced == interlaced)
        return;

    GST_DEBUG("interlaced changed to %s", interlaced ? "true" : "false");
    priv->is_interlaced = interlaced;
    gst_caps_set_simple(priv->caps,
        "interlaced", G_TYPE_BOOLEAN, interlaced, NULL);
    g_object_notify_by_pspec(G_OBJECT(decoder), g_properties[PROP_CAPS]);
}
/* Ensures a VA context matching @cip exists: resets the current context
 * when one is already allocated, otherwise creates a new one. Also
 * propagates the picture size from @cip to the caps.
 * Returns TRUE on success. */
gboolean
gst_vaapi_decoder_ensure_context(
    GstVaapiDecoder     *decoder,
    GstVaapiContextInfo *cip
)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;
    gboolean success;

    gst_vaapi_decoder_set_picture_size(decoder, cip->width, cip->height);

    if (priv->context)
        success = gst_vaapi_context_reset_full(priv->context, cip);
    else {
        priv->context = gst_vaapi_context_new_full(priv->display, cip);
        success = priv->context != NULL;
    }
    if (!success)
        return FALSE;

    priv->va_context = gst_vaapi_context_get_id(priv->context);
    return TRUE;
}
/* Public wrapper for push_surface(): queues a decoded surface proxy for
 * retrieval via gst_vaapi_decoder_get_surface(). Takes ownership of
 * @proxy. Intended for codec-specific subclasses. */
void
gst_vaapi_decoder_push_surface_proxy(
    GstVaapiDecoder      *decoder,
    GstVaapiSurfaceProxy *proxy
)
{
    push_surface(decoder, proxy);
}
/* Reports whether the decoder can make progress: returns
 * GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE when a context exists but has
 * no free surface left, GST_VAAPI_DECODER_STATUS_SUCCESS otherwise. */
GstVaapiDecoderStatus
gst_vaapi_decoder_check_status(GstVaapiDecoder *decoder)
{
    GstVaapiDecoderPrivate * const priv = decoder->priv;
    const gboolean out_of_surfaces = priv->context &&
        gst_vaapi_context_get_surface_count(priv->context) < 1;

    return out_of_surfaces ? GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE
                           : GST_VAAPI_DECODER_STATUS_SUCCESS;
}