Remove VDPAU plugin

It's been replaced by NVENC/NVDEC and even NVIDIA doesn't
support VDPAU any longer and hasn't for quite some time.

The plugin has been unmaintained and unsupported for a very
long time, and given the track record over the last 10 years
it seems highly unlikely anyone is going to make it work well,
not to mention adding plumbing for proper zero-copy or
gst-gl integration.

Closes https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/issues/828
Author: Tim-Philipp Müller, 2019-06-06 18:22:43 +01:00
parent c2555c6a89
commit 4ff14c0e74
39 changed files with 3 additions and 8864 deletions

View file

@ -2054,51 +2054,6 @@ AG_GST_CHECK_FEATURE(DVB, [DVB Source], dvb, [
], [HAVE_DVB="no"])
])
dnl *** vdpau ***
translit(dnm, m, l) AM_CONDITIONAL(USE_VDPAU, true)
AG_GST_CHECK_FEATURE(VDPAU, [VDPAU], vdpau, [
VDPAU_CFLAGS=
VDPAU_LIBS=-lvdpau
HAVE_VDPAU=no
PKG_CHECK_MODULES(VDPAU, vdpau, [HAVE_VDPAU="yes"],
[
if test "$HAVE_VDPAU" = "no"; then
saved_CPPFLAGS="$CPPFLAGS"
AC_CHECK_HEADER([vdpau/vdpau.h], [HAVE_VDPAU_H=yes])
if test -z "$HAVE_VDPAU_H"; then
dnl Didn't find VDPAU header straight away.
dnl Try /usr/include/nvidia. Need to clear caching vars first
AC_MSG_NOTICE([VDPAU header not in standard path. Checking /usr/include/nvidia])
unset ac_cv_header_vdpau_vdpau_h
unset ac_cv_header_vdpau_vdpau_x11_h
VDPAU_CFLAGS="-I/usr/include/nvidia"
VDPAU_LIBS="-L/usr/lib/nvidia -lvdpau"
CPPFLAGS="$VDPAU_CFLAGS $saved_CPPFLAGS"
AC_CHECK_HEADER([vdpau/vdpau.h], [HAVE_VDPAU_H=yes])
fi
AC_CHECK_HEADER([vdpau/vdpau_x11.h], [HAVE_VDPAU_X11_H=yes])
if test "x$HAVE_VDPAU_H" = "xyes" -a "x$HAVE_VDPAU_X11_H" = "xyes"; then
dnl Found the headers - look for the lib
AC_MSG_NOTICE([VDPAU headers found. Checking libraries])
saved_LIBS="$LIBS"
LIBS="$VDPAU_LIBS $saved_LIBS"
AC_CHECK_LIB(vdpau,vdp_device_create_x11,[HAVE_VDPAU="yes"])
LIBS="$saved_LIBS"
fi
fi
])
if test "$HAVE_VDPAU" = "yes"; then
AC_MSG_NOTICE([Found up to date VDPAU installation])
AC_SUBST(VDPAU_CFLAGS)
AC_SUBST(VDPAU_LIBS)
fi
])
dnl *** sbc ***
translit(dnm, m, l) AM_CONDITIONAL(USE_SBC, true)
AG_GST_CHECK_FEATURE(SBC, [SBC bluetooth audio codec], sbc, [
@ -2318,7 +2273,6 @@ AM_CONDITIONAL(USE_SRTP, false)
AM_CONDITIONAL(USE_SRT, false)
AM_CONDITIONAL(USE_GME, false)
AM_CONDITIONAL(USE_DVB, false)
AM_CONDITIONAL(USE_VDPAU, false)
AM_CONDITIONAL(USE_SBC, false)
AM_CONDITIONAL(USE_ZBAR, false)
AM_CONDITIONAL(USE_RSVG, false)
@ -2539,7 +2493,6 @@ sys/opensles/Makefile
sys/shm/Makefile
sys/tinyalsa/Makefile
sys/uvch264/Makefile
sys/vdpau/Makefile
sys/wasapi/Makefile
sys/winks/Makefile
sys/winscreencap/Makefile

View file

@ -47188,54 +47188,6 @@
"source": "gst-plugins-bad",
"url": "Unknown package origin"
},
"vdpau": {
"description": "Various elements utilizing VDPAU",
"elements": {
"vdpaumpegdec": {
"author": "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>",
"description": "Decode mpeg stream with vdpau",
"hierarchy": [
"GstVdpMpegDec",
"GstVdpDecoder",
"GstVideoDecoder",
"GstElement",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"klass": "Decoder",
"long-name": "VDPAU Mpeg Decoder",
"pad-templates": {
"sink": {
"caps": "video/mpeg:\n mpegversion: [ 1, 2 ]\n systemstream: false\n",
"direction": "sink",
"presence": "always"
},
"src": {
"caps": "video/x-raw(memory:VdpVideoSurface):\n format: { YV12 }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\nvideo/x-raw:\n format: { YV12 }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n",
"direction": "src",
"presence": "always"
}
},
"properties": {
"display": {
"blurb": "X Display name",
"construct": true,
"construct-only": false,
"default": "NULL",
"type-name": "gchararray",
"writable": true
}
},
"rank": "none"
}
},
"filename": "gstvdpau",
"license": "LGPL",
"package": "GStreamer Bad Plug-ins git",
"source": "gst-plugins-bad",
"url": "Unknown package origin"
},
"videofiltersbad": {
"description": "Video filters in gst-plugins-bad",
"elements": {

View file

@ -144,7 +144,6 @@ option('teletext', type : 'feature', value : 'auto', description : 'Teletext plu
option('tinyalsa', type : 'feature', value : 'auto', description : 'TinyALSA plugin')
option('ttml', type : 'feature', value : 'auto', description : 'TTML subtitle parser and renderer plugin')
option('uvch264', type : 'feature', value : 'auto', description : 'UVC compliant H.264 camera source plugin')
option('vdpau', type : 'feature', value : 'auto', description : 'Nvidia VDPAU plugin')
option('voaacenc', type : 'feature', value : 'auto', description : 'AAC audio encoder plugin')
option('voamrwbenc', type : 'feature', value : 'auto', description : 'AMR-WB audio encoder plugin')
option('vulkan', type : 'feature', value : 'auto', description : 'Vulkan video sink plugin')

View file

@ -82,12 +82,6 @@ else
OPENSLES_DIR=
endif
if USE_VDPAU
VDPAU_DIR=vdpau
else
VDPAU_DIR=
endif
if USE_WINSCREENCAP
WINSCREENCAP_DIR=winscreencap
else
@ -124,10 +118,10 @@ else
MSDK_DIR=
endif
SUBDIRS = $(ANDROID_MEDIA_DIR) $(APPLE_MEDIA_DIR) $(BLUEZ_DIR) $(D3DVIDEOSINK_DIR) $(DECKLINK_DIR) $(DIRECTSOUND_DIR) $(WINKS_DIR) $(DVB_DIR) $(FBDEV_DIR) $(IPCPIPELINE_DIR) $(KMS_DIR) $(OPENSLES_DIR) $(SHM_DIR) $(UVCH264_DIR) $(VDPAU_DIR) $(WININET_DIR) $(WINSCREENCAP_DIR) $(WASAPI_DIR) $(NVDEC_DIR) $(NVENC_DIR) $(TINYALSA_DIR) $(MSDK_DIR)
SUBDIRS = $(ANDROID_MEDIA_DIR) $(APPLE_MEDIA_DIR) $(BLUEZ_DIR) $(D3DVIDEOSINK_DIR) $(DECKLINK_DIR) $(DIRECTSOUND_DIR) $(WINKS_DIR) $(DVB_DIR) $(FBDEV_DIR) $(IPCPIPELINE_DIR) $(KMS_DIR) $(OPENSLES_DIR) $(SHM_DIR) $(UVCH264_DIR) $(WININET_DIR) $(WINSCREENCAP_DIR) $(WASAPI_DIR) $(NVDEC_DIR) $(NVENC_DIR) $(TINYALSA_DIR) $(MSDK_DIR)
DIST_SUBDIRS = androidmedia applemedia bluez d3dvideosink decklink directsound dvb fbdev ipcpipeline kms dshowdecwrapper dshowsrcwrapper dshowvideosink \
opensles shm uvch264 vdpau wasapi winks winscreencap \
opensles shm uvch264 wasapi winks winscreencap \
nvdec nvenc tinyalsa msdk
include $(top_srcdir)/common/parallel-subdirs.mak

View file

@ -16,7 +16,6 @@ subdir('opensles')
subdir('shm')
subdir('tinyalsa')
subdir('uvch264')
subdir('vdpau')
subdir('wasapi')
subdir('winks')
subdir('winscreencap')

View file

@ -1,43 +0,0 @@
plugin_LTLIBRARIES = libgstvdpau.la
libgstvdpau_la_SOURCES = \
gstvdpau.c \
gstvdputils.c \
gstvdpvideomemory.c \
gstvdpvideobufferpool.c \
gstvdpdevice.c \
gstvdpdecoder.c \
mpeg/gstvdpmpegdec.c
# \
# h264/gsth264dpb.c \
# h264/gstvdph264dec.c
libgstvdpau_la_CFLAGS = $(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \
$(GST_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS)
libgstvdpau_la_LIBADD = \
$(top_builddir)/gst-libs/gst/codecparsers/libgstcodecparsers-$(GST_API_VERSION).la \
$(GST_LIBS) $(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_API_VERSION) \
$(VDPAU_LIBS) $(LIBM)
libgstvdpau_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
noinst_HEADERS = \
gstvdputils.h \
gstvdpvideomemory.h \
gstvdpvideobufferpool.h \
gstvdpdevice.h \
gstvdpdecoder.h \
gstvdpoutputbuffer.h \
gstvdpvideopostprocess.h \
gstvdpsink.h \
mpeg/gstvdpmpegdec.h \
mpeg4/mpeg4util.h \
mpeg4/gstmpeg4frame.h \
mpeg4/gstvdpmpeg4dec.h
# h264/gsth264dpb.h \
# h264/gstvdph264dec.h

View file

@ -1,49 +0,0 @@
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <gst/gst.h>
#include "gstvdpdevice.h"
#include "gstvdpvideomemory.h"
#include "gstvdpoutputbuffer.h"
#include "gstvdpdecoder.h"
#include "mpeg/gstvdpmpegdec.h"
/* #include "h264/gstvdph264dec.h" */
#include "mpeg4/gstvdpmpeg4dec.h"
#include "gstvdpvideopostprocess.h"
#include "gstvdpsink.h"
static gboolean
vdpau_init (GstPlugin * vdpau_plugin)
{
gboolean ret;
/* do this so debug categories get created */
gst_vdp_device_get_type ();
gst_vdp_decoder_get_type ();
gst_vdp_video_memory_init ();
/* Before giving these elements a rank again, make sure they pass at
* least the generic/states test when there's no device available */
ret = gst_element_register (vdpau_plugin, "vdpaumpegdec",
GST_RANK_NONE, GST_TYPE_VDP_MPEG_DEC);
/* ret &= gst_element_register (vdpau_plugin, "vdpauh264dec", */
/* GST_RANK_NONE, GST_TYPE_VDP_H264_DEC); */
/* gst_element_register (vdpau_plugin, "vdpaumpeg4dec", */
/* GST_RANK_NONE, GST_TYPE_VDP_MPEG4_DEC); */
/* gst_element_register (vdpau_plugin, "vdpauvideopostprocess", */
/* GST_RANK_NONE, GST_TYPE_VDP_VIDEO_POST_PROCESS); */
/* gst_element_register (vdpau_plugin, "vdpausink", */
/* GST_RANK_NONE, GST_TYPE_VDP_SINK); */
return ret;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
vdpau,
"Various elements utilizing VDPAU",
vdpau_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

View file

@ -1,369 +0,0 @@
/* GStreamer
*
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdpdecoder.h"
#include "gstvdpvideomemory.h"
#include "gstvdpvideobufferpool.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_decoder_debug);
#define GST_CAT_DEFAULT gst_vdp_decoder_debug
#define DEBUG_INIT \
GST_DEBUG_CATEGORY_INIT (gst_vdp_decoder_debug, "vdpdecoder", 0, \
"VDPAU decoder base class");
#define gst_vdp_decoder_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstVdpDecoder, gst_vdp_decoder, GST_TYPE_VIDEO_DECODER,
DEBUG_INIT);
enum
{
PROP_0,
PROP_DISPLAY
};
void
gst_vdp_decoder_post_error (GstVdpDecoder * decoder, GError * error)
{
GstMessage *message;
g_return_if_fail (GST_IS_VDP_DECODER (decoder));
g_return_if_fail (decoder != NULL);
message = gst_message_new_error (GST_OBJECT (decoder), error, NULL);
gst_element_post_message (GST_ELEMENT (decoder), message);
g_error_free (error);
}
GstFlowReturn
gst_vdp_decoder_render (GstVdpDecoder * vdp_decoder, VdpPictureInfo * info,
guint n_bufs, VdpBitstreamBuffer * bufs, GstVideoCodecFrame * frame)
{
GstFlowReturn ret;
VdpStatus status;
GstVdpVideoMemory *vmem;
#ifndef GST_DISABLE_GST_DEBUG
GstClockTime before, after;
#endif
GST_DEBUG_OBJECT (vdp_decoder, "n_bufs:%d, frame:%d", n_bufs,
frame->system_frame_number);
ret =
gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (vdp_decoder),
frame);
if (ret != GST_FLOW_OK)
goto fail_alloc;
vmem = (GstVdpVideoMemory *) gst_buffer_get_memory (frame->output_buffer, 0);
if (!vmem
|| !gst_memory_is_type ((GstMemory *) vmem,
GST_VDP_VIDEO_MEMORY_ALLOCATOR))
goto no_mem;
GST_DEBUG_OBJECT (vdp_decoder, "Calling VdpDecoderRender()");
#ifndef GST_DISABLE_GST_DEBUG
before = gst_util_get_timestamp ();
#endif
status =
vdp_decoder->device->vdp_decoder_render (vdp_decoder->decoder,
vmem->surface, info, n_bufs, bufs);
#ifndef GST_DISABLE_GST_DEBUG
after = gst_util_get_timestamp ();
#endif
if (status != VDP_STATUS_OK)
goto decode_error;
GST_DEBUG_OBJECT (vdp_decoder, "VdpDecoderRender() took %" GST_TIME_FORMAT,
GST_TIME_ARGS (after - before));
return GST_FLOW_OK;
decode_error:
GST_ELEMENT_ERROR (vdp_decoder, RESOURCE, READ,
("Could not decode"),
("Error returned from vdpau was: %s",
vdp_decoder->device->vdp_get_error_string (status)));
gst_video_decoder_drop_frame (GST_VIDEO_DECODER (vdp_decoder), frame);
return GST_FLOW_ERROR;
fail_alloc:
{
GST_WARNING_OBJECT (vdp_decoder, "Failed to get an output frame");
return ret;
}
no_mem:
{
GST_ERROR_OBJECT (vdp_decoder, "Didn't get VdpVideoSurface backed buffer");
return GST_FLOW_ERROR;
}
}
GstFlowReturn
gst_vdp_decoder_init_decoder (GstVdpDecoder * vdp_decoder,
VdpDecoderProfile profile, guint32 max_references,
GstVideoCodecState * output_state)
{
GstVdpDevice *device;
VdpStatus status;
device = vdp_decoder->device;
if (vdp_decoder->decoder != VDP_INVALID_HANDLE) {
status = device->vdp_decoder_destroy (vdp_decoder->decoder);
if (status != VDP_STATUS_OK)
goto destroy_decoder_error;
}
GST_DEBUG_OBJECT (vdp_decoder,
"device:%u, profile:%d, width:%d, height:%d, max_references:%d",
device->device, profile, output_state->info.width,
output_state->info.height, max_references);
status = device->vdp_decoder_create (device->device, profile,
output_state->info.width, output_state->info.height, max_references,
&vdp_decoder->decoder);
if (status != VDP_STATUS_OK)
goto create_decoder_error;
return GST_FLOW_OK;
destroy_decoder_error:
GST_ELEMENT_ERROR (vdp_decoder, RESOURCE, READ,
("Could not destroy vdpau decoder"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
return GST_FLOW_ERROR;
create_decoder_error:
GST_ELEMENT_ERROR (vdp_decoder, RESOURCE, READ,
("Could not create vdpau decoder"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
return GST_FLOW_ERROR;
}
static gboolean
gst_vdp_decoder_decide_allocation (GstVideoDecoder * video_decoder,
GstQuery * query)
{
GstVdpDecoder *vdp_decoder = GST_VDP_DECODER (video_decoder);
GstCaps *outcaps;
GstBufferPool *pool = NULL;
guint size, min = 0, max = 0;
GstStructure *config;
GstVideoInfo vinfo;
gboolean update_pool;
gst_query_parse_allocation (query, &outcaps, NULL);
gst_video_info_init (&vinfo);
gst_video_info_from_caps (&vinfo, outcaps);
if (gst_query_get_n_allocation_pools (query) > 0) {
gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
size = MAX (size, vinfo.size);
update_pool = TRUE;
} else {
pool = NULL;
size = vinfo.size;
min = max = 0;
update_pool = FALSE;
}
if (pool == NULL
|| !gst_buffer_pool_has_option (pool,
GST_BUFFER_POOL_OPTION_VDP_VIDEO_META)) {
if (pool)
gst_object_unref (pool);
/* no pool or pool doesn't support GstVdpVideoMeta, we can make our own */
GST_DEBUG_OBJECT (video_decoder,
"no pool or doesn't support GstVdpVideoMeta, making new pool");
pool = gst_vdp_video_buffer_pool_new (vdp_decoder->device);
}
/* now configure */
config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_set_params (config, outcaps, size, min, max);
gst_buffer_pool_config_add_option (config,
GST_BUFFER_POOL_OPTION_VDP_VIDEO_META);
gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
gst_buffer_pool_set_config (pool, config);
if (update_pool)
gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
else
gst_query_add_allocation_pool (query, pool, size, min, max);
if (pool)
gst_object_unref (pool);
return TRUE;
}
static gboolean
gst_vdp_decoder_start (GstVideoDecoder * video_decoder)
{
GstVdpDecoder *vdp_decoder = GST_VDP_DECODER (video_decoder);
GError *err = NULL;
GST_DEBUG_OBJECT (video_decoder, "Starting");
vdp_decoder->device = gst_vdp_get_device (vdp_decoder->display, &err);
if (G_UNLIKELY (!vdp_decoder->device))
goto device_error;
vdp_decoder->decoder = VDP_INVALID_HANDLE;
return TRUE;
device_error:
gst_vdp_decoder_post_error (vdp_decoder, err);
return FALSE;
}
static gboolean
gst_vdp_decoder_stop (GstVideoDecoder * video_decoder)
{
GstVdpDecoder *vdp_decoder = GST_VDP_DECODER (video_decoder);
if (vdp_decoder->decoder != VDP_INVALID_HANDLE) {
GstVdpDevice *device = vdp_decoder->device;
VdpStatus status;
status = device->vdp_decoder_destroy (vdp_decoder->decoder);
if (status != VDP_STATUS_OK) {
GST_ELEMENT_ERROR (vdp_decoder, RESOURCE, READ,
("Could not destroy vdpau decoder"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
return FALSE;
}
}
g_object_unref (vdp_decoder->device);
return TRUE;
}
static void
gst_vdp_decoder_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstVdpDecoder *vdp_decoder = GST_VDP_DECODER (object);
switch (prop_id) {
case PROP_DISPLAY:
g_value_set_string (value, vdp_decoder->display);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_vdp_decoder_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstVdpDecoder *vdp_decoder = GST_VDP_DECODER (object);
switch (prop_id) {
case PROP_DISPLAY:
g_free (vdp_decoder->display);
vdp_decoder->display = g_value_dup_string (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_vdp_decoder_finalize (GObject * object)
{
GstVdpDecoder *vdp_decoder = GST_VDP_DECODER (object);
g_free (vdp_decoder->display);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_vdp_decoder_init (GstVdpDecoder * vdp_decoder)
{
}
static void
gst_vdp_decoder_class_init (GstVdpDecoderClass * klass)
{
GObjectClass *object_class;
GstVideoDecoderClass *video_decoder_class;
GstElementClass *element_class;
GstCaps *src_caps;
GstPadTemplate *src_template;
object_class = G_OBJECT_CLASS (klass);
element_class = GST_ELEMENT_CLASS (klass);
video_decoder_class = GST_VIDEO_DECODER_CLASS (klass);
object_class->get_property = gst_vdp_decoder_get_property;
object_class->set_property = gst_vdp_decoder_set_property;
object_class->finalize = gst_vdp_decoder_finalize;
video_decoder_class->start = gst_vdp_decoder_start;
video_decoder_class->stop = gst_vdp_decoder_stop;
video_decoder_class->decide_allocation = gst_vdp_decoder_decide_allocation;
GST_FIXME ("Actually create srcpad template from hw capabilities");
src_caps =
gst_caps_from_string (GST_VIDEO_CAPS_MAKE_WITH_FEATURES
(GST_CAPS_FEATURE_MEMORY_VDPAU,
"{ YV12 }") ";" GST_VIDEO_CAPS_MAKE ("{ YV12 }"));
src_template =
gst_pad_template_new (GST_VIDEO_DECODER_SRC_NAME, GST_PAD_SRC,
GST_PAD_ALWAYS, src_caps);
gst_element_class_add_pad_template (element_class, src_template);
if (src_caps)
gst_caps_unref (src_caps);
g_object_class_install_property (object_class,
PROP_DISPLAY, g_param_spec_string ("display", "Display", "X Display name",
NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
}

View file

@ -1,76 +0,0 @@
/* GStreamer
*
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_VDP_DECODER_H__
#define __GST_VDP_DECODER_H__
#include <gst/gst.h>
#include <vdpau/vdpau.h>
#include <gst/video/gstvideodecoder.h>
#include "gstvdpdevice.h"
G_BEGIN_DECLS
#define GST_TYPE_VDP_DECODER (gst_vdp_decoder_get_type())
#define GST_VDP_DECODER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_VDP_DECODER, GstVdpDecoder))
#define GST_VDP_DECODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_VDP_DECODER, GstVdpDecoderClass))
#define GST_VDP_DECODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_VDP_DECODER, GstVdpDecoderClass))
#define GST_IS_VDP_DECODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_VDP_DECODER))
#define GST_IS_VDP_DECODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_VDP_DECODER))
typedef struct _GstVdpDecoder GstVdpDecoder;
typedef struct _GstVdpDecoderClass GstVdpDecoderClass;
struct _GstVdpDecoder {
GstVideoDecoder video_decoder;
GstVdpDevice *device;
VdpDecoder decoder;
GstVideoInfo info;
/* properties */
gchar *display;
};
struct _GstVdpDecoderClass {
GstVideoDecoderClass video_decoder_class;
};
void
gst_vdp_decoder_post_error (GstVdpDecoder * decoder, GError * error);
GstFlowReturn
gst_vdp_decoder_render (GstVdpDecoder * vdp_decoder, VdpPictureInfo *info,
guint n_bufs, VdpBitstreamBuffer *bufs, GstVideoCodecFrame *frame);
GstFlowReturn
gst_vdp_decoder_init_decoder (GstVdpDecoder * vdp_decoder,
VdpDecoderProfile profile, guint32 max_references,
GstVideoCodecState *output_state);
GType gst_vdp_decoder_get_type (void);
G_END_DECLS
#endif /* __GST_VDP_DECODER_H__ */

View file

@ -1,334 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
* with newer GLib versions (>= 2.31.0) */
#define GLIB_DISABLE_DEPRECATION_WARNINGS
#include "gstvdpdevice.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_device_debug);
#define GST_CAT_DEFAULT gst_vdp_device_debug
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_device_debug, "vdpdevice", 0, "VDPAU device object");
enum
{
PROP_0,
PROP_DISPLAY
};
G_DEFINE_TYPE_WITH_CODE (GstVdpDevice, gst_vdp_device, G_TYPE_OBJECT,
DEBUG_INIT ());
static gboolean
gst_vdp_device_open (GstVdpDevice * device, GError ** error)
{
gint screen;
VdpStatus status;
gint i;
typedef struct
{
gint id;
void *func;
} VdpFunction;
VdpFunction vdp_function[] = {
{VDP_FUNC_ID_DEVICE_DESTROY, &device->vdp_device_destroy},
{VDP_FUNC_ID_VIDEO_SURFACE_CREATE,
&device->vdp_video_surface_create},
{VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,
&device->vdp_video_surface_destroy},
{VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
&device->vdp_video_surface_query_capabilities},
{VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
&device->vdp_video_surface_query_ycbcr_capabilities},
{VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR,
&device->vdp_video_surface_get_bits_ycbcr},
{VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR,
&device->vdp_video_surface_put_bits_ycbcr},
{VDP_FUNC_ID_VIDEO_SURFACE_GET_PARAMETERS,
&device->vdp_video_surface_get_parameters},
{VDP_FUNC_ID_DECODER_CREATE, &device->vdp_decoder_create},
{VDP_FUNC_ID_DECODER_RENDER, &device->vdp_decoder_render},
{VDP_FUNC_ID_DECODER_DESTROY, &device->vdp_decoder_destroy},
{VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
&device->vdp_decoder_query_capabilities},
{VDP_FUNC_ID_DECODER_GET_PARAMETERS,
&device->vdp_decoder_get_parameters},
{VDP_FUNC_ID_VIDEO_MIXER_CREATE, &device->vdp_video_mixer_create},
{VDP_FUNC_ID_VIDEO_MIXER_DESTROY, &device->vdp_video_mixer_destroy},
{VDP_FUNC_ID_VIDEO_MIXER_RENDER, &device->vdp_video_mixer_render},
{VDP_FUNC_ID_VIDEO_MIXER_SET_FEATURE_ENABLES,
&device->vdp_video_mixer_set_feature_enables},
{VDP_FUNC_ID_VIDEO_MIXER_SET_ATTRIBUTE_VALUES,
&device->vdp_video_mixer_set_attribute_values},
{VDP_FUNC_ID_OUTPUT_SURFACE_CREATE, &device->vdp_output_surface_create},
{VDP_FUNC_ID_OUTPUT_SURFACE_DESTROY, &device->vdp_output_surface_destroy},
{VDP_FUNC_ID_OUTPUT_SURFACE_QUERY_CAPABILITIES,
&device->vdp_output_surface_query_capabilities},
{VDP_FUNC_ID_OUTPUT_SURFACE_GET_BITS_NATIVE,
&device->vdp_output_surface_get_bits_native},
{VDP_FUNC_ID_PRESENTATION_QUEUE_TARGET_CREATE_X11,
&device->vdp_presentation_queue_target_create_x11},
{VDP_FUNC_ID_PRESENTATION_QUEUE_TARGET_DESTROY,
&device->vdp_presentation_queue_target_destroy},
{VDP_FUNC_ID_PRESENTATION_QUEUE_CREATE,
&device->vdp_presentation_queue_create},
{VDP_FUNC_ID_PRESENTATION_QUEUE_DESTROY,
&device->vdp_presentation_queue_destroy},
{VDP_FUNC_ID_PRESENTATION_QUEUE_DISPLAY,
&device->vdp_presentation_queue_display},
{VDP_FUNC_ID_PRESENTATION_QUEUE_BLOCK_UNTIL_SURFACE_IDLE,
&device->vdp_presentation_queue_block_until_surface_idle},
{VDP_FUNC_ID_PRESENTATION_QUEUE_SET_BACKGROUND_COLOR,
&device->vdp_presentation_queue_set_background_color},
{VDP_FUNC_ID_PRESENTATION_QUEUE_QUERY_SURFACE_STATUS,
&device->vdp_presentation_queue_query_surface_status}
};
GST_DEBUG_OBJECT (device, "Opening the device for display '%s'",
device->display_name);
device->display = XOpenDisplay (device->display_name);
if (!device->display)
goto create_display_error;
screen = DefaultScreen (device->display);
status =
vdp_device_create_x11 (device->display, screen, &device->device,
&device->vdp_get_proc_address);
if (status != VDP_STATUS_OK)
goto create_device_error;
status = device->vdp_get_proc_address (device->device,
VDP_FUNC_ID_GET_ERROR_STRING, (void **) &device->vdp_get_error_string);
if (status != VDP_STATUS_OK)
goto get_error_string_error;
for (i = 0; i < G_N_ELEMENTS (vdp_function); i++) {
status = device->vdp_get_proc_address (device->device,
vdp_function[i].id, vdp_function[i].func);
if (status != VDP_STATUS_OK)
goto function_error;
}
GST_DEBUG_OBJECT (device, "Succesfully opened the device");
return TRUE;
create_display_error:
g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_OPEN_READ,
"Could not open X display with name: %s", device->display_name);
return FALSE;
create_device_error:
g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_OPEN_READ,
"Could not create VDPAU device for display: %s", device->display_name);
return FALSE;
get_error_string_error:
g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_OPEN_READ,
"Could not get vdp_get_error_string function pointer from VDPAU");
return FALSE;
function_error:
g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_OPEN_READ,
"Could not get function pointer from VDPAU, error returned was: %s",
device->vdp_get_error_string (status));
return FALSE;
}
static GstVdpDevice *
gst_vdp_device_new (const gchar * display_name, GError ** error)
{
GstVdpDevice *device;
device = g_object_new (GST_TYPE_VDP_DEVICE, "display", display_name, NULL);
if (!gst_vdp_device_open (device, error)) {
g_object_unref (device);
return NULL;
}
return device;
}
static void
gst_vdp_device_init (GstVdpDevice * device)
{
device->display_name = NULL;
device->display = NULL;
device->device = VDP_INVALID_HANDLE;
device->vdp_decoder_destroy = NULL;
}
static void
gst_vdp_device_finalize (GObject * object)
{
GstVdpDevice *device = (GstVdpDevice *) object;
if (device->device != VDP_INVALID_HANDLE && device->vdp_decoder_destroy) {
device->vdp_device_destroy (device->device);
device->device = VDP_INVALID_HANDLE;
}
if (device->display) {
XCloseDisplay (device->display);
device->display = NULL;
}
g_free (device->display_name);
device->display_name = NULL;
G_OBJECT_CLASS (gst_vdp_device_parent_class)->finalize (object);
}
static void
gst_vdp_device_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstVdpDevice *device;
g_return_if_fail (GST_IS_VDP_DEVICE (object));
device = (GstVdpDevice *) object;
switch (prop_id) {
case PROP_DISPLAY:
device->display_name = g_value_dup_string (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_vdp_device_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstVdpDevice *device;
g_return_if_fail (GST_IS_VDP_DEVICE (object));
device = (GstVdpDevice *) object;
switch (prop_id) {
case PROP_DISPLAY:
g_value_set_string (value, device->display_name);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_vdp_device_class_init (GstVdpDeviceClass * klass)
{
GObjectClass *object_class = G_OBJECT_CLASS (klass);
object_class->finalize = gst_vdp_device_finalize;
object_class->get_property = gst_vdp_device_get_property;
object_class->set_property = gst_vdp_device_set_property;
g_object_class_install_property (object_class,
PROP_DISPLAY,
g_param_spec_string ("display",
"Display",
"X Display Name",
"", G_PARAM_READABLE | G_PARAM_WRITABLE | G_PARAM_CONSTRUCT_ONLY));
}
typedef struct
{
GHashTable *hash_table;
GMutex mutex;
} GstVdpDeviceCache;
static void
device_destroyed_cb (gpointer data, GObject * object)
{
GstVdpDeviceCache *device_cache = data;
GHashTableIter iter;
gpointer device;
GST_DEBUG ("Removing object from hash table");
g_mutex_lock (&device_cache->mutex);
g_hash_table_iter_init (&iter, device_cache->hash_table);
while (g_hash_table_iter_next (&iter, NULL, &device)) {
if (device == object) {
g_hash_table_iter_remove (&iter);
break;
}
}
g_mutex_unlock (&device_cache->mutex);
}
GstVdpDevice *
gst_vdp_get_device (const gchar * display_name, GError ** error)
{
static gsize once = 0;
static GstVdpDeviceCache device_cache;
GstVdpDevice *device;
GST_DEBUG ("display_name '%s'", display_name);
if (g_once_init_enter (&once)) {
device_cache.hash_table =
g_hash_table_new_full (g_str_hash, g_str_equal, g_free, NULL);
g_mutex_init (&device_cache.mutex);
g_once_init_leave (&once, 1);
}
g_mutex_lock (&device_cache.mutex);
if (display_name)
device = g_hash_table_lookup (device_cache.hash_table, display_name);
else
device = g_hash_table_lookup (device_cache.hash_table, "");
if (!device) {
GST_DEBUG ("No cached device, creating a new one");
device = gst_vdp_device_new (display_name, error);
if (device) {
g_object_weak_ref (G_OBJECT (device), device_destroyed_cb, &device_cache);
if (display_name)
g_hash_table_insert (device_cache.hash_table, g_strdup (display_name),
device);
else
g_hash_table_insert (device_cache.hash_table, g_strdup (""), device);
} else
GST_ERROR ("Could not create GstVdpDevice !");
} else
g_object_ref (device);
g_mutex_unlock (&device_cache.mutex);
return device;
}

View file

@ -1,101 +0,0 @@
/*
 * GStreamer
 * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef _GST_VDP_DEVICE_H_
#define _GST_VDP_DEVICE_H_

#include <X11/Xlib.h>
#include <vdpau/vdpau.h>
#include <vdpau/vdpau_x11.h>

#include <gst/gst.h>

G_BEGIN_DECLS

/* Standard GObject type boilerplate for GstVdpDevice. */
#define GST_TYPE_VDP_DEVICE             (gst_vdp_device_get_type ())
#define GST_VDP_DEVICE(obj)             (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_DEVICE, GstVdpDevice))
#define GST_VDP_DEVICE_CLASS(klass)     (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VDP_DEVICE, GstVdpDeviceClass))
#define GST_IS_VDP_DEVICE(obj)          (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_DEVICE))
#define GST_IS_VDP_DEVICE_CLASS(klass)  (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VDP_DEVICE))
#define GST_VDP_DEVICE_GET_CLASS(obj)   (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_DEVICE, GstVdpDeviceClass))

typedef struct _GstVdpDeviceClass GstVdpDeviceClass;
typedef struct _GstVdpDevice GstVdpDevice;

/* Plain GObject subclass: no virtual methods of its own. */
struct _GstVdpDeviceClass
{
  GObjectClass parent_class;
};

/*
 * GstVdpDevice:
 *
 * Wraps a VDPAU device opened on an X11 display, together with a table of
 * VDPAU entry points used throughout the plugin.  Function pointers are
 * presumably resolved via VdpGetProcAddress at device creation time —
 * the resolution code is in the corresponding .c file, not visible here.
 */
struct _GstVdpDevice
{
  GObject object;

  /* X11 display this device was opened on. */
  gchar *display_name;
  Display *display;
  VdpDevice device;

  /* Device / error handling entry points. */
  VdpDeviceDestroy *vdp_device_destroy;
  VdpGetProcAddress *vdp_get_proc_address;
  VdpGetErrorString *vdp_get_error_string;

  /* Video surface (YCbCr) entry points. */
  VdpVideoSurfaceCreate *vdp_video_surface_create;
  VdpVideoSurfaceDestroy *vdp_video_surface_destroy;
  VdpVideoSurfaceQueryCapabilities *vdp_video_surface_query_capabilities;
  VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *vdp_video_surface_query_ycbcr_capabilities;
  VdpVideoSurfaceGetParameters *vdp_video_surface_get_parameters;
  VdpVideoSurfaceGetBitsYCbCr *vdp_video_surface_get_bits_ycbcr;
  VdpVideoSurfacePutBitsYCbCr *vdp_video_surface_put_bits_ycbcr;

  /* Decoder entry points. */
  VdpDecoderCreate *vdp_decoder_create;
  VdpDecoderDestroy *vdp_decoder_destroy;
  VdpDecoderRender *vdp_decoder_render;
  VdpDecoderQueryCapabilities *vdp_decoder_query_capabilities;
  VdpDecoderGetParameters *vdp_decoder_get_parameters;

  /* Video mixer (postprocessing) entry points. */
  VdpVideoMixerCreate *vdp_video_mixer_create;
  VdpVideoMixerDestroy *vdp_video_mixer_destroy;
  VdpVideoMixerRender *vdp_video_mixer_render;
  VdpVideoMixerSetFeatureEnables *vdp_video_mixer_set_feature_enables;
  VdpVideoMixerSetAttributeValues *vdp_video_mixer_set_attribute_values;

  /* Output surface (RGBA) entry points. */
  VdpOutputSurfaceCreate *vdp_output_surface_create;
  VdpOutputSurfaceDestroy *vdp_output_surface_destroy;
  VdpOutputSurfaceQueryCapabilities *vdp_output_surface_query_capabilities;
  VdpOutputSurfaceGetBitsNative *vdp_output_surface_get_bits_native;

  /* Presentation queue (display) entry points. */
  VdpPresentationQueueTargetCreateX11 *vdp_presentation_queue_target_create_x11;
  VdpPresentationQueueTargetDestroy *vdp_presentation_queue_target_destroy;
  VdpPresentationQueueCreate *vdp_presentation_queue_create;
  VdpPresentationQueueDestroy *vdp_presentation_queue_destroy;
  VdpPresentationQueueDisplay *vdp_presentation_queue_display;
  VdpPresentationQueueBlockUntilSurfaceIdle *vdp_presentation_queue_block_until_surface_idle;
  VdpPresentationQueueSetBackgroundColor *vdp_presentation_queue_set_background_color;
  VdpPresentationQueueQuerySurfaceStatus *vdp_presentation_queue_query_surface_status;
};

GType gst_vdp_device_get_type (void);

/* Returns a cached-or-new device for @display_name (see device cache in .c). */
GstVdpDevice *gst_vdp_get_device (const gchar *display_name, GError **error);

G_END_DECLS

#endif /* _GST_VDP_DEVICE_H_ */

View file

@ -1,406 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdpoutputbuffer.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_output_buffer_debug);
#define GST_CAT_DEFAULT gst_vdp_output_buffer_debug
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_output_buffer_debug, "vdpoutputbuffer", 0, "VDPAU output buffer");
/*
 * gst_vdp_output_buffer_new:
 * Allocates a VdpOutputSurface of the given format/size on @device and
 * wraps it in a new GstVdpOutputBuffer (which takes a ref on @device).
 * Returns NULL and sets @error if surface creation fails.
 */
GstVdpOutputBuffer *
gst_vdp_output_buffer_new (GstVdpDevice * device, VdpRGBAFormat rgba_format,
    gint width, gint height, GError ** error)
{
  GstVdpOutputBuffer *buf;
  VdpOutputSurface surf;
  VdpStatus st;

  st = device->vdp_output_surface_create (device->device, rgba_format, width,
      height, &surf);
  if (st != VDP_STATUS_OK) {
    g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_READ,
        "Couldn't create a VdpOutputSurface, error returned from vdpau was: %s",
        device->vdp_get_error_string (st));
    return NULL;
  }

  buf = (GstVdpOutputBuffer *) gst_mini_object_new (GST_TYPE_VDP_OUTPUT_BUFFER);
  buf->device = g_object_ref (device);
  buf->rgba_format = rgba_format;
  buf->width = width;
  buf->height = height;
  buf->surface = surf;

  return buf;
}
static GObjectClass *gst_vdp_output_buffer_parent_class;

/*
 * Finalize: destroys the wrapped VdpOutputSurface and drops the device ref.
 * If gst_vdp_buffer_revive() returns TRUE the buffer was recycled (e.g. back
 * into a pool) and must not be destroyed.
 */
static void
gst_vdp_output_buffer_finalize (GstVdpOutputBuffer * buffer)
{
  GstVdpDevice *device;
  VdpStatus status;

  if (gst_vdp_buffer_revive (GST_VDP_BUFFER_CAST (buffer)))
    return;

  device = buffer->device;

  status = device->vdp_output_surface_destroy (buffer->surface);
  if (status != VDP_STATUS_OK)
    GST_ERROR
        ("Couldn't destroy the buffers VdpOutputSurface, error returned was: %s",
        device->vdp_get_error_string (status));

  g_object_unref (buffer->device);

  /* Chain up to the parent finalize. */
  GST_MINI_OBJECT_CLASS (gst_vdp_output_buffer_parent_class)->finalize
      (GST_MINI_OBJECT (buffer));
}
/* Instance init: no surface yet; real allocation happens in _new(). */
static void
gst_vdp_output_buffer_init (GstVdpOutputBuffer * buffer, gpointer g_class)
{
  buffer->surface = VDP_INVALID_HANDLE;
  buffer->device = NULL;
}
/* Class init: remembers the parent class and installs our finalize. */
static void
gst_vdp_output_buffer_class_init (gpointer g_class, gpointer class_data)
{
  GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);

  gst_vdp_output_buffer_parent_class = g_type_class_peek_parent (g_class);

  mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
      gst_vdp_output_buffer_finalize;
}
/*
 * gst_vdp_output_buffer_get_type:
 * Registers (once) and returns the GstVdpOutputBuffer GType, derived from
 * GST_TYPE_VDP_BUFFER.  Also initializes the debug category on first use.
 * NOTE(review): the one-time registration is not thread-safe (no g_once);
 * presumably first use happens from plugin init — confirm.
 */
GType
gst_vdp_output_buffer_get_type (void)
{
  static GType _gst_vdp_output_buffer_type;

  if (G_UNLIKELY (_gst_vdp_output_buffer_type == 0)) {
    static const GTypeInfo info = {
      sizeof (GstBufferClass),
      NULL,
      NULL,
      gst_vdp_output_buffer_class_init,
      NULL,
      NULL,
      sizeof (GstVdpOutputBuffer),
      0,
      (GInstanceInitFunc) gst_vdp_output_buffer_init,
      NULL
    };
    _gst_vdp_output_buffer_type = g_type_register_static (GST_TYPE_VDP_BUFFER,
        "GstVdpOutputBuffer", &info, 0);

    DEBUG_INIT ();
  }
  return _gst_vdp_output_buffer_type;
}
/* Maps a VDPAU RGBA surface format to the equivalent raw RGB caps. */
typedef struct
{
  VdpRGBAFormat format;
  GstStaticCaps caps;
} GstVdpOutputBufferFormats;

/* All RGBA formats the plugin can express as video/x-raw-rgb caps.
 * Masks are given for big-endian (4321) byte order. */
GstVdpOutputBufferFormats rgba_formats[] = {
  {VDP_RGBA_FORMAT_A8,
      GST_STATIC_CAPS ("video/x-raw-rgb, "
          "bpp = (int)8, "
          "depth = (int)8, "
          "endianness = (int)4321, "
          "red_mask = (int)0x00, "
          "green_mask = (int)0x00, "
          "blue_mask = (int)0x00, " "alpha_mask = (int)0xff")},
  {VDP_RGBA_FORMAT_B10G10R10A2,
      GST_STATIC_CAPS ("video/x-raw-rgb, "
          "bpp = (int)32, "
          "depth = (int)30, "
          "endianness = (int)4321, "
          "red_mask = (int)0x000003fc, "
          "green_mask = (int)0x003ff000, "
          "blue_mask = (int)0xffc00000, " "alpha_mask = (int)0x00000003")},
  {VDP_RGBA_FORMAT_B8G8R8A8,
      GST_STATIC_CAPS ("video/x-raw-rgb, "
          "bpp = (int)32, "
          "depth = (int)24, "
          "endianness = (int)4321, "
          "red_mask = (int)0x0000ff00, "
          "green_mask = (int)0x00ff0000, "
          "blue_mask = (int)0xff000000, " "alpha_mask = (int)0x000000ff")},
  {VDP_RGBA_FORMAT_R10G10B10A2,
      GST_STATIC_CAPS ("video/x-raw-rgb, "
          "bpp = (int)32, "
          "depth = (int)30, "
          "endianness = (int)4321, "
          "red_mask = (int)0xffc00000, "
          "green_mask = (int)0x003ff000, "
          "blue_mask = (int)0x000003fc, " "alpha_mask = (int)0x00000003")},
  {VDP_RGBA_FORMAT_R8G8B8A8,
      GST_STATIC_CAPS ("video/x-raw-rgb, "
          "bpp = (int)32, "
          "depth = (int)24, "
          "endianness = (int)4321, "
          "red_mask = (int)0xff000000, "
          "green_mask = (int)0x00ff0000, "
          "blue_mask = (int)0x0000ff00, " "alpha_mask = (int)0x000000ff")},
};
/*
 * gst_vdp_output_buffer_get_template_caps:
 * Builds template caps: one "video/x-vdpau-output" structure and one raw
 * RGB structure per entry of rgba_formats, all with a 1..8192 size range.
 * The vdpau structures come first, followed by the raw RGB ones.
 */
GstCaps *
gst_vdp_output_buffer_get_template_caps (void)
{
  GstCaps *result;
  GstCaps *raw_list;
  gint idx;

  result = gst_caps_new_empty ();
  raw_list = gst_caps_new_empty ();

  for (idx = 0; idx < G_N_ELEMENTS (rgba_formats); idx++) {
    GstCaps *vdp_caps;
    GstCaps *raw_caps;

    vdp_caps = gst_caps_new_simple ("video/x-vdpau-output",
        "rgba-format", G_TYPE_INT, rgba_formats[idx].format,
        "width", GST_TYPE_INT_RANGE, 1, 8192,
        "height", GST_TYPE_INT_RANGE, 1, 8192, NULL);
    gst_caps_append (result, vdp_caps);

    raw_caps = gst_caps_copy (gst_static_caps_get (&rgba_formats[idx].caps));
    gst_caps_set_simple (raw_caps,
        "width", GST_TYPE_INT_RANGE, 1, 8192,
        "height", GST_TYPE_INT_RANGE, 1, 8192, NULL);
    gst_caps_append (raw_list, raw_caps);
  }

  gst_caps_append (result, raw_list);

  return result;
}
/*
 * gst_vdp_output_buffer_get_allowed_caps:
 * Queries @device for each format in rgba_formats and returns caps
 * containing only the formats the device actually supports, with the
 * "video/x-vdpau-output" width/height capped at the device maximum.
 * NOTE(review): the raw RGB structures still use the hardcoded 1..8192
 * range rather than max_w/max_h — looks intentional for template parity,
 * but verify against downstream negotiation.
 * On a query error, returns whatever was collected so far (never NULL).
 */
GstCaps *
gst_vdp_output_buffer_get_allowed_caps (GstVdpDevice * device)
{
  GstCaps *caps, *rgb_caps;
  gint i;

  g_return_val_if_fail (GST_IS_VDP_DEVICE (device), NULL);

  caps = gst_caps_new_empty ();
  rgb_caps = gst_caps_new_empty ();

  for (i = 0; i < G_N_ELEMENTS (rgba_formats); i++) {
    VdpStatus status;
    VdpBool is_supported;
    guint max_w, max_h;

    status = device->vdp_output_surface_query_capabilities (device->device,
        rgba_formats[i].format, &is_supported, &max_w, &max_h);

    /* VDP_STATUS_INVALID_RGBA_FORMAT simply means "not supported here". */
    if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_RGBA_FORMAT) {
      GST_ERROR_OBJECT (device,
          "Could not query VDPAU output surface capabilities, "
          "Error returned from vdpau was: %s",
          device->vdp_get_error_string (status));

      goto error;
    }

    if (is_supported) {
      GstCaps *format_caps;

      format_caps = gst_caps_new_simple ("video/x-vdpau-output",
          "rgba-format", G_TYPE_INT, rgba_formats[i].format,
          "width", GST_TYPE_INT_RANGE, 1, max_w,
          "height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
      gst_caps_append (caps, format_caps);

      format_caps = gst_static_caps_get (&rgba_formats[i].caps);
      format_caps = gst_caps_copy (format_caps);
      gst_caps_set_simple (format_caps,
          "width", GST_TYPE_INT_RANGE, 1, 8192,
          "height", GST_TYPE_INT_RANGE, 1, 8192, NULL);
      gst_caps_append (rgb_caps, format_caps);
    }
  }
  gst_caps_append (caps, rgb_caps);

error:
  return caps;
}
/*
 * gst_vdp_caps_to_rgba_format:
 * Maps fixed video/x-raw-rgb caps to the matching VdpRGBAFormat by
 * comparing bpp/depth/endianness and the four channel masks against each
 * entry of rgba_formats.  Returns FALSE if @caps are not fixed, not
 * video/x-raw-rgb, missing any field, or match no known format.
 */
gboolean
gst_vdp_caps_to_rgba_format (GstCaps * caps, VdpRGBAFormat * rgba_format)
{
  GstStructure *structure;
  gint c_bpp, c_depth, c_endianness, c_red_mask, c_green_mask, c_blue_mask,
      c_alpha_mask;
  gint i;

  g_return_val_if_fail (GST_IS_CAPS (caps), FALSE);

  if (!gst_caps_is_fixed (caps))
    return FALSE;

  structure = gst_caps_get_structure (caps, 0);
  if (!gst_structure_has_name (structure, "video/x-raw-rgb"))
    return FALSE;

  /* All fields must be present; otherwise we can't identify the format. */
  if (!gst_structure_get_int (structure, "bpp", &c_bpp) ||
      !gst_structure_get_int (structure, "depth", &c_depth) ||
      !gst_structure_get_int (structure, "endianness", &c_endianness) ||
      !gst_structure_get_int (structure, "red_mask", &c_red_mask) ||
      !gst_structure_get_int (structure, "green_mask", &c_green_mask) ||
      !gst_structure_get_int (structure, "blue_mask", &c_blue_mask) ||
      !gst_structure_get_int (structure, "alpha_mask", &c_alpha_mask))
    return FALSE;

  /* Compare against each known format's reference caps. */
  for (i = 0; i < G_N_ELEMENTS (rgba_formats); i++) {
    gint bpp, depth, endianness, red_mask, green_mask, blue_mask, alpha_mask;
    GstCaps *rgb_caps = gst_static_caps_get (&rgba_formats[i].caps);

    structure = gst_caps_get_structure (rgb_caps, 0);
    gst_structure_get_int (structure, "bpp", &bpp);
    gst_structure_get_int (structure, "depth", &depth);
    gst_structure_get_int (structure, "endianness", &endianness);
    gst_structure_get_int (structure, "red_mask", &red_mask);
    gst_structure_get_int (structure, "green_mask", &green_mask);
    gst_structure_get_int (structure, "blue_mask", &blue_mask);
    gst_structure_get_int (structure, "alpha_mask", &alpha_mask);

    if (c_bpp == bpp && c_depth == depth && c_endianness == endianness &&
        c_red_mask == red_mask && c_green_mask == green_mask &&
        c_blue_mask == blue_mask && c_alpha_mask == alpha_mask) {
      gst_caps_unref (rgb_caps);
      *rgba_format = rgba_formats[i].format;
      return TRUE;
    }

    gst_caps_unref (rgb_caps);
  }

  return FALSE;
}
/*
 * gst_vdp_output_buffer_calculate_size:
 * Computes the byte size needed to hold @output_buf's pixels in system
 * memory (A8 is 1 byte/pixel, every other supported format 4 bytes/pixel).
 * Returns FALSE (after asserting) for an unknown format.
 */
gboolean
gst_vdp_output_buffer_calculate_size (GstVdpOutputBuffer * output_buf,
    guint * size)
{
  guint bytes_per_pixel;

  g_return_val_if_fail (GST_IS_VDP_OUTPUT_BUFFER (output_buf), FALSE);

  switch (output_buf->rgba_format) {
    case VDP_RGBA_FORMAT_A8:
      bytes_per_pixel = 1;
      break;

    case VDP_RGBA_FORMAT_B10G10R10A2:
    case VDP_RGBA_FORMAT_B8G8R8A8:
    case VDP_RGBA_FORMAT_R10G10B10A2:
    case VDP_RGBA_FORMAT_R8G8B8A8:
      bytes_per_pixel = 4;
      break;

    default:
      g_assert_not_reached ();
      return FALSE;
  }

  *size = output_buf->width * output_buf->height * bytes_per_pixel;
  return TRUE;
}
/*
 * gst_vdp_output_buffer_download:
 * Copies the pixels of @output_buf's VdpOutputSurface into the system
 * memory of @outbuf via vdp_output_surface_get_bits_native.
 * Assumes @outbuf is large enough (see _calculate_size) and tightly
 * packed (stride == width * bytes-per-pixel, single plane).
 * Returns FALSE for unknown formats or on a VDPAU error (sets @error).
 */
gboolean
gst_vdp_output_buffer_download (GstVdpOutputBuffer * output_buf,
    GstBuffer * outbuf, GError ** error)
{
  guint8 *data[1];
  guint32 stride[1];
  GstVdpDevice *device;
  VdpOutputSurface surface;
  VdpStatus status;

  g_return_val_if_fail (GST_IS_VDP_OUTPUT_BUFFER (output_buf), FALSE);

  /* Derive the (single-plane) stride from the format. */
  switch (output_buf->rgba_format) {
    case VDP_RGBA_FORMAT_A8:
    {
      stride[0] = output_buf->width;
      break;
    }
    case VDP_RGBA_FORMAT_B10G10R10A2:
    case VDP_RGBA_FORMAT_B8G8R8A8:
    case VDP_RGBA_FORMAT_R10G10B10A2:
    case VDP_RGBA_FORMAT_R8G8B8A8:
    {
      stride[0] = output_buf->width * 4;
      break;
    }
    default:
      return FALSE;
  }

  device = output_buf->device;
  surface = output_buf->surface;
  data[0] = GST_BUFFER_DATA (outbuf);

  GST_LOG_OBJECT (output_buf, "Entering vdp_output_surface_get_bits_native");
  status =
      device->vdp_output_surface_get_bits_native (surface, NULL, (void *) data,
      stride);
  GST_LOG_OBJECT (output_buf,
      "Got status %d from vdp_output_get_bits_native", status);
  if (G_UNLIKELY (status != VDP_STATUS_OK)) {
    g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_READ,
        "Couldn't get data from vdpau, error returned from vdpau was: %s",
        device->vdp_get_error_string (status));
    return FALSE;
  }

  return TRUE;
}

View file

@ -1,62 +0,0 @@
/*
 * GStreamer
 * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef _GST_VDP_OUTPUT_BUFFER_H_
#define _GST_VDP_OUTPUT_BUFFER_H_

#include <gst/gst.h>

#include "gstvdpdevice.h"

/* GstMeta-based replacement API (1.0-style) for the old output buffer. */
GType gst_vdpau_output_meta_api_get_type (void);
const GstMetaInfo * gst_vdpau_output_meta_get_info (void);
#define GST_VDPAU_OUTPUT_META_GET(buf) ((GstVdpauMeta *)gst_buffer_get_meta(buf,gst_vdpau_output_meta_api_get_type()))
#define GST_VDPAU_OUTPUT_META_ADD(buf) ((GstVdpauMeta *)gst_buffer_add_meta(buf,gst_vdpau_output_meta_get_info(),NULL))

/* Meta carrying the VdpOutputSurface attached to a GstBuffer. */
struct _GstVdpauOutputMeta {
  GstMeta meta;

  /* FIXME : Check we actually need all of this */
  GstVdpDevice *device;
  VdpRGBAFormat rgba_format;
  gint width, height;
  VdpOutputSurface surface;
};

/* Legacy 0.10 API kept for reference; disabled pending the port. */
#if 0
/* FIXME : Replace with GST_VIDEO_FORMAT... and GST_VIDEO_CHROMA_... */
GstCaps *gst_vdp_output_buffer_get_template_caps (void);
GstCaps *gst_vdp_output_buffer_get_allowed_caps (GstVdpDevice *device);
gboolean gst_vdp_caps_to_rgba_format (GstCaps *caps, VdpRGBAFormat *rgba_format);

gboolean gst_vdp_output_buffer_calculate_size (GstVdpOutputBuffer *output_buf, guint *size);
/* FIXME : Replace with map/unmap */
gboolean gst_vdp_output_buffer_download (GstVdpOutputBuffer *output_buf, GstBuffer *outbuf, GError **error);

#define GST_VDP_OUTPUT_CAPS \
  "video/x-vdpau-output, " \
  "rgba-format = (int)[0,4], " \
  "width = (int)[1,8192], " \
  "height = (int)[1,8192]"
#endif

#endif

View file

@ -1,148 +0,0 @@
/* -*- Mode: C; indent-tabs-mode: t; c-basic-offset: 4; tab-width: 4 -*- */
/*
* gst-plugins-bad
* Copyright (C) Carl-Anton Ingmarsson 2010 <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "gstvdpdevice.h"
#include "gstvdpoutputbuffer.h"
#include "gstvdpoutputbufferpool.h"
/* Pool of GstVdpOutputBuffers sharing one format/size configuration. */
struct _GstVdpOutputBufferPool
{
  GstVdpBufferPool buffer_pool;

  /* Currently configured format and dimensions (set via set_caps). */
  VdpRGBAFormat rgba_format;
  guint width, height;
};

G_DEFINE_TYPE (GstVdpOutputBufferPool, gst_vdp_output_buffer_pool,
    GST_TYPE_VDP_BUFFER_POOL);

/* Creates a new output buffer pool bound to @device (construct property). */
GstVdpBufferPool *
gst_vdp_output_buffer_pool_new (GstVdpDevice * device)
{
  g_return_val_if_fail (GST_IS_VDP_DEVICE (device), NULL);

  return g_object_new (GST_TYPE_VDP_OUTPUT_BUFFER_POOL, "device", device, NULL);
}
/*
 * Extracts rgba-format, width and height from "video/x-vdpau-output" caps.
 * Returns FALSE if any field is missing.
 * NOTE(review): the out-parameter is typed VdpChromaType but carries a
 * VdpRGBAFormat value (both callers also declare VdpChromaType) — works
 * because both are integer typedefs, but the type should be VdpRGBAFormat.
 */
static gboolean
parse_caps (const GstCaps * caps, VdpChromaType * rgba_format, gint * width,
    gint * height)
{
  GstStructure *structure;

  structure = gst_caps_get_structure (caps, 0);

  if (!gst_structure_get_int (structure, "rgba-format", (gint *) rgba_format))
    return FALSE;
  if (!gst_structure_get_int (structure, "width", width))
    return FALSE;
  if (!gst_structure_get_int (structure, "height", height))
    return FALSE;

  return TRUE;
}
/* TRUE iff @caps parse and match the pool's current format and size. */
static gboolean
gst_vdp_output_buffer_pool_check_caps (GstVdpBufferPool * bpool,
    const GstCaps * caps)
{
  GstVdpOutputBufferPool *opool = GST_VDP_OUTPUT_BUFFER_POOL (bpool);
  VdpChromaType fmt;
  gint w, h;

  if (!parse_caps (caps, &fmt, &w, &h))
    return FALSE;

  return fmt == opool->rgba_format && w == opool->width && h == opool->height;
}
/*
 * Reconfigures the pool from @caps.  Sets *clear_bufs to TRUE when the new
 * configuration differs from the old one, telling the base class to drop
 * previously allocated buffers.  Returns FALSE on unparsable caps.
 */
static gboolean
gst_vdp_output_buffer_pool_set_caps (GstVdpBufferPool * bpool,
    const GstCaps * caps, gboolean * clear_bufs)
{
  GstVdpOutputBufferPool *opool = GST_VDP_OUTPUT_BUFFER_POOL (bpool);
  VdpChromaType rgba_format;
  gint width, height;

  if (!parse_caps (caps, &rgba_format, &width, &height))
    return FALSE;

  if (rgba_format != opool->rgba_format || width != opool->width ||
      height != opool->height)
    *clear_bufs = TRUE;
  else
    *clear_bufs = FALSE;

  opool->rgba_format = rgba_format;
  opool->width = width;
  opool->height = height;

  return TRUE;
}
/* Allocates one buffer with the pool's current format/size on its device. */
static GstVdpBuffer *
gst_vdp_output_buffer_pool_alloc_buffer (GstVdpBufferPool * bpool,
    GError ** error)
{
  GstVdpOutputBufferPool *opool = GST_VDP_OUTPUT_BUFFER_POOL (bpool);
  GstVdpDevice *device;

  device = gst_vdp_buffer_pool_get_device (bpool);
  return GST_VDP_BUFFER_CAST (gst_vdp_output_buffer_new (device,
          opool->rgba_format, opool->width, opool->height, error));
}
/* Finalize: nothing of our own to free; chain up to the parent class. */
static void
gst_vdp_output_buffer_pool_finalize (GObject * object)
{
  /* TODO: Add deinitialization code here */

  G_OBJECT_CLASS (gst_vdp_output_buffer_pool_parent_class)->finalize (object);
}
/* Instance init: unconfigured pool (invalid format, zero dimensions). */
static void
gst_vdp_output_buffer_pool_init (GstVdpOutputBufferPool * opool)
{
  opool->width = 0;
  opool->height = 0;
  opool->rgba_format = -1;
}
/* Class init: wires the GstVdpBufferPool vfuncs and our finalize. */
static void
gst_vdp_output_buffer_pool_class_init (GstVdpOutputBufferPoolClass * klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (klass);
  GstVdpBufferPoolClass *buffer_pool_class = GST_VDP_BUFFER_POOL_CLASS (klass);

  buffer_pool_class->alloc_buffer = gst_vdp_output_buffer_pool_alloc_buffer;
  buffer_pool_class->set_caps = gst_vdp_output_buffer_pool_set_caps;
  buffer_pool_class->check_caps = gst_vdp_output_buffer_pool_check_caps;

  object_class->finalize = gst_vdp_output_buffer_pool_finalize;
}

View file

@ -1,50 +0,0 @@
/* -*- Mode: C; indent-tabs-mode: t; c-basic-offset: 4; tab-width: 4 -*- */
/*
 * gst-plugins-bad
 * Copyright (C) Carl-Anton Ingmarsson 2010 <ca.ingmarsson@gmail.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef _GST_VDP_OUTPUT_BUFFER_POOL_H_
#define _GST_VDP_OUTPUT_BUFFER_POOL_H_

#include <gst/gst.h>

G_BEGIN_DECLS

/* Standard GObject type boilerplate for GstVdpOutputBufferPool. */
#define GST_TYPE_VDP_OUTPUT_BUFFER_POOL             (gst_vdp_output_buffer_pool_get_type ())
#define GST_VDP_OUTPUT_BUFFER_POOL(obj)             (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_OUTPUT_BUFFER_POOL, GstVdpOutputBufferPool))
#define GST_VDP_OUTPUT_BUFFER_POOL_CLASS(klass)     (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VDP_OUTPUT_BUFFER_POOL, GstVdpOutputBufferPoolClass))
#define GST_IS_VDP_OUTPUT_BUFFER_POOL(obj)          (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_OUTPUT_BUFFER_POOL))
#define GST_IS_VDP_OUTPUT_BUFFER_POOL_CLASS(klass)  (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VDP_OUTPUT_BUFFER_POOL))
#define GST_VDP_OUTPUT_BUFFER_POOL_GET_CLASS(obj)   (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_OUTPUT_BUFFER_POOL, GstVdpOutputBufferPoolClass))

typedef struct _GstVdpOutputBufferPool GstVdpOutputBufferPool;
typedef struct _GstVdpOutputBufferPoolClass GstVdpOutputBufferPoolClass;

/* Plain subclass of GstVdpBufferPoolClass; vfuncs set in class_init. */
struct _GstVdpOutputBufferPoolClass
{
  GstVdpBufferPoolClass buffer_pool_class;
};

/* Creates a pool that allocates VdpOutputSurface-backed buffers on @device. */
GstVdpBufferPool *gst_vdp_output_buffer_pool_new (GstVdpDevice *device);

GType gst_vdp_output_buffer_pool_get_type (void) G_GNUC_CONST;

G_END_DECLS

#endif /* _GST_VDP_OUTPUT_BUFFER_POOL_H_ */

File diff suppressed because it is too large Load diff

View file

@ -1,138 +0,0 @@
/* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
* Copyright (C) 2005 Julien Moutte <julien@moutte.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_VDP_SINK_H__
#define __GST_VDP_SINK_H__
#include <gst/video/gstvideosink.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <string.h>
#include <math.h>
#include "gstvdpdevice.h"
G_BEGIN_DECLS
#define GST_TYPE_VDP_SINK \
(gst_vdp_sink_get_type())
#define GST_VDP_SINK(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_VDP_SINK, VdpSink))
#define GST_VDP_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_VDP_SINK, VdpSinkClass))
#define GST_IS_VDP_SINK(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_VDP_SINK))
#define GST_IS_VDP_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_VDP_SINK))
typedef struct _GstXContext GstXContext;
typedef struct _GstVdpWindow GstVdpWindow;
typedef struct _VdpSink VdpSink;
typedef struct _VdpSinkClass VdpSinkClass;
/*
* GstVdpWindow:
* @win: the Window ID of this X11 window
* @target the VdpPresentationQueueTarget of this window
* @queue the VdpPresentationQueue of this window
* @width: the width in pixels of Window @win
* @height: the height in pixels of Window @win
* @internal: used to remember if Window @win was created internally or passed
* through the #GstXOverlay interface
*
* Structure used to store informations about a Window.
*/
struct _GstVdpWindow {
Window win;
VdpPresentationQueueTarget target;
VdpPresentationQueue queue;
gint width, height;
gboolean internal;
};
/**
* VdpSink:
* @display_name: the name of the Display we want to render to
* @device: the GstVdpDevice associated with the display_name
* @window: the #GstVdpWindow we are rendering to
* @cur_image: a reference to the last #GstBuffer that was put to @window. It
* is used when Expose events are received to redraw the latest video frame
* @event_thread: a thread listening for events on @window and handling them
* @running: used to inform @event_thread if it should run/shutdown
* @fps_n: the framerate fraction numerator
* @fps_d: the framerate fraction denominator
* @x_lock: used to protect X calls as we are not using the XLib in threaded
* mode
* @flow_lock: used to protect data flow routines from external calls such as
* events from @event_thread or methods from the #GstXOverlay interface
* @par: used to override calculated pixel aspect ratio from @xcontext
* @synchronous: used to store if XSynchronous should be used or not (for
* debugging purpose only)
* @handle_events: used to know if we should handle select XEvents or not
*
* The #VdpSink data structure.
*/
struct _VdpSink {
/* Our element stuff */
GstVideoSink videosink;
char *display_name;
GstVdpDevice *device;
GstBufferPool *bpool;
GstCaps *caps;
GstVdpWindow *window;
GstBuffer *cur_image;
GThread *event_thread;
gboolean running;
/* Framerate numerator and denominator */
gint fps_n;
gint fps_d;
GMutex *device_lock;
GMutex *x_lock;
GMutex *flow_lock;
/* object-set pixel aspect ratio */
GValue *par;
gboolean synchronous;
gboolean handle_events;
gboolean handle_expose;
/* stream metadata */
gchar *media_title;
};
struct _VdpSinkClass {
GstVideoSinkClass parent_class;
};
GType gst_vdp_sink_get_type(void);
G_END_DECLS
#endif /* __GST_VDP_SINK_H__ */

View file

@ -1,89 +0,0 @@
/*
* gst-plugins-bad
* Copyright (C) 2012 Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdputils.h"
/* One row of the YCbCr mapping table: a VDPAU chroma type, the VDPAU
 * packed/planar format, and the equivalent GStreamer video format. */
typedef struct
{
  VdpChromaType chroma_type;
  VdpYCbCrFormat format;
  GstVideoFormat vformat;
} GstVdpVideoBufferFormats;

/* Supported GstVideoFormat <-> VDPAU YCbCr format pairs. */
static const GstVdpVideoBufferFormats yuv_formats[] = {
  {VDP_CHROMA_TYPE_420, VDP_YCBCR_FORMAT_YV12, GST_VIDEO_FORMAT_YV12},
  {VDP_CHROMA_TYPE_420, VDP_YCBCR_FORMAT_NV12, GST_VIDEO_FORMAT_NV12},
  {VDP_CHROMA_TYPE_422, VDP_YCBCR_FORMAT_UYVY, GST_VIDEO_FORMAT_UYVY},
  {VDP_CHROMA_TYPE_444, VDP_YCBCR_FORMAT_V8U8Y8A8, GST_VIDEO_FORMAT_AYUV},
  /* AVUY variant has no GStreamer equivalent; kept for reference: */
  /* { */
  /*   VDP_CHROMA_TYPE_444, */
  /*   VDP_YCBCR_FORMAT_Y8U8V8A8, */
  /*   GST_MAKE_FOURCC ('A', 'V', 'U', 'Y') */
  /* }, */
  {VDP_CHROMA_TYPE_422, VDP_YCBCR_FORMAT_YUYV, GST_VIDEO_FORMAT_YUY2}
};
/*
 * gst_video_format_to_vdp_ycbcr:
 * Looks up the VDPAU YCbCr format for @format in the mapping table.
 * Returns -1 when the format has no VDPAU equivalent.
 */
VdpYCbCrFormat
gst_video_format_to_vdp_ycbcr (GstVideoFormat format)
{
  gint idx;

  for (idx = 0; idx < G_N_ELEMENTS (yuv_formats); idx++) {
    if (yuv_formats[idx].vformat == format)
      return yuv_formats[idx].format;
  }

  return -1;
}
/*
 * gst_video_info_to_vdp_chroma_type:
 * Derives the VDPAU chroma type (4:2:0 / 4:2:2 / 4:4:4) from the
 * subsampling shifts of @info's format description.
 * Returns -1 for layouts that don't map to a VDPAU chroma type.
 */
VdpChromaType
gst_video_info_to_vdp_chroma_type (GstVideoInfo * info)
{
  const GstVideoFormatInfo *finfo = info->finfo;
  VdpChromaType ret = -1;

  /* Check subsampling of second plane (first is always non-subsampled) */
  switch (GST_VIDEO_FORMAT_INFO_W_SUB (finfo, 1)) {
    case 0:
      /* Not subsampled in width for second plane */
      if (GST_VIDEO_FORMAT_INFO_W_SUB (finfo, 2))
        /* Not subsampled at all (4:4:4) */
        ret = VDP_CHROMA_TYPE_444;
      break;
    case 1:
      /* Subsampled horizontally once */
      if (GST_VIDEO_FORMAT_INFO_H_SUB (finfo, 2) == 0)
        /* Not subsampled vertically (4:2:2) */
        ret = VDP_CHROMA_TYPE_422;
      else if (GST_VIDEO_FORMAT_INFO_H_SUB (finfo, 2) == 1)
        /* Subsampled vertically once (4:2:0) */
        ret = VDP_CHROMA_TYPE_420;
      break;
    default:
      break;
  }

  return ret;
}

View file

@ -1,38 +0,0 @@
/*
 * gst-plugins-bad
 * Copyright (C) 2012 Edward Hervey <edward@collabora.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef _GST_VDP_UTILS_H_
#define _GST_VDP_UTILS_H_

#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>
#include <gst/video/gstvideopool.h>

#include "gstvdpdevice.h"

G_BEGIN_DECLS

/* GStreamer <-> VDPAU format conversion helpers (see gstvdputils.c). */
VdpChromaType gst_video_info_to_vdp_chroma_type (GstVideoInfo *info);
VdpYCbCrFormat gst_video_format_to_vdp_ycbcr (GstVideoFormat format);

G_END_DECLS

#endif /* _GST_VDP_UTILS_H_ */

View file

@ -1,211 +0,0 @@
/*
* gst-plugins-bad
* Copyright (C) 2012 Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdpvideobufferpool.h"
#include "gstvdpvideomemory.h"
/* Debug category for the VDPAU video buffer pool. */
GST_DEBUG_CATEGORY_STATIC (gst_vdp_vidbufpool_debug);
#define GST_CAT_DEFAULT gst_vdp_vidbufpool_debug

static void gst_vdp_video_buffer_pool_finalize (GObject * object);

/* Runs inside G_DEFINE_TYPE_WITH_CODE below to set up the category. */
#define DEBUG_INIT \
  GST_DEBUG_CATEGORY_INIT (gst_vdp_vidbufpool_debug, "vdpvideopool", 0, \
      "VDPAU Video bufferpool");

#define gst_vdp_video_buffer_pool_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstVdpVideoBufferPool, gst_vdp_video_buffer_pool,
    GST_TYPE_BUFFER_POOL, DEBUG_INIT);
/* Reports the pool options we support (video meta + vdp video meta). */
static const gchar **
gst_vdp_video_buffer_pool_get_options (GstBufferPool * pool)
{
  static const gchar *supported[] = {
    GST_BUFFER_POOL_OPTION_VIDEO_META,
    GST_BUFFER_POOL_OPTION_VDP_VIDEO_META,
    NULL
  };

  return supported;
}
/*
 * set_config vfunc: parses the caps out of @config into the pool's
 * GstVideoInfo, records which metas to attach to allocated buffers, and
 * chains up to the base class.  Returns FALSE on invalid/missing caps.
 */
static gboolean
gst_vdp_video_buffer_pool_set_config (GstBufferPool * pool,
    GstStructure * config)
{
  GstVdpVideoBufferPool *vdppool = GST_VDP_VIDEO_BUFFER_POOL_CAST (pool);
  GstVideoInfo info;
  GstCaps *caps;

  if (!gst_buffer_pool_config_get_params (config, &caps, NULL, NULL, NULL))
    goto wrong_config;

  if (caps == NULL)
    goto no_caps;

  /* now parse the caps from the config */
  if (!gst_video_info_from_caps (&info, caps))
    goto wrong_caps;

  GST_LOG_OBJECT (pool, "%dx%d, caps %" GST_PTR_FORMAT, info.width, info.height,
      caps);

  if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_UNKNOWN)
    goto unknown_format;

  vdppool->info = info;

  /* enable metadata based on config of the pool */
  vdppool->add_videometa =
      gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_META);

  /* parse extra alignment info */
  vdppool->add_vdpmeta = gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_VDP_VIDEO_META);

  return GST_BUFFER_POOL_CLASS (parent_class)->set_config (pool, config);

  /* ERRORS */
wrong_config:
  {
    GST_WARNING_OBJECT (pool, "invalid config");
    return FALSE;
  }
no_caps:
  {
    GST_WARNING_OBJECT (pool, "no caps in config");
    return FALSE;
  }
wrong_caps:
  {
    GST_WARNING_OBJECT (pool,
        "failed getting geometry from caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
unknown_format:
  {
    GST_WARNING_OBJECT (vdppool, "failed to get format from caps %"
        GST_PTR_FORMAT, caps);
    GST_ELEMENT_ERROR (vdppool, RESOURCE, WRITE,
        ("Failed to create output image buffer of %dx%d pixels",
            info.width, info.height),
        ("Invalid input caps %" GST_PTR_FORMAT, caps));
    return FALSE;
  }
}
/* This function handles GstBuffer creation */
/*
 * alloc_buffer vfunc: creates a GstBuffer backed by a GstVdpVideoMemory
 * for the pool's configured GstVideoInfo, optionally attaching a
 * GstVideoMeta with VDPAU-aware map/unmap functions.
 * Returns GST_FLOW_ERROR if buffer or memory creation fails.
 */
static GstFlowReturn
gst_vdp_video_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstVdpVideoBufferPool *vdppool = GST_VDP_VIDEO_BUFFER_POOL_CAST (pool);
  GstVideoInfo *info;
  GstBuffer *buf;
  GstMemory *vdp_mem;

  info = &vdppool->info;

  if (!(buf = gst_buffer_new ()))
    goto no_buffer;

  if (!(vdp_mem = gst_vdp_video_memory_alloc (vdppool->device, info)))
    goto mem_create_failed;

  gst_buffer_append_memory (buf, vdp_mem);

  if (vdppool->add_videometa) {
    GstVideoMeta *vmeta;

    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    /* these are just the defaults for now */
    vmeta = gst_buffer_add_video_meta (buf, 0, GST_VIDEO_INFO_FORMAT (info),
        GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info));
    /* Route map/unmap through the VDPAU surface accessors. */
    vmeta->map = gst_vdp_video_memory_map;
    vmeta->unmap = gst_vdp_video_memory_unmap;
  }

  *buffer = buf;

  return GST_FLOW_OK;

  /* ERROR */
no_buffer:
  {
    GST_WARNING_OBJECT (pool, "can't create image");
    return GST_FLOW_ERROR;
  }
mem_create_failed:
  {
    /* Fixed garbled warning text ("Could create GstVdpVideo Memory"). */
    GST_WARNING_OBJECT (pool, "Couldn't create GstVdpVideoMemory");
    return GST_FLOW_ERROR;
  }
}
/* Creates a new buffer pool that allocates VdpVideoSurface-backed buffers
 * on @device.  The pool keeps a reference on @device until it is
 * finalized. */
GstBufferPool *
gst_vdp_video_buffer_pool_new (GstVdpDevice * device)
{
  GstVdpVideoBufferPool *vpool =
      g_object_new (GST_TYPE_VDP_VIDEO_BUFFER_POOL, NULL);

  /* Take ownership of the initial floating reference. */
  g_object_ref_sink (vpool);
  vpool->device = gst_object_ref (device);

  GST_LOG_OBJECT (vpool, "new VdpVideo buffer pool %p", vpool);

  return GST_BUFFER_POOL_CAST (vpool);
}
/* Class init: wires the GObject finalize and the GstBufferPool vmethods
 * implemented above. */
static void
gst_vdp_video_buffer_pool_class_init (GstVdpVideoBufferPoolClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstBufferPoolClass *gstbufferpool_class = (GstBufferPoolClass *) klass;

  gobject_class->finalize = gst_vdp_video_buffer_pool_finalize;

  gstbufferpool_class->get_options = gst_vdp_video_buffer_pool_get_options;
  gstbufferpool_class->set_config = gst_vdp_video_buffer_pool_set_config;
  gstbufferpool_class->alloc_buffer = gst_vdp_video_buffer_pool_alloc;
}
/* Instance init: nothing to do; all state is set up in
 * gst_vdp_video_buffer_pool_new () and set_config (). */
static void
gst_vdp_video_buffer_pool_init (GstVdpVideoBufferPool * pool)
{
}
/* Finalize: releases the device reference taken in
 * gst_vdp_video_buffer_pool_new () and chains up. */
static void
gst_vdp_video_buffer_pool_finalize (GObject * object)
{
  GstVdpVideoBufferPool *pool = GST_VDP_VIDEO_BUFFER_POOL_CAST (object);

  GST_LOG_OBJECT (pool, "finalize VdpVideo buffer pool %p", pool);

  gst_object_unref (pool->device);

  G_OBJECT_CLASS (gst_vdp_video_buffer_pool_parent_class)->finalize (object);
}

View file

@ -1,97 +0,0 @@
/*
* gst-plugins-bad
* Copyright (C) Carl-Anton Ingmarsson 2010 <ca.ingmarsson@gmail.com>
* 2012 Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_VDP_VIDEO_BUFFERPOOL_H_
#define _GST_VDP_VIDEO_BUFFERPOOL_H_
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>
#include <gst/video/gstvideopool.h>
#include "gstvdpdevice.h"
G_BEGIN_DECLS
#define GST_VDPAU_SURFACE_META_GET(buf) ((GstVdpauMeta *)gst_buffer_get_meta(buf,gst_vdpau_surface_meta_api_get_type()))
#define GST_VDPAU_SURFACE_META_ADD(buf) ((GstVdpauMeta *)gst_buffer_add_meta(buf,gst_vdpau_surface_meta_get_info(),NULL))
/* Buffer meta tying a VdpVideoSurface (and the device that owns it) to a
 * GstBuffer. */
struct _GstVdpauSurfaceMeta {
  GstMeta meta;

  GstVdpDevice *device;         /* device the surface was created on */
  VdpVideoSurface surface;      /* the VDPAU surface handle */
};

GType gst_vdpau_surface_meta_api_get_type (void);
const GstMetaInfo * gst_vdpau_surface_meta_get_info (void);
/**
* GST_BUFFER_POOL_OPTION_VDP_VIDEO_META:
*
* An option that can be activated on bufferpool to request VdpVideo metadata
* on buffers from the pool.
*/
#define GST_BUFFER_POOL_OPTION_VDP_VIDEO_META "GstBufferPoolOptionVdpVideoMeta"
typedef struct _GstVdpVideoBufferPool GstVdpVideoBufferPool;
typedef struct _GstVdpVideoBufferPoolClass GstVdpVideoBufferPoolClass;
/* buffer pool functions */
#define GST_TYPE_VDP_VIDEO_BUFFER_POOL (gst_vdp_video_buffer_pool_get_type())
#define GST_IS_VDP_VIDEO_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_VIDEO_BUFFER_POOL))
#define GST_VDP_VIDEO_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_VIDEO_BUFFER_POOL, GstVdpVideoBufferPool))
#define GST_VDP_VIDEO_BUFFER_POOL_CAST(obj) ((GstVdpVideoBufferPool*)(obj))
/* Buffer pool handing out buffers backed by GstVdpVideoMemory. */
struct _GstVdpVideoBufferPool
{
  GstBufferPool bufferpool;

  GstVdpDevice *device;         /* device used to create surfaces (reffed) */

  GstVideoInfo info;            /* negotiated video info from set_config () */
  VdpChromaType chroma_type;
  gboolean add_videometa;       /* attach GstVideoMeta to allocated buffers */
  gboolean add_vdpmeta;         /* attach VdpVideo meta to allocated buffers */
};

struct _GstVdpVideoBufferPoolClass
{
  GstBufferPoolClass parent_class;
};
GType gst_vdp_video_buffer_pool_get_type (void);
GstBufferPool *gst_vdp_video_buffer_pool_new (GstVdpDevice *device);
GstCaps *gst_vdp_video_buffer_get_caps (gboolean filter, VdpChromaType chroma_type);
#if 0
GstCaps *gst_vdp_video_buffer_get_allowed_caps (GstVdpDevice * device);
gboolean gst_vdp_video_buffer_calculate_size (guint32 fourcc, gint width, gint height, guint *size);
/* FIXME : Replace with map/unmap */
gboolean gst_vdp_video_buffer_download (GstVdpVideoBuffer *inbuf, GstBuffer *outbuf, guint32 fourcc, gint width, gint height);
gboolean gst_vdp_video_buffer_upload (GstVdpVideoBuffer *video_buf, GstBuffer *src_buf, guint fourcc, gint width, gint height);
#endif
G_END_DECLS
#endif /* _GST_VDP_VIDEO_BUFFER_POOL_H_ */

View file

@ -1,334 +0,0 @@
/*
* GStreamer
* Copyright (C) 2012 Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/video/video.h>
#include "gstvdpvideomemory.h"
#include "gstvdputils.h"
GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
GST_DEBUG_CATEGORY_STATIC (gst_vdp_video_mem_debug);
#define GST_CAT_DEFAULT gst_vdp_video_mem_debug
static GstAllocator *_vdp_video_allocator;
/* Initializes the common GstMemory fields of @mem and caches the VDPAU
 * chroma type / YCbCr format derived from @info.  NOTE(review): @info is
 * stored by pointer, not copied, so the caller's GstVideoInfo must outlive
 * the memory — confirm against callers. */
static void
_vdp_video_mem_init (GstVdpVideoMemory * mem, GstAllocator * allocator,
    GstMemory * parent, GstVdpDevice * device, GstVideoInfo * info)
{
  gst_memory_init (GST_MEMORY_CAST (mem), GST_MEMORY_FLAG_NO_SHARE,
      allocator, parent, GST_VIDEO_INFO_SIZE (info), 0, 0,
      GST_VIDEO_INFO_SIZE (info));

  mem->device = gst_object_ref (device);
  mem->info = info;
  mem->chroma_type = gst_video_info_to_vdp_chroma_type (info);
  mem->ycbcr_format =
      gst_video_format_to_vdp_ycbcr (GST_VIDEO_INFO_FORMAT (info));
  /* no CPU-side mapping exists yet */
  mem->refcount = 0;

  GST_DEBUG ("new VdpVideo memory");
}
/* Allocates a GstVdpVideoMemory and creates its backing VdpVideoSurface on
 * @device.  Returns NULL (after freeing the partially initialized memory)
 * if VdpVideoSurfaceCreate fails.  NOTE(review): on the error path the
 * device ref taken in _vdp_video_mem_init () is not dropped — possible
 * leak, confirm. */
static GstVdpVideoMemory *
_vdp_video_mem_new (GstAllocator * allocator, GstMemory * parent,
    GstVdpDevice * device, GstVideoInfo * info)
{
  VdpStatus status;
  GstVdpVideoMemory *mem;
  VdpVideoSurface surface;

  mem = g_slice_new0 (GstVdpVideoMemory);
  _vdp_video_mem_init (mem, allocator, parent, device, info);

  GST_TRACE
      ("Calling VdpVideoSurfaceCreate(chroma_type:%d, width:%d, height:%d)",
      mem->chroma_type, mem->info->width, mem->info->height);

  status =
      device->vdp_video_surface_create (device->device, mem->chroma_type,
      mem->info->width, mem->info->height, &surface);
  if (status != VDP_STATUS_OK)
    goto create_error;

  /* device->vdp_video_surface_get_parameters (device->device, &chroma_type, */
  /*     &width, &height); */

  GST_TRACE ("created surface %u", surface);

  mem->surface = surface;

  return mem;

  /* ERRORS */
create_error:
  {
    GST_ERROR ("Failed to create video surface: %s",
        device->vdp_get_error_string (status));
    g_slice_free (GstVdpVideoMemory, mem);
    return NULL;
  }
}
/* Downloads the surface contents into a malloc'ed system-memory cache the
 * first time it is called; later calls only bump the mapping refcount.
 * Returns FALSE (with no cache left behind) if the download fails. */
static gboolean
ensure_data (GstVdpVideoMemory * vmem)
{
  VdpStatus vdp_stat;
  GstVideoInfo *info = vmem->info;
#ifndef GST_DISABLE_GST_DEBUG
  GstClockTime before, after;
#endif

  /* Fixed: g_atomic_int_add () returns the *previous* value, so "already
   * downloaded" is old > 0, not old > 1.  The old test made every second
   * mapping re-download the surface and leak the previous cache. */
  if (g_atomic_int_add (&vmem->refcount, 1) > 0)
    return TRUE;

  /* Allocate enough room to store data */
  vmem->cache = g_malloc (GST_VIDEO_INFO_SIZE (info));
  vmem->cached_data[0] = vmem->cache;
  vmem->cached_data[1] = vmem->cache + GST_VIDEO_INFO_PLANE_OFFSET (info, 1);
  vmem->cached_data[2] = vmem->cache + GST_VIDEO_INFO_PLANE_OFFSET (info, 2);

  vmem->destination_pitches[0] = GST_VIDEO_INFO_PLANE_STRIDE (info, 0);
  vmem->destination_pitches[1] = GST_VIDEO_INFO_PLANE_STRIDE (info, 1);
  vmem->destination_pitches[2] = GST_VIDEO_INFO_PLANE_STRIDE (info, 2);

  GST_DEBUG ("cached_data %p %p %p",
      vmem->cached_data[0], vmem->cached_data[1], vmem->cached_data[2]);
  GST_DEBUG ("pitches %d %d %d",
      vmem->destination_pitches[0],
      vmem->destination_pitches[1], vmem->destination_pitches[2]);

#ifndef GST_DISABLE_GST_DEBUG
  before = gst_util_get_timestamp ();
#endif
  vdp_stat =
      vmem->device->vdp_video_surface_get_bits_ycbcr (vmem->surface,
      vmem->ycbcr_format, vmem->cached_data, vmem->destination_pitches);
#ifndef GST_DISABLE_GST_DEBUG
  after = gst_util_get_timestamp ();
#endif

  GST_CAT_WARNING (GST_CAT_PERFORMANCE, "Downloading took %" GST_TIME_FORMAT,
      GST_TIME_ARGS (after - before));

  if (vdp_stat != VDP_STATUS_OK) {
    GST_ERROR ("Failed to get bits : %s",
        vmem->device->vdp_get_error_string (vdp_stat));
    g_free (vmem->cache);
    vmem->cache = NULL;
    /* Undo the refcount increment so a later map retries the download. */
    (void) g_atomic_int_add (&vmem->refcount, -1);
    return FALSE;
  }

  return TRUE;
}
/* Drops one mapping reference; when the last mapping goes away the CPU
 * cache is freed. */
static void
release_data (GstVdpVideoMemory * vmem)
{
  g_return_if_fail (vmem->refcount > 0);

  if (g_atomic_int_dec_and_test (&vmem->refcount)) {
    g_free (vmem->cache);
    /* Fixed: clear the pointer so _vdp_video_mem_free () — which also
     * g_free()s vmem->cache — cannot double-free it. */
    vmem->cache = NULL;
  }
}
/* GstMemoryMapFunction: lazily downloads the surface contents and returns
 * a pointer to the CPU-side cache, or NULL if the download failed. */
static gpointer
_vdp_video_mem_map (GstVdpVideoMemory * vmem, gsize maxsize, GstMapFlags flags)
{
  GST_DEBUG ("surface:%d, maxsize:%" G_GSIZE_FORMAT ", flags:%d",
      vmem->surface, maxsize, flags);

  return ensure_data (vmem) ? vmem->cache : NULL;
}
/* GstMemoryUnmapFunction: drops one mapping reference (the cache is freed
 * on the last unmap). */
static void
_vdp_video_mem_unmap (GstVdpVideoMemory * vmem)
{
  GST_DEBUG ("surface:%d", vmem->surface);

  release_data (vmem);
}
/* GstMemoryCopyFunction stub: copying VDPAU surfaces is not implemented;
 * always returns NULL. */
static GstMemory *
_vdp_video_mem_copy (GstVdpVideoMemory * src, gssize offset, gssize size)
{
  GST_FIXME ("Implement !");
  return NULL;
}
/* GstMemoryShareFunction stub: sharing sub-ranges of a VDPAU surface is
 * not implemented; always returns NULL. */
static GstMemory *
_vdp_video_mem_share (GstVdpVideoMemory * mem, gssize offset, gssize size)
{
  GST_FIXME ("Implement !");
  return NULL;
}
/* GstMemoryIsSpanFunction: distinct VDPAU surfaces are never contiguous,
 * so this always reports FALSE. */
static gboolean
_vdp_video_mem_is_span (GstVdpVideoMemory * mem1, GstVdpVideoMemory * mem2,
    gsize * offset)
{
  return FALSE;
}
/* GstAllocatorClass::alloc guard: generic allocation through the allocator
 * API is unsupported; callers must use gst_vdp_video_memory_alloc ()
 * because a device and video info are required. */
static GstMemory *
_vdp_video_mem_alloc (GstAllocator * allocator, gsize size,
    GstAllocationParams * params)
{
  g_warning ("use gst_vdp_video_memory_alloc () to allocate from this "
      "GstVdpVideoMemory allocator");

  return NULL;
}
/* GstAllocatorClass::free: destroys the backing VdpVideoSurface, drops the
 * device ref, frees the CPU cache (if still allocated) and releases the
 * memory struct itself. */
static void
_vdp_video_mem_free (GstAllocator * allocator, GstMemory * mem)
{
  GstVdpVideoMemory *vmem = (GstVdpVideoMemory *) mem;
  VdpStatus status;

  GST_DEBUG ("Destroying surface %d", vmem->surface);

  status = vmem->device->vdp_video_surface_destroy (vmem->surface);
  if (status != VDP_STATUS_OK)
    GST_ERROR ("Couldn't destroy the VdpVideoSurface: %s",
        vmem->device->vdp_get_error_string (status));

  gst_object_unref (vmem->device);

  g_free (vmem->cache);

  g_slice_free (GstVdpVideoMemory, vmem);
}
/**
 * gst_vdp_video_memory_alloc:
 * @device: a #GstVdpDevice
 * @info: the #GstVideoInfo describing the format to use
 *
 * Returns: a GstMemory object with a VdpVideoSurface specified by @info
 * from @device, or %NULL if surface creation failed
 */
GstMemory *
gst_vdp_video_memory_alloc (GstVdpDevice * device, GstVideoInfo * info)
{
  return (GstMemory *) _vdp_video_mem_new (_vdp_video_allocator, NULL, device,
      info);
}
G_DEFINE_TYPE (GstVdpVideoAllocator, gst_vdp_video_allocator,
GST_TYPE_ALLOCATOR);
/* Class init: installs the alloc/free vfuncs (alloc is a guard that always
 * fails; see _vdp_video_mem_alloc). */
static void
gst_vdp_video_allocator_class_init (GstVdpVideoAllocatorClass * klass)
{
  GstAllocatorClass *allocator_class;

  allocator_class = (GstAllocatorClass *) klass;

  allocator_class->alloc = _vdp_video_mem_alloc;
  allocator_class->free = _vdp_video_mem_free;
}
/* Instance init: registers the memory type name and the per-memory
 * map/unmap/copy/share/is_span implementations. */
static void
gst_vdp_video_allocator_init (GstVdpVideoAllocator * allocator)
{
  GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator);

  alloc->mem_type = GST_VDP_VIDEO_MEMORY_ALLOCATOR;
  alloc->mem_map = (GstMemoryMapFunction) _vdp_video_mem_map;
  alloc->mem_unmap = (GstMemoryUnmapFunction) _vdp_video_mem_unmap;
  alloc->mem_copy = (GstMemoryCopyFunction) _vdp_video_mem_copy;
  alloc->mem_share = (GstMemoryShareFunction) _vdp_video_mem_share;
  alloc->mem_is_span = (GstMemoryIsSpanFunction) _vdp_video_mem_is_span;
}
/**
 * gst_vdp_video_memory_init:
 *
 * Initializes the VdpVideo memory allocator and registers it under
 * #GST_VDP_VIDEO_MEMORY_ALLOCATOR.  It is safe to call this function
 * multiple times.  This must be called before any other GstVdpVideoMemory
 * operation.
 */
void
gst_vdp_video_memory_init (void)
{
  static volatile gsize _init = 0;

  if (g_once_init_enter (&_init)) {
    /* keep one private ref and hand one to the global allocator registry */
    _vdp_video_allocator =
        g_object_new (gst_vdp_video_allocator_get_type (), NULL);
    gst_object_ref_sink (_vdp_video_allocator);
    gst_allocator_register (GST_VDP_VIDEO_MEMORY_ALLOCATOR,
        gst_object_ref (_vdp_video_allocator));

    GST_DEBUG_CATEGORY_INIT (gst_vdp_video_mem_debug, "vdpvideomem", 0,
        "VDPAU VideoSurface Memory/Allocator");
    GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");

    g_once_init_leave (&_init, 1);
  }
}
/* GstVideoMeta map callback: ensures the surface has been downloaded to
 * system memory and returns the plane pointer and stride.  Assumes the
 * GstVdpVideoMemory is the buffer's first memory. */
gboolean
gst_vdp_video_memory_map (GstVideoMeta * meta, guint plane, GstMapInfo * info,
    gpointer * data, gint * stride, GstMapFlags flags)
{
  GstBuffer *buffer = meta->buffer;
  GstVdpVideoMemory *vmem =
      (GstVdpVideoMemory *) gst_buffer_get_memory (buffer, 0);
  gboolean res;

  /* Only handle GstVdpVideoMemory */
  g_return_val_if_fail (((GstMemory *) vmem)->allocator == _vdp_video_allocator,
      FALSE);

  GST_DEBUG ("plane:%d", plane);

  /* download if not already done */
  res = ensure_data (vmem);
  if (res) {
    *data = vmem->cached_data[plane];
    *stride = vmem->destination_pitches[plane];
  }

  /* Fixed: gst_buffer_get_memory () returns a new reference that was
   * previously leaked on every map.  The buffer still holds its own ref,
   * keeping the memory (and its cache) alive while mapped. */
  gst_memory_unref ((GstMemory *) vmem);

  return res;
}
/* GstVideoMeta unmap callback: releases one mapping reference on the
 * buffer's GstVdpVideoMemory. */
gboolean
gst_vdp_video_memory_unmap (GstVideoMeta * meta, guint plane, GstMapInfo * info)
{
  GstVdpVideoMemory *vmem =
      (GstVdpVideoMemory *) gst_buffer_get_memory (meta->buffer, 0);

  GST_DEBUG ("plane:%d", plane);

  GST_FIXME ("implement unmap (and potential upload on last unmap)");

  release_data (vmem);

  /* Fixed: drop the reference returned by gst_buffer_get_memory (), which
   * was previously leaked on every unmap. */
  gst_memory_unref ((GstMemory *) vmem);

  return TRUE;
}

View file

@ -1,103 +0,0 @@
/*
* GStreamer
* Copyright (C) 2012 Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_VDP_VIDEO_MEMORY_H_
#define _GST_VDP_VIDEO_MEMORY_H_
#include <gst/gst.h>
#include <gst/gstmemory.h>
#include <gst/gstallocator.h>
#include <gst/video/video-info.h>
#include <gst/video/gstvideometa.h>
#include "gstvdpdevice.h"
G_BEGIN_DECLS
#define GST_TYPE_VDP_VIDEO_ALLOCATOR (gst_vdp_video_allocator_get_type())
GType gst_vdp_video_allocator_get_type(void);
#define GST_IS_VDP_VIDEO_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_VIDEO_ALLOCATOR))
#define GST_IS_VDP_VIDEO_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VDP_VIDEO_ALLOCATOR))
#define GST_VDP_VIDEO_ALLOCATOR_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_VIDEO_ALLOCATOR, GstVdpVideoAllocatorClass))
#define GST_VDP_VIDEO_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_VIDEO_ALLOCATOR, GstVdpVideoAllocator))
#define GST_VDP_VIDEO_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VDP_VIDEO_ALLOCATOR, GstVdpVideoAllocatorClass))
#define GST_VDP_VIDEO_ALLOCATOR_CAST(obj) ((GstVdpVideoAllocator *)(obj))
typedef struct _GstVdpVideoMemory GstVdpVideoMemory;
typedef struct _GstVdpVideoAllocator GstVdpVideoAllocator;
typedef struct _GstVdpVideoAllocatorClass GstVdpVideoAllocatorClass;
/**
* GstVdpVideoMemory:
* @mem: the parent object
* @device: the #GstVdpDevice to use
* @surface: the #VdpVideoSurface
*
* Represents information about a #VdpVideoSurface
*/
struct _GstVdpVideoMemory
{
  GstMemory mem;

  GstVdpDevice *device;         /* owning device (reffed) */
  VdpVideoSurface surface;      /* backing VDPAU surface handle */

  GstVideoInfo *info;           /* surface format; stored by pointer */
  VdpChromaType chroma_type;
  VdpYCbCrFormat ycbcr_format;

  /* Cached data for mapping */
  volatile gint refcount;       /* number of outstanding CPU mappings */
  GstMapFlags map_flags;
  guint n_planes;
  guint8 *cache;                /* system-memory copy of the surface */
  void * cached_data[4];        /* per-plane pointers into the cache */
  uint32_t destination_pitches[4];  /* per-plane strides of the cache */
};
#define GST_VDP_VIDEO_MEMORY_ALLOCATOR "VdpVideoMemory"
#define GST_CAPS_FEATURE_MEMORY_VDPAU "memory:VdpVideoSurface"
void gst_vdp_video_memory_init (void);
GstMemory *
gst_vdp_video_memory_alloc (GstVdpDevice * device, GstVideoInfo *info);
gboolean gst_vdp_video_memory_map(GstVideoMeta * meta, guint plane,
GstMapInfo * info, gpointer * data,
gint * stride, GstMapFlags flags);
gboolean gst_vdp_video_memory_unmap(GstVideoMeta * meta, guint plane,
GstMapInfo * info);
struct _GstVdpVideoAllocator
{
GstAllocator parent;
};
struct _GstVdpVideoAllocatorClass
{
GstAllocatorClass parent_class;
};
G_END_DECLS
#endif /* _GST_VDP_VIDEO_MEMORY_H_ */

File diff suppressed because it is too large Load diff

View file

@ -1,116 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_VDP_VIDEO_POST_PROCESS_H__
#define __GST_VDP_VIDEO_POST_PROCESS_H__
#include <gst/gst.h>
#include "gstvdpdevice.h"
#include "gstvdpvideobufferpool.h"
G_BEGIN_DECLS
#define MAX_PICTURES 6
typedef struct _GstVdpPicture GstVdpPicture;
struct _GstVdpPicture
{
GstBuffer *buf;
VdpVideoMixerPictureStructure structure;
GstClockTime timestamp;
};
typedef enum
{
GST_VDP_DEINTERLACE_MODE_AUTO,
GST_VDP_DEINTERLACE_MODE_INTERLACED,
GST_VDP_DEINTERLACE_MODE_DISABLED
} GstVdpDeinterlaceModes;
typedef enum
{
GST_VDP_DEINTERLACE_METHOD_BOB,
GST_VDP_DEINTERLACE_METHOD_TEMPORAL,
GST_VDP_DEINTERLACE_METHOD_TEMPORAL_SPATIAL
} GstVdpDeinterlaceMethods;
#define GST_TYPE_VDP_VIDEO_POST_PROCESS (gst_vdp_vpp_get_type())
#define GST_VDP_VIDEO_POST_PROCESS(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_VIDEO_POST_PROCESS,GstVdpVideoPostProcess))
#define GST_VDP_VIDEO_POST_PROCESS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_VIDEO_POST_PROCESS,GstVdpVideoPostProcessClass))
#define GST_IS_VDP_VIDEO_POST_PROCESS(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_VIDEO_POST_PROCESS))
#define GST_IS_VDP_VIDEO_POST_PROCESS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_VIDEO_POST_PROCESS))
typedef struct _GstVdpVideoPostProcess GstVdpVideoPostProcess;
typedef struct _GstVdpVideoPostProcessClass GstVdpVideoPostProcessClass;
/* VDPAU post-processing element: mixes/deinterlaces video surfaces via a
 * VdpVideoMixer, keeping small windows of future and past pictures. */
struct _GstVdpVideoPostProcess
{
  GstElement element;
  GstPad *sinkpad, *srcpad;

  gboolean native_input;        /* input is already VDPAU surfaces */
  VdpChromaType chroma_type;
  gint width, height;
  guint32 fourcc;
  GstBufferPool *vpool;

  gboolean got_par;             /* pixel-aspect-ratio negotiated */
  gint par_n, par_d;

  gboolean interlaced;
  GstClockTime field_duration;  /* duration of one field when interlaced */

  GstSegment segment;
  GstClockTime earliest_time;   /* QoS: earliest time still worth rendering */
  gboolean discont;

  GstVdpDevice *device;
  VdpVideoMixer mixer;

  /* picture windows used as mixer references */
  GstVdpPicture future_pictures[MAX_PICTURES];
  guint n_future_pictures;
  GstVdpPicture past_pictures[MAX_PICTURES];
  guint n_past_pictures;

  gboolean force_aspect_ratio;
  GstVdpDeinterlaceModes mode;
  GstVdpDeinterlaceMethods method;

  /* properties */
  gchar *display;
  gfloat noise_reduction;
  gfloat sharpening;
  gboolean inverse_telecine;
};

struct _GstVdpVideoPostProcessClass
{
  GstElementClass element_class;
};
GType gst_vdp_vpp_get_type (void);
G_END_DECLS
#endif /* __GST_VDP_VIDEO_POST_PROCESS_H__ */

View file

@ -1,427 +0,0 @@
/* GStreamer
*
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "gsth264dpb.h"
#include "gstvdpvideomemory.h"
/* Properties */
enum
{
PROP_0,
PROP_NUM_REF_FRAMES,
PROP_MAX_LONGTERM_IDX
};
GST_DEBUG_CATEGORY_STATIC (h264dpb_debug);
#define GST_CAT_DEFAULT h264dpb_debug
#define DEBUG_INIT \
GST_DEBUG_CATEGORY_INIT (h264dpb_debug, "vdph264dpb", 0, \
"VDPAU H264 DPB");
G_DEFINE_TYPE_WITH_CODE (GstH264DPB, gst_h264_dpb, G_TYPE_OBJECT, DEBUG_INIT);
/* Fills the 16-entry VDPAU reference-frame array from the frames currently
 * held in the DPB; remaining slots are marked VDP_INVALID_HANDLE. */
void
gst_h264_dpb_fill_reference_frames (GstH264DPB * dpb,
    VdpReferenceFrameH264 reference_frames[16])
{
  GstH264Frame **frames;
  guint i;

  frames = dpb->frames;
  for (i = 0; i < dpb->n_frames; i++) {
    GstH264Frame *frame = frames[i];
    GstVdpVideoMemory *vmem =
        (GstVdpVideoMemory *) gst_buffer_get_memory (frame->frame->
        output_buffer, 0);

    reference_frames[i].surface = vmem->surface;

    reference_frames[i].is_long_term = frame->is_long_term;
    reference_frames[i].top_is_reference = frame->is_reference;
    reference_frames[i].bottom_is_reference = frame->is_reference;
    reference_frames[i].field_order_cnt[0] = frame->poc;
    reference_frames[i].field_order_cnt[1] = frame->poc;
    reference_frames[i].frame_idx = frame->frame_idx;

    /* Fixed: gst_buffer_get_memory () returns a new reference that was
     * previously leaked for every reference frame; the output buffer keeps
     * the memory (and surface) alive. */
    gst_memory_unref ((GstMemory *) vmem);
  }

  for (i = dpb->n_frames; i < 16; i++) {
    reference_frames[i].surface = VDP_INVALID_HANDLE;
    reference_frames[i].is_long_term = FALSE;
    reference_frames[i].top_is_reference = VDP_FALSE;
    reference_frames[i].bottom_is_reference = VDP_FALSE;
    reference_frames[i].field_order_cnt[0] = 0;
    reference_frames[i].field_order_cnt[1] = 0;
    reference_frames[i].frame_idx = 0;
  }
}
/* Removes the frame at @idx: unrefs its codec frame and compacts the
 * array.  NOTE(review): the GstH264Frame struct itself is not freed here —
 * confirm its ownership against the allocating code. */
static void
gst_h264_dpb_remove (GstH264DPB * dpb, guint idx)
{
  GstH264Frame **frames;
  guint i;

  frames = dpb->frames;
  gst_video_codec_frame_unref (frames[idx]->frame);
  dpb->n_frames--;

  for (i = idx; i < dpb->n_frames; i++)
    frames[i] = frames[i + 1];
}
/* Pushes the frame at @idx through the registered output callback (with an
 * extra codec-frame ref for the callback), clears its output flag, and
 * drops it from the DPB if it is no longer a reference. */
static GstFlowReturn
gst_h264_dpb_output (GstH264DPB * dpb, guint idx)
{
  GstFlowReturn ret;
  GstH264Frame *frame = dpb->frames[idx];

  gst_video_codec_frame_ref (frame->frame);
  ret = dpb->output (dpb, frame, dpb->user_data);
  frame->output_needed = FALSE;

  if (!frame->is_reference)
    gst_h264_dpb_remove (dpb, idx);

  return ret;
}
/* Outputs ("bumps") the pending frame with the smallest POC, provided that
 * POC is below @poc.  Returns TRUE if a frame was output (and writes the
 * output's flow return to @ret); FALSE otherwise (@ret untouched). */
static gboolean
gst_h264_dpb_bump (GstH264DPB * dpb, guint poc, GstFlowReturn * ret)
{
  GstH264Frame **frames;
  guint i;
  gint bump_idx;

  frames = dpb->frames;
  bump_idx = -1;

  /* find the first frame still awaiting output */
  for (i = 0; i < dpb->n_frames; i++) {
    if (frames[i]->output_needed) {
      bump_idx = i;
      break;
    }
  }

  if (bump_idx != -1) {
    /* then the pending frame with the lowest POC */
    for (i = bump_idx + 1; i < dpb->n_frames; i++) {
      if (frames[i]->output_needed && (frames[i]->poc < frames[bump_idx]->poc)) {
        bump_idx = i;
      }
    }

    if (frames[bump_idx]->poc < poc) {
      *ret = gst_h264_dpb_output (dpb, bump_idx);
      return TRUE;
    }
  }

  return FALSE;
}
/* Adds @h264_frame to the DPB.  Reference frames are stored (bumping older
 * output-pending frames if the DPB is full); non-reference frames are
 * output directly after bumping all pending frames with a smaller POC. */
GstFlowReturn
gst_h264_dpb_add (GstH264DPB * dpb, GstH264Frame * h264_frame)
{
  GstFlowReturn ret = GST_FLOW_OK;

  GST_DEBUG ("add frame with poc: %d", h264_frame->poc);

  /* demote long-term references whose index exceeds the current limit */
  if (h264_frame->is_reference && h264_frame->is_long_term &&
      (h264_frame->frame_idx > dpb->max_longterm_frame_idx))
    h264_frame->is_reference = FALSE;

  if (h264_frame->is_reference) {
    while (dpb->n_frames == dpb->max_frames) {
      if (!gst_h264_dpb_bump (dpb, G_MAXUINT, &ret))
        goto no_room;
    }
    GST_DEBUG ("Storing frame in slot %d", dpb->n_frames);
    dpb->frames[dpb->n_frames++] = h264_frame;
  } else {
    while (gst_h264_dpb_bump (dpb, h264_frame->poc, &ret)) {
      if (ret != GST_FLOW_OK)
        return ret;
    }
    ret = dpb->output (dpb, h264_frame, dpb->user_data);
  }

  return ret;

  /* ERRORS */
no_room:
  {
    /* NOTE(review): deliberately returns GST_FLOW_OK after logging, so a
     * full DPB does not abort decoding — confirm this is intended. */
    GST_ERROR_OBJECT (dpb, "Couldn't make room in DPB");
    return GST_FLOW_OK;
  }
}
/* Empties the DPB.  If @output is TRUE, all pending frames are first
 * pushed through the output callback (in POC order); any remaining frames
 * are simply unreffed. */
void
gst_h264_dpb_flush (GstH264DPB * dpb, gboolean output)
{
  /* Fixed: initialize ret — gst_h264_dpb_bump () only writes it when a
   * frame is actually output, so it was previously used uninitialized
   * (it is also unread here, but this silences compiler warnings). */
  GstFlowReturn ret = GST_FLOW_OK;
  guint i;

  GST_DEBUG ("flush");

  if (output)
    while (gst_h264_dpb_bump (dpb, G_MAXUINT, &ret));

  for (i = 0; i < dpb->n_frames; i++)
    gst_video_codec_frame_unref (dpb->frames[i]->frame);

  dpb->n_frames = 0;
}
/* Sliding-window marking: when the DPB is full, demotes the short-term
 * reference with the smallest frame_idx, removing it entirely if it no
 * longer needs to be output. */
void
gst_h264_dpb_mark_sliding (GstH264DPB * dpb)
{
  GstH264Frame **frames;
  guint i;
  gint mark_idx = -1;

  if (dpb->n_frames != dpb->max_frames)
    return;

  frames = dpb->frames;
  for (i = 0; i < dpb->n_frames; i++) {
    if (frames[i]->is_reference && !frames[i]->is_long_term) {
      mark_idx = i;
      break;
    }
  }

  if (mark_idx != -1) {
    for (i = mark_idx; i < dpb->n_frames; i++) {
      if (frames[i]->is_reference && !frames[i]->is_long_term &&
          frames[i]->frame_idx < frames[mark_idx]->frame_idx)
        mark_idx = i;
    }

    frames[mark_idx]->is_reference = FALSE;
    if (!frames[mark_idx]->output_needed)
      gst_h264_dpb_remove (dpb, mark_idx);
  }
}
/* Converts the short-term reference matching @pic_num into a long-term
 * reference with index @long_term_frame_idx.  No-op if no match. */
void
gst_h264_dpb_mark_long_term (GstH264DPB * dpb, guint16 pic_num,
    guint16 long_term_frame_idx)
{
  GstH264Frame **frames;
  guint i;
  gint mark_idx = -1;

  frames = dpb->frames;
  for (i = 0; i < dpb->n_frames; i++) {
    if (frames[i]->is_reference && !frames[i]->is_long_term &&
        frames[i]->frame_idx == pic_num) {
      mark_idx = i;
      break;
    }
  }

  if (mark_idx != -1) {
    frames[mark_idx]->is_long_term = TRUE;
    frames[mark_idx]->frame_idx = long_term_frame_idx;
  }
}
/* Demotes the short-term reference matching @pic_num, removing it from the
 * DPB if it no longer needs to be output.  No-op if no match. */
void
gst_h264_dpb_mark_short_term_unused (GstH264DPB * dpb, guint16 pic_num)
{
  GstH264Frame **frames;
  guint i;
  gint mark_idx = -1;

  frames = dpb->frames;
  for (i = 0; i < dpb->n_frames; i++) {
    if (frames[i]->is_reference && !frames[i]->is_long_term &&
        frames[i]->frame_idx == pic_num) {
      mark_idx = i;
      break;
    }
  }

  if (mark_idx != -1) {
    frames[mark_idx]->is_reference = FALSE;
    if (!frames[mark_idx]->output_needed)
      gst_h264_dpb_remove (dpb, mark_idx);
  }
}
/* Demotes the long-term reference matching @long_term_pic_num, removing it
 * from the DPB if it no longer needs to be output.  No-op if no match. */
void
gst_h264_dpb_mark_long_term_unused (GstH264DPB * dpb, guint16 long_term_pic_num)
{
  GstH264Frame **frames;
  guint i;
  gint mark_idx = -1;

  frames = dpb->frames;
  for (i = 0; i < dpb->n_frames; i++) {
    if (frames[i]->is_reference && frames[i]->is_long_term &&
        frames[i]->frame_idx == long_term_pic_num) {
      mark_idx = i;
      break;
    }
  }

  if (mark_idx != -1) {
    frames[mark_idx]->is_reference = FALSE;
    if (!frames[mark_idx]->output_needed)
      gst_h264_dpb_remove (dpb, mark_idx);
  }
}
/* Demotes every reference frame, removing those that no longer need to be
 * output.  The i-- compensates for the compaction done by
 * gst_h264_dpb_remove () so no entry is skipped. */
void
gst_h264_dpb_mark_all_unused (GstH264DPB * dpb)
{
  GstH264Frame **frames;
  guint i;

  frames = dpb->frames;
  for (i = 0; i < dpb->n_frames; i++) {
    frames[i]->is_reference = FALSE;
    if (!frames[i]->output_needed) {
      gst_h264_dpb_remove (dpb, i);
      i--;
    }
  }
}
/* Registers the callback used to push decoded frames out of the DPB,
 * together with the opaque pointer handed back to it on every call. */
void
gst_h264_dpb_set_output_func (GstH264DPB * dpb, GstH264DPBOutputFunc func,
    gpointer user_data)
{
  g_return_if_fail (GST_IS_H264_DPB (dpb));

  dpb->output = func;
  dpb->user_data = user_data;
}
/* GObject vmethod implementations */
static void
gst_h264_dpb_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
GstH264DPB *dpb = GST_H264_DPB (object);
switch (property_id) {
case PROP_NUM_REF_FRAMES:
g_value_set_uint (value, dpb->max_frames);
break;
case PROP_MAX_LONGTERM_IDX:
g_value_set_int (value, dpb->max_longterm_frame_idx);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
/* GObject set_property: applies the new limits immediately — shrinking the
 * DPB bumps frames out, and lowering the long-term index prunes stale
 * long-term references. */
static void
gst_h264_dpb_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstH264DPB *dpb = GST_H264_DPB (object);

  switch (property_id) {
    case PROP_NUM_REF_FRAMES:
    {
      GstFlowReturn ret;
      guint i;

      dpb->max_frames = g_value_get_uint (value);
      for (i = dpb->n_frames; i > dpb->max_frames; i--)
        gst_h264_dpb_bump (dpb, G_MAXUINT, &ret);

      break;
    }
    case PROP_MAX_LONGTERM_IDX:
    {
      GstH264Frame **frames;
      guint i;

      dpb->max_longterm_frame_idx = g_value_get_int (value);

      /* Fixed: the loop previously started at i = dpb->n_frames with the
       * condition i < dpb->n_frames, so it never executed and over-limit
       * long-term references were never pruned. */
      frames = dpb->frames;
      for (i = 0; i < dpb->n_frames; i++) {
        if (frames[i]->is_reference && frames[i]->is_long_term &&
            frames[i]->frame_idx > dpb->max_longterm_frame_idx) {
          frames[i]->is_reference = FALSE;
          if (!frames[i]->output_needed) {
            gst_h264_dpb_remove (dpb, i);
            i--;
          }
        }
      }
      break;
    }

    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
/* Finalize: unrefs every codec frame still held in the DPB and chains up. */
static void
gst_h264_dpb_finalize (GObject * object)
{
  GstH264DPB *dpb = GST_H264_DPB (object);
  guint i;

  for (i = 0; i < dpb->n_frames; i++)
    gst_video_codec_frame_unref (dpb->frames[i]->frame);

  G_OBJECT_CLASS (gst_h264_dpb_parent_class)->finalize (object);
}
/* Instance init: empty DPB, no long-term limit (-1), maximum capacity. */
static void
gst_h264_dpb_init (GstH264DPB * dpb)
{
  dpb->n_frames = 0;
  dpb->max_longterm_frame_idx = -1;
  dpb->max_frames = MAX_DPB_SIZE;
}
/* Class init: installs GObject vfuncs and the two tuning properties
 * (setting them takes effect immediately — see set_property). */
static void
gst_h264_dpb_class_init (GstH264DPBClass * klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  object_class->finalize = gst_h264_dpb_finalize;
  object_class->set_property = gst_h264_dpb_set_property;
  object_class->get_property = gst_h264_dpb_get_property;

  g_object_class_install_property (object_class, PROP_NUM_REF_FRAMES,
      g_param_spec_uint ("num-ref-frames", "Num Ref Frames",
          "How many reference frames the DPB should hold ",
          0, 16, 16, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (object_class, PROP_MAX_LONGTERM_IDX,
      g_param_spec_int ("max-longterm-frame-idx", "MaxLongTermFrameIDX",
          "Maximum long-term frame index",
          -1, G_MAXINT, -1, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}

View file

@ -1,98 +0,0 @@
/* GStreamer
*
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_H264_DPB_H_
#define _GST_H264_DPB_H_
#include <glib-object.h>
#include <vdpau/vdpau.h>
#include <gst/video/video.h>
#include <gst/codecparsers/gsth264meta.h>
G_BEGIN_DECLS
#define MAX_DPB_SIZE 16
#define GST_TYPE_H264_DPB (gst_h264_dpb_get_type ())
#define GST_H264_DPB(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_H264_DPB, GstH264DPB))
#define GST_H264_DPB_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_H264_DPB, GstH264DPBClass))
#define GST_IS_H264_DPB(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_H264_DPB))
#define GST_IS_H264_DPB_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_H264_DPB))
#define GST_H264_DPB_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_H264_DPB, GstH264DPBClass))
typedef struct _GstH264DPB GstH264DPB;
typedef struct _GstH264DPBClass GstH264DPBClass;
/* One decoded picture tracked by the DPB. */
typedef struct _GstH264Frame
{
  GstVideoCodecFrame *frame;    /* the decoder's codec frame (reffed) */

  guint poc;                    /* picture order count */
  guint16 frame_idx;            /* frame_num / long-term frame index */
  gboolean is_reference;        /* still usable as a reference */
  gboolean is_long_term;        /* long-term (vs. short-term) reference */
  gboolean output_needed;       /* not yet pushed downstream */
} GstH264Frame;
typedef GstFlowReturn (*GstH264DPBOutputFunc) (GstH264DPB *dpb, GstH264Frame *h264_frame, gpointer user_data);
struct _GstH264DPB
{
GObject parent_instance;
/* private */
GstH264Frame *frames[MAX_DPB_SIZE];
guint n_frames;
guint max_frames;
gint max_longterm_frame_idx;
GstH264DPBOutputFunc output;
gpointer user_data;
};
struct _GstH264DPBClass
{
GObjectClass parent_class;
};
void
gst_h264_dpb_fill_reference_frames (GstH264DPB *dpb, VdpReferenceFrameH264 reference_frames[16]);
gboolean gst_h264_dpb_add (GstH264DPB *dpb, GstH264Frame *h264_frame);
void gst_h264_dpb_flush (GstH264DPB *dpb, gboolean output);
void gst_h264_dpb_mark_sliding (GstH264DPB *dpb);
void gst_h264_dpb_mark_long_term_unused (GstH264DPB *dpb, guint16 long_term_pic_num);
void gst_h264_dpb_mark_short_term_unused (GstH264DPB *dpb, guint16 pic_num);
void gst_h264_dpb_mark_all_unused (GstH264DPB *dpb);
void gst_h264_dpb_mark_long_term (GstH264DPB *dpb, guint16 pic_num, guint16 long_term_frame_idx);
void gst_h264_dpb_set_output_func (GstH264DPB *dpb, GstH264DPBOutputFunc func,
gpointer user_data);
GType gst_h264_dpb_get_type (void) G_GNUC_CONST;
G_END_DECLS
#endif /* _GST_H264_DPB_H_ */

View file

@ -1,581 +0,0 @@
/* GStreamer
*
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/codecparsers/gsth264parser.h>
#include <gst/codecparsers/gsth264meta.h>
#include <string.h>
#include "gstvdph264dec.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_h264_dec_debug);
#define GST_CAT_DEFAULT gst_vdp_h264_dec_debug

/* The decoder only accepts byte-stream, AU-aligned H.264 input so that a
 * complete picture arrives per buffer. */
static GstStaticPadTemplate sink_template =
GST_STATIC_PAD_TEMPLATE (GST_VIDEO_DECODER_SINK_NAME,
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-h264,stream-format=byte-stream,alignment=au")
    );

/* Register the debug category as part of the type registration below. */
#define DEBUG_INIT \
    GST_DEBUG_CATEGORY_INIT (gst_vdp_h264_dec_debug, "vdpauh264dec", 0, \
    "VDPAU h264 decoder");

#define gst_vdp_h264_dec_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstVdpH264Dec, gst_vdp_h264_dec, GST_TYPE_VDP_DECODER,
    DEBUG_INIT);
/* DPB output callback: push a decoded frame downstream in output (POC)
 * order.  @user_data is the GstVideoDecoder that owns the frames. */
static GstFlowReturn
gst_vdp_h264_dec_output (GstH264DPB * dpb, GstH264Frame * h264_frame,
    gpointer user_data)
{
  GstVideoDecoder *decoder = (GstVideoDecoder *) user_data;

  GST_DEBUG ("poc: %d", h264_frame->poc);

  return gst_video_decoder_finish_frame (decoder, h264_frame->frame);
}
/* Compute the picture order count for @slice (H.264 clause 8.2.1).
 *
 * Only pic_order_cnt_type == 0 is implemented: the POC is reconstructed
 * from the lsb carried in the slice header plus an msb tracked across
 * pictures, with wrap detection in both directions.
 *
 * NOTE(review): pic_order_cnt_type 1 and 2 fall through and return 0, so
 * all frames of such streams would share poc 0 — confirm this is the
 * intended limitation.
 */
static guint
gst_vdp_h264_dec_calculate_poc (GstVdpH264Dec * h264_dec,
    GstH264SliceHdr * slice)
{
  GstH264SPS *seq;
  guint poc = 0;

  seq = slice->pps->sequence;

  if (seq->pic_order_cnt_type == 0) {
    /* MaxPicOrderCntLsb = 2^(log2_max_pic_order_cnt_lsb_minus4 + 4) */
    guint32 max_poc_cnt_lsb = 1 << (seq->log2_max_pic_order_cnt_lsb_minus4 + 4);

    /* lsb wrapped forwards: bump the msb by one period. */
    if ((slice->pic_order_cnt_lsb < h264_dec->prev_poc_lsb) &&
        ((h264_dec->prev_poc_lsb - slice->pic_order_cnt_lsb) >=
            (max_poc_cnt_lsb / 2)))
      h264_dec->poc_msb = h264_dec->poc_msb + max_poc_cnt_lsb;
    /* lsb wrapped backwards: drop the msb by one period. */
    else if ((slice->pic_order_cnt_lsb > h264_dec->prev_poc_lsb) &&
        ((slice->pic_order_cnt_lsb - h264_dec->prev_poc_lsb) >
            (max_poc_cnt_lsb / 2)))
      h264_dec->poc_msb = h264_dec->poc_msb - max_poc_cnt_lsb;

    poc = h264_dec->poc_msb + slice->pic_order_cnt_lsb;

    h264_dec->prev_poc_lsb = slice->pic_order_cnt_lsb;
  }

  return poc;
}
/* Initialise the per-picture bookkeeping for @h264_frame from the first
 * slice header: POC, reference status and (long-term) frame index.
 *
 * Reference rules (H.264 clause 7.4.3.3):
 *  - nal_ref_idc == 0        -> not a reference picture
 *  - IDR slice               -> reference; long-term iff the slice's
 *                               long_term_reference_flag is set (idx 0)
 *  - other reference slices  -> reference; long-term only when an MMCO 6
 *                               operation assigns a long_term_frame_idx
 */
static void
gst_vdp_h264_dec_init_frame_info (GstVdpH264Dec * h264_dec,
    GstH264Frame * h264_frame, GstH264SliceHdr * slice)
{
  h264_frame->poc = gst_vdp_h264_dec_calculate_poc (h264_dec, slice);

  h264_frame->output_needed = TRUE;
  h264_frame->is_long_term = FALSE;
  h264_frame->frame_idx = slice->frame_num;

  /* is reference */
  if (slice->nalu_ref_idc == 0)
    h264_frame->is_reference = FALSE;
  else if (slice->slice_type == GST_H264_NAL_SLICE_IDR) {
    h264_frame->is_reference = TRUE;
    if (slice->dec_ref_pic_marking.long_term_reference_flag) {
      h264_frame->is_long_term = TRUE;
      h264_frame->frame_idx = 0;
    }
  } else {
    h264_frame->is_reference = TRUE;

    if (slice->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) {
      GstH264RefPicMarking *marking;
      guint i;

      /* Scan the MMCO list for op 6: "mark current picture long-term". */
      marking = slice->dec_ref_pic_marking.ref_pic_marking;
      for (i = 0; i < slice->dec_ref_pic_marking.n_ref_pic_marking; i++) {
        if (marking[i].memory_management_control_operation == 6) {
          h264_frame->is_long_term = TRUE;
          h264_frame->frame_idx = marking[i].long_term_frame_idx;
          break;
        }
      }
    }
  }
}
/* Handle the first slice of an IDR picture.
 *
 * Resets POC tracking, flushes the DPB (outputting pending frames unless
 * no_output_of_prior_pics_flag is set) and, when the active SPS changed,
 * renegotiates the output caps and (re)creates the VDPAU decoder for the
 * stream's profile.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR for unsupported
 * profiles and GST_FLOW_NOT_NEGOTIATED when caps negotiation fails.
 */
static GstFlowReturn
gst_vdp_h264_dec_idr (GstVdpH264Dec * h264_dec, GstVideoCodecFrame * frame,
    GstH264SliceHdr * slice)
{
  GstH264SPS *seq;

  GST_DEBUG_OBJECT (h264_dec, "Handling IDR slice");

  /* An IDR restarts picture-order counting. */
  h264_dec->poc_msb = 0;
  h264_dec->prev_poc_lsb = 0;

  if (slice->dec_ref_pic_marking.no_output_of_prior_pics_flag)
    gst_h264_dpb_flush (h264_dec->dpb, FALSE);
  else
    gst_h264_dpb_flush (h264_dec->dpb, TRUE);

  /* IDR long-term marking: the IDR itself becomes long-term idx 0,
   * otherwise long-term references are disabled (-1). */
  if (slice->dec_ref_pic_marking.long_term_reference_flag)
    g_object_set (h264_dec->dpb, "max-longterm-frame-idx", 0, NULL);
  else
    g_object_set (h264_dec->dpb, "max-longterm-frame-idx", -1, NULL);

  seq = slice->pps->sequence;
  if (seq->id != h264_dec->current_sps) {
    GstVideoCodecState *state;
    VdpDecoderProfile profile;
    GstFlowReturn ret;

    GST_DEBUG_OBJECT (h264_dec, "Sequence changed !");

    /* VDPAU surfaces are exposed as YV12 system memory. */
    state =
        gst_video_decoder_set_output_state (GST_VIDEO_DECODER (h264_dec),
        GST_VIDEO_FORMAT_YV12, seq->width, seq->height, h264_dec->input_state);

    /* calculate framerate if we haven't got one */
    if (state->info.fps_n == 0) {
      state->info.fps_n = seq->fps_num;
      state->info.fps_d = seq->fps_den;
    }
    /* Fall back to the SPS VUI aspect ratio when upstream gave none. */
    if (state->info.par_n == 0 && seq->vui_parameters_present_flag) {
      state->info.par_n = seq->vui_parameters.par_n;
      state->info.par_d = seq->vui_parameters.par_d;
    }

    if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (h264_dec)))
      goto nego_fail;

    /* Map profile_idc to the VDPAU decoder profile; anything outside
     * baseline/main/high is rejected. */
    switch (seq->profile_idc) {
      case 66:
        profile = VDP_DECODER_PROFILE_H264_BASELINE;
        break;
      case 77:
        profile = VDP_DECODER_PROFILE_H264_MAIN;
        break;
      case 100:
        profile = VDP_DECODER_PROFILE_H264_HIGH;
        break;
      default:
        goto profile_not_suported;
    }

    ret = gst_vdp_decoder_init_decoder (GST_VDP_DECODER (h264_dec), profile,
        seq->num_ref_frames, h264_dec->input_state);
    if (ret != GST_FLOW_OK)
      return ret;

    g_object_set (h264_dec->dpb, "num-ref-frames", seq->num_ref_frames, NULL);

    h264_dec->current_sps = seq->id;
  }

  return GST_FLOW_OK;

  /* ERRORS */
profile_not_suported:
  {
    GST_ELEMENT_ERROR (h264_dec, STREAM, WRONG_TYPE,
        ("vdpauh264dec doesn't support this streams profile"),
        ("profile_idc: %d", seq->profile_idc));
    return GST_FLOW_ERROR;
  }

nego_fail:
  {
    GST_ERROR_OBJECT (h264_dec, "Negotiation failed");
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
/* Populate the VdpPictureInfoH264 passed to VdpDecoderRender from the
 * current slice header, its PPS/SPS and the DPB reference list.
 *
 * Mostly a 1:1 field copy between the parsed headers and the VDPAU
 * structure.
 */
static void
gst_vdp_h264_dec_fill_info (VdpPictureInfoH264 * info, GstVdpH264Dec * h264_dec,
    GstH264Frame * h264_frame, GstH264SliceHdr * slice)
{
  GstH264PPS *pic;
  GstH264SPS *seq;

  pic = slice->pps;
  seq = pic->sequence;

  GST_DEBUG_OBJECT (h264_dec, "Filling info");

  /* FIXME: we only handle frames for now */
  /* Both fields get the frame POC since field decoding is unsupported. */
  info->field_order_cnt[0] = h264_frame->poc;
  info->field_order_cnt[1] = h264_frame->poc;

  info->is_reference = h264_frame->is_reference;

  /* Slice-level fields. */
  info->frame_num = slice->frame_num;
  info->field_pic_flag = slice->field_pic_flag;
  info->bottom_field_flag = slice->bottom_field_flag;
  info->num_ref_idx_l0_active_minus1 = slice->num_ref_idx_l0_active_minus1;
  info->num_ref_idx_l1_active_minus1 = slice->num_ref_idx_l1_active_minus1;

  /* SPS-level fields. */
  info->num_ref_frames = seq->num_ref_frames;
  info->mb_adaptive_frame_field_flag = seq->mb_adaptive_frame_field_flag;
  info->frame_mbs_only_flag = seq->frame_mbs_only_flag;
  info->log2_max_frame_num_minus4 = seq->log2_max_frame_num_minus4;
  info->pic_order_cnt_type = seq->pic_order_cnt_type;
  info->log2_max_pic_order_cnt_lsb_minus4 =
      seq->log2_max_pic_order_cnt_lsb_minus4;
  info->delta_pic_order_always_zero_flag =
      seq->delta_pic_order_always_zero_flag;
  info->direct_8x8_inference_flag = seq->direct_8x8_inference_flag;

  /* PPS-level fields. */
  info->constrained_intra_pred_flag = pic->constrained_intra_pred_flag;
  info->weighted_pred_flag = pic->weighted_pred_flag;
  info->weighted_bipred_idc = pic->weighted_bipred_idc;
  info->transform_8x8_mode_flag = pic->transform_8x8_mode_flag;
  info->chroma_qp_index_offset = pic->chroma_qp_index_offset;
  info->second_chroma_qp_index_offset = pic->second_chroma_qp_index_offset;
  info->pic_init_qp_minus26 = pic->pic_init_qp_minus26;
  info->entropy_coding_mode_flag = pic->entropy_coding_mode_flag;
  info->pic_order_present_flag = pic->pic_order_present_flag;
  info->deblocking_filter_control_present_flag =
      pic->deblocking_filter_control_present_flag;
  info->redundant_pic_cnt_present_flag = pic->redundant_pic_cnt_present_flag;

  /* 96 = 6 lists * 16 coefficients (4x4); 128 = 2 lists * 64 (8x8). */
  memcpy (&info->scaling_lists_4x4, &pic->scaling_lists_4x4, 96);
  memcpy (&info->scaling_lists_8x8, &pic->scaling_lists_8x8, 128);

  gst_h264_dpb_fill_reference_frames (h264_dec->dpb, info->referenceFrames);
}
/* Build the VdpBitstreamBuffer array describing every slice of the frame.
 *
 * Each entry points at one slice inside the mapped input buffer @info and
 * carries its size: slice i runs from slice_offsets[i] to the start of
 * slice i+1, and the last slice runs to the end of the buffer.
 *
 * Returns a newly allocated array of meta->num_slices entries; the caller
 * frees it with g_free() after rendering.
 *
 * FIX: the original tested `i == meta->num_slices` inside a loop bounded
 * by `i < meta->num_slices`, which can never be true — the last iteration
 * therefore read slice_offsets[num_slices], one past the end of the
 * array.  It also based `bitstream` on the previous iteration's end
 * offset (0 for the first slice) while computing the size relative to
 * slice_offsets[i]; both now consistently use slice_offsets[i].
 */
static VdpBitstreamBuffer *
gst_vdp_h264_dec_create_bitstream_buffers (GstVdpH264Dec * h264_dec,
    GstH264Meta * meta, GstMapInfo * info)
{
  VdpBitstreamBuffer *bufs;
  guint i;

  bufs = g_new (VdpBitstreamBuffer, meta->num_slices);

  for (i = 0; i < meta->num_slices; i++) {
    gsize start = meta->slice_offsets[i];
    /* The last slice extends to the end of the mapped buffer. */
    gsize end = (i + 1 == meta->num_slices) ?
        info->size : meta->slice_offsets[i + 1];

    bufs[i].bitstream = info->data + start;
    bufs[i].bitstream_bytes = end - start;
    bufs[i].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  }

  return bufs;
}
/* Apply the slice's decoded-reference-picture marking to the DPB, then
 * add the just-decoded frame to it.
 *
 * For non-IDR reference slices, either execute the explicit memory
 * management control operations (adaptive marking) or fall back to
 * sliding-window marking.  MMCO semantics per H.264 clause 7.4.3.3:
 *   1: mark a short-term picture unused for reference
 *   2: mark a long-term picture unused for reference
 *   3: convert a short-term picture into a long-term reference
 *   4: set the maximum long-term frame index (value is plus1; -1 disables)
 *   5: mark everything unused and disable long-term references
 * (Op 6 — mark the current picture long-term — was already handled in
 * gst_vdp_h264_dec_init_frame_info().)
 *
 * Returns the result of gst_h264_dpb_add(), which may output frames.
 */
static GstFlowReturn
gst_vdp_h264_dec_handle_dpb (GstVdpH264Dec * h264_dec,
    GstH264Frame * h264_frame, GstH264SliceHdr * slice)
{
  /* Marking only applies to reference, non-IDR slices; IDR marking is
   * done in gst_vdp_h264_dec_idr(). */
  if (slice->nalu_ref_idc != 0 && slice->slice_type != GST_H264_NAL_SLICE_IDR) {
    if (slice->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) {
      GstH264RefPicMarking *marking;
      guint i;

      marking = slice->dec_ref_pic_marking.ref_pic_marking;
      for (i = 0; i < slice->dec_ref_pic_marking.n_ref_pic_marking; i++) {

        switch (marking[i].memory_management_control_operation) {
          case 1:
          {
            guint16 pic_num;

            /* PicNum of the short-term picture to drop. */
            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_short_term_unused (h264_dec->dpb, pic_num);
            break;
          }

          case 2:
          {
            gst_h264_dpb_mark_long_term_unused (h264_dec->dpb,
                marking[i].long_term_pic_num);
            break;
          }

          case 3:
          {
            guint16 pic_num;

            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_long_term (h264_dec->dpb, pic_num,
                marking[i].long_term_frame_idx);
            break;
          }

          case 4:
          {
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx",
                marking[i].max_long_term_frame_idx_plus1 - 1, NULL);
            break;
          }

          case 5:
          {
            gst_h264_dpb_mark_all_unused (h264_dec->dpb);
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx", -1, NULL);
            break;
          }

          default:
            break;
        }
      }
    } else
      gst_h264_dpb_mark_sliding (h264_dec->dpb);
  }

  return gst_h264_dpb_add (h264_dec->dpb, h264_frame);
}
/* GDestroyNotify for the GstH264Frame attached to a codec frame as user
 * data (see gst_video_codec_frame_set_user_data in handle_frame). */
static void
gst_h264_frame_free (GstH264Frame * frame)
{
  g_slice_free (GstH264Frame, frame);
}
/* GstVideoDecoder::handle_frame — decode one access unit.
 *
 * The input buffer must carry a GstH264Meta (produced upstream) holding
 * the parsed SPS/PPS sets and the per-slice headers/offsets.  Flow:
 *  1. store any SPS/PPS found on the meta
 *  2. drop everything until the first IDR
 *  3. on IDR, (re)configure via gst_vdp_h264_dec_idr()
 *  4. fill the VDPAU picture info and bitstream buffers and render
 *  5. hand the frame to the DPB, which outputs frames in POC order
 *
 * NOTE(review): g_slice_dup overwrites any previously stored SPS/PPS with
 * the same id without freeing the old copy — looks like a slow leak when
 * parameter sets are resent; confirm.
 */
static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstVideoDecoder * video_decoder,
    GstVideoCodecFrame * frame)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (video_decoder);
  GstH264Meta *h264_meta;
  GstH264Frame *h264_frame;
  GList *tmp;
  GstFlowReturn ret;
  VdpPictureInfoH264 info;
  VdpBitstreamBuffer *bufs;
  GstH264SliceHdr *first_slice;
  guint i;
  GstMapInfo map;

  GST_DEBUG ("handle_frame");

  h264_meta = gst_buffer_get_h264_meta (frame->input_buffer);
  if (G_UNLIKELY (h264_meta == NULL))
    goto no_h264_meta;

  if (G_UNLIKELY (h264_meta->num_slices == 0))
    goto no_slices;

  /* Handle PPS/SPS/SEI if present */
  if (h264_meta->sps) {
    for (tmp = h264_meta->sps; tmp; tmp = tmp->next) {
      GstH264SPS *sps = (GstH264SPS *) tmp->data;
      GST_LOG_OBJECT (h264_dec, "Storing SPS %d", sps->id);
      h264_dec->sps[sps->id] = g_slice_dup (GstH264SPS, sps);
    }
  }
  if (h264_meta->pps) {
    for (tmp = h264_meta->pps; tmp; tmp = tmp->next) {
      GstH264PPS *pps = (GstH264PPS *) tmp->data;
      GST_LOG_OBJECT (h264_dec, "Storing PPS %d", pps->id);
      h264_dec->pps[pps->id] = g_slice_dup (GstH264PPS, pps);
      /* Adjust pps pointer */
      /* The duplicated PPS must point at our own stored SPS copy, not at
       * the meta's transient one. */
      h264_dec->pps[pps->id]->sequence = h264_dec->sps[pps->sps_id];
    }
  }

  first_slice = &h264_meta->slices[0];

  /* Nothing is decodable before the first IDR. */
  if (!h264_dec->got_idr && first_slice->slice_type != GST_H264_NAL_SLICE_IDR)
    goto no_idr;

  /* Handle slices */
  /* Resolve each slice's pps_id to our stored PPS copy. */
  for (i = 0; i < h264_meta->num_slices; i++) {
    GstH264SliceHdr *slice = &h264_meta->slices[i];

    GST_LOG_OBJECT (h264_dec, "Handling slice #%d", i);
    slice->pps = h264_dec->pps[slice->pps_id];
  }

  if (first_slice->slice_type == GST_H264_NAL_SLICE_IDR) {
    ret = gst_vdp_h264_dec_idr (h264_dec, frame, first_slice);
    if (ret == GST_FLOW_OK)
      h264_dec->got_idr = TRUE;
    else
      goto skip_frame;
  }

  /* Attach per-picture state to the codec frame; freed with the frame. */
  h264_frame = g_slice_new0 (GstH264Frame);
  gst_video_codec_frame_set_user_data (frame, h264_frame,
      (GDestroyNotify) gst_h264_frame_free);

  gst_vdp_h264_dec_init_frame_info (h264_dec, h264_frame, first_slice);
  h264_frame->frame = frame;

  gst_vdp_h264_dec_fill_info (&info, h264_dec, h264_frame, first_slice);
  info.slice_count = h264_meta->num_slices;

  if (!gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ))
    goto map_fail;

  bufs = gst_vdp_h264_dec_create_bitstream_buffers (h264_dec, h264_meta, &map);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (h264_dec),
      (VdpPictureInfo *) & info, h264_meta->num_slices, bufs, frame);
  g_free (bufs);
  gst_buffer_unmap (frame->input_buffer, &map);

  if (ret != GST_FLOW_OK)
    goto render_fail;

  /* DPB handling */
  /* The DPB decides when (in POC order) frames are pushed downstream. */
  return gst_vdp_h264_dec_handle_dpb (h264_dec, h264_frame, first_slice);

  /* EARLY exit */

no_idr:
  {
    GST_DEBUG_OBJECT (video_decoder, "Didn't see a IDR yet, skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }

skip_frame:
  {
    GST_DEBUG_OBJECT (video_decoder, "Skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }

  /* ERRORS */
no_h264_meta:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have GstH264Meta");
    return GST_FLOW_ERROR;
  }

no_slices:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have any slices");
    return GST_FLOW_ERROR;
  }

map_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to map input buffer for READ");
    return GST_FLOW_ERROR;
  }

render_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to render : %s",
        gst_flow_get_name (ret));
    gst_video_decoder_drop_frame (video_decoder, frame);
    return ret;
  }
}
/* GstVideoDecoder::flush — discard all decoding state.
 * Drops the DPB contents without outputting them and forces the decoder
 * to wait for the next IDR. */
static gboolean
gst_vdp_h264_dec_flush (GstVideoDecoder * video_decoder)
{
  GstVdpH264Dec *self = GST_VDP_H264_DEC (video_decoder);

  self->got_idr = FALSE;
  gst_h264_dpb_flush (self->dpb, FALSE);

  return TRUE;
}
/* GstVideoDecoder::start — set up per-stream state and create the DPB.
 *
 * FIX: the original assigned got_idr = FALSE twice; the duplicate is
 * removed.  Behaviour is otherwise unchanged.
 */
static gboolean
gst_vdp_h264_dec_start (GstVideoDecoder * video_decoder)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (video_decoder);

  /* No IDR seen yet, no SPS in use (-1 forces reconfiguration on the
   * first IDR). */
  h264_dec->got_idr = FALSE;
  h264_dec->current_sps = -1;

  h264_dec->dpb = g_object_new (GST_TYPE_H264_DPB, NULL);
  gst_h264_dpb_set_output_func (h264_dec->dpb, gst_vdp_h264_dec_output,
      h264_dec);

  return GST_VIDEO_DECODER_CLASS (parent_class)->start (video_decoder);
}
/* GstVideoDecoder::stop — release the DPB created in start() and chain up.
 *
 * NOTE(review): the input_state reference taken in set_format() is not
 * released here; confirm it is unreffed elsewhere (e.g. finalize) or this
 * leaks one GstVideoCodecState per stream.
 */
static gboolean
gst_vdp_h264_dec_stop (GstVideoDecoder * video_decoder)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (video_decoder);

  g_object_unref (h264_dec->dpb);

  return GST_VIDEO_DECODER_CLASS (parent_class)->stop (video_decoder);
}
/* GstVideoDecoder::set_format — remember the latest upstream caps state.
 * Keeps a ref to @state for later negotiation in gst_vdp_h264_dec_idr(). */
static gboolean
gst_vdp_h264_dec_set_format (GstVideoDecoder * video_decoder,
    GstVideoCodecState * state)
{
  GstVdpH264Dec *self = GST_VDP_H264_DEC (video_decoder);

  /* Swap out any previously stored input state. */
  if (self->input_state != NULL)
    gst_video_codec_state_unref (self->input_state);
  self->input_state = gst_video_codec_state_ref (state);

  GST_FIXME_OBJECT (video_decoder, "Do something when receiving input state ?");

  return TRUE;
}
/* GObject instance init — intentionally empty; all per-stream state is
 * initialised in gst_vdp_h264_dec_start(). */
static void
gst_vdp_h264_dec_init (GstVdpH264Dec * h264_dec)
{
}
/* GObject class init — register element metadata, the sink pad template
 * and the GstVideoDecoder virtual methods. */
static void
gst_vdp_h264_dec_class_init (GstVdpH264DecClass * klass)
{
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
  GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);

  /* Metadata shown by gst-inspect-1.0. */
  gst_element_class_set_static_metadata (gstelement_class,
      "VDPAU H264 Decoder",
      "Decoder",
      "Decode h264 stream with vdpau",
      "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
  gst_element_class_add_static_pad_template (gstelement_class, &sink_template);

  /* GstVideoDecoder vfuncs. */
  decoder_class->start = gst_vdp_h264_dec_start;
  decoder_class->stop = gst_vdp_h264_dec_stop;
  decoder_class->flush = gst_vdp_h264_dec_flush;
  decoder_class->set_format = gst_vdp_h264_dec_set_format;
  decoder_class->handle_frame = gst_vdp_h264_dec_handle_frame;
}

View file

@ -1,70 +0,0 @@
/* GStreamer
*
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_VDP_H264_DEC_H__
#define __GST_VDP_H264_DEC_H__

#include <gst/gst.h>
#include <gst/codecparsers/gsth264parser.h>

#include "../gstvdpdecoder.h"
#include "gsth264dpb.h"

G_BEGIN_DECLS

#define GST_TYPE_VDP_H264_DEC            (gst_vdp_h264_dec_get_type())
#define GST_VDP_H264_DEC(obj)            (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_H264_DEC,GstVdpH264Dec))
#define GST_VDP_H264_DEC_CLASS(klass)    (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_H264_DEC,GstVdpH264DecClass))
#define GST_VDP_H264_DEC_GET_CLASS(obj)  (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_VDP_H264_DEC,GstVdpH264DecClass))
#define GST_IS_VDP_H264_DEC(obj)         (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_H264_DEC))
#define GST_IS_VDP_H264_DEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_H264_DEC))
#define GST_VDP_H264_DEC_CAST(obj)       ((GstVdpH264Dec *)(obj))

typedef struct _GstVdpH264Dec GstVdpH264Dec;
typedef struct _GstVdpH264DecClass GstVdpH264DecClass;

/* VDPAU-accelerated H.264 video decoder element. */
struct _GstVdpH264Dec {
  GstVdpDecoder vdp_decoder;

  GstH264DPB *dpb;              /* decoded picture buffer (output reordering) */

  /* Stored copies of the parameter sets seen on the stream, indexed by id. */
  GstH264SPS *sps[GST_H264_MAX_SPS_COUNT];
  GstH264PPS *pps[GST_H264_MAX_PPS_COUNT];

  /* Current SPS being used. Default:-1 */
  gint current_sps;
  gboolean got_idr;             /* TRUE once the first IDR was decoded */

  GstVideoCodecState *input_state;      /* latest upstream caps state */

  /* Picture-order-count tracking (pic_order_cnt_type 0). */
  guint poc_msb;
  guint prev_poc_lsb;
};

struct _GstVdpH264DecClass {
  GstVdpDecoderClass vdp_decoder_class;
};

GType gst_vdp_h264_dec_get_type (void);

G_END_DECLS

#endif /* __GST_VDP_H264_DEC_H__ */

View file

@ -1,30 +0,0 @@
# VDPAU plugin: source list, dependency lookup and library target.
vdpau_sources = [
  'gstvdpau.c',
  'gstvdputils.c',
  'gstvdpvideomemory.c',
  'gstvdpvideobufferpool.c',
  'gstvdpdevice.c',
  'gstvdpdecoder.c',
  'mpeg/gstvdpmpegdec.c',
  # h264 decoder sources are currently disabled (not built):
  # 'h264/gsth264dpb.c',
  # 'h264/gstvdph264dec.c',
]

# 'vdpau' is a feature option: auto-detected by default, hard-required
# only when the user enables it explicitly.
vdpau_dep = dependency('vdpau', required: get_option('vdpau'))

if vdpau_dep.found()
  # X11 is needed on top of libvdpau (vdp_device_create_x11).
  if x11_dep.found()
    gstvdpau = library('gstvdpau',
      vdpau_sources,
      c_args: gst_plugins_bad_args + ['-DGST_USE_UNSTABLE_API'],
      include_directories: [configinc],
      dependencies: [gstbase_dep, gstvideo_dep, gstcodecparsers_dep, vdpau_dep, x11_dep, libm],
      install: true,
      install_dir: plugins_install_dir,
    )
    pkgconfig.generate(gstvdpau, install_dir: plugins_pkgconfig_install_dir)
    plugins += [gstvdpau]
  elif get_option('vdpau').enabled()
    # Only a hard error when the plugin was explicitly requested.
    error('vdpau plugin was enabled but required X11 dependency was not found.')
  endif
endif

View file

@ -1,557 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-vdpaumpegdec
*
* FIXME:Describe vdpaumpegdec here.
*
* ## Example launch line
*
* |[
* gst-launch-1.0 -v -m fakesrc ! vdpaumpegdec ! fakesink silent=TRUE
* ]|
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <gst/gst.h>
#include <gst/base/gstbytereader.h>
#include <gst/base/gstbitreader.h>
#include <gst/codecparsers/gstmpegvideoparser.h>
#include <gst/codecparsers/gstmpegvideometa.h>
#include <string.h>
#include "gstvdpmpegdec.h"
#include "gstvdpvideomemory.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_dec_debug);
#define GST_CAT_DEFAULT gst_vdp_mpeg_dec_debug

/* the capabilities of the inputs and outputs.
 *
 * describe the real formats here.
 */
/* Accepts MPEG-1/2 elementary video streams (no system/program streams). */
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/mpeg, mpegversion = (int) [ 1, 2 ], "
        "systemstream = (boolean) false")
    );

/* Register the debug category as part of the type registration below. */
#define DEBUG_INIT \
    GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg_dec_debug, "vdpaumpegdec", 0, \
    "VDPAU mpeg decoder");

#define gst_vdp_mpeg_dec_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstVdpMpegDec, gst_vdp_mpeg_dec, GST_TYPE_VDP_DECODER,
    DEBUG_INIT);

static void gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info);

/* Length in bytes of an MPEG start-code prefix. */
#define SYNC_CODE_SIZE 3
/* Map the MPEG-2 sequence-extension profile to a VDPAU decoder profile.
 * Only simple profile is mapped specially; everything else is decoded as
 * main profile. */
static VdpDecoderProfile
gst_vdp_mpeg_dec_get_profile (GstMpegVideoSequenceExt * hdr)
{
  if (hdr->profile == GST_MPEG_VIDEO_PROFILE_SIMPLE)
    return VDP_DECODER_PROFILE_MPEG2_SIMPLE;

  return VDP_DECODER_PROFILE_MPEG2_MAIN;
}
/* Copy the picture-coding-extension fields into the VDPAU picture info.
 *
 * The #if 0 sections are dead code: a field-repeat computation and a
 * TFF frame flag that were never wired up (see the FIXME below).
 * Always returns TRUE.
 */
static gboolean
gst_vdp_mpeg_dec_handle_picture_coding (GstVdpMpegDec * mpeg_dec,
    GstMpegVideoPictureExt * pic_ext, GstVideoCodecFrame * frame)
{
  VdpPictureInfoMPEG1Or2 *info;
#if 0
  gint fields;
#endif

  GST_DEBUG_OBJECT (mpeg_dec, "Handling GstMpegVideoPictureExt");

  info = &mpeg_dec->vdp_info;

  /* FIXME : Set defaults when pic_ext isn't present */

  /* f_code is a 2x2 byte matrix of motion-vector ranges. */
  memcpy (&mpeg_dec->vdp_info.f_code, &pic_ext->f_code, 4);

  info->intra_dc_precision = pic_ext->intra_dc_precision;
  info->picture_structure = pic_ext->picture_structure;
  info->top_field_first = pic_ext->top_field_first;
  info->frame_pred_frame_dct = pic_ext->frame_pred_frame_dct;
  info->concealment_motion_vectors = pic_ext->concealment_motion_vectors;
  info->q_scale_type = pic_ext->q_scale_type;
  info->intra_vlc_format = pic_ext->intra_vlc_format;
  info->alternate_scan = pic_ext->alternate_scan;

#if 0
  /* Dead code: number of display fields per picture (for rate tracking). */
  fields = 2;
  if (pic_ext->picture_structure == 3) {
    if (mpeg_dec->stream_info.interlaced) {
      if (pic_ext->progressive_frame == 0)
        fields = 2;
      if (pic_ext->progressive_frame == 0 && pic_ext->repeat_first_field == 0)
        fields = 2;
      if (pic_ext->progressive_frame == 1 && pic_ext->repeat_first_field == 1)
        fields = 3;
    } else {
      if (pic_ext->repeat_first_field == 0)
        fields = 2;
      if (pic_ext->repeat_first_field == 1 && pic_ext->top_field_first == 0)
        fields = 4;
      if (pic_ext->repeat_first_field == 1 && pic_ext->top_field_first == 1)
        fields = 6;
    }
  } else
    fields = 1;
#endif

  if (pic_ext->top_field_first)
    GST_FIXME ("Set TFF on outgoing buffer");
#if 0
  GST_VIDEO_FRAME_FLAG_SET (frame, GST_VIDEO_FRAME_FLAG_TFF);
#endif

  return TRUE;
}
/* Copy the picture-header fields into the VDPAU picture info.
 *
 * For MPEG-1 streams the full-pel vectors and f_code come from the
 * picture header itself (MPEG-2 carries them in the picture coding
 * extension instead, handled in handle_picture_coding()).
 * Also derives the display frame number from the GOP base plus the
 * picture's temporal sequence number.  Always returns TRUE.
 */
static gboolean
gst_vdp_mpeg_dec_handle_picture (GstVdpMpegDec * mpeg_dec,
    GstMpegVideoPictureHdr * pic_hdr)
{
  GST_DEBUG_OBJECT (mpeg_dec, "Handling GstMpegVideoPictureHdr");

  mpeg_dec->vdp_info.picture_coding_type = pic_hdr->pic_type;

  if (mpeg_dec->stream_info.version == 1) {
    mpeg_dec->vdp_info.full_pel_forward_vector =
        pic_hdr->full_pel_forward_vector;
    mpeg_dec->vdp_info.full_pel_backward_vector =
        pic_hdr->full_pel_backward_vector;
    memcpy (&mpeg_dec->vdp_info.f_code, &pic_hdr->f_code, 4);
  }

  /* tsn = temporal sequence number within the current GOP. */
  mpeg_dec->frame_nr = mpeg_dec->gop_frame + pic_hdr->tsn;

  return TRUE;
}
/* GstVideoDecoder::set_format — remember the latest upstream caps state
 * for later output-state creation in handle_sequence(). */
static gboolean
gst_vdp_mpeg_dec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state)
{
  GstVdpMpegDec *self = (GstVdpMpegDec *) decoder;

  /* FIXME : Check the hardware can handle the level/profile */

  /* Swap out any previously stored input state. */
  if (self->input_state != NULL)
    gst_video_codec_state_unref (self->input_state);
  self->input_state = gst_video_codec_state_ref (state);

  return TRUE;
}
#if 0
/* Dead code (disabled): parse a GOP header and convert its timecode into
 * a base frame number so picture tsn values can be made absolute.
 * NOTE(review): kept under #if 0 in the original; delete or revive. */
static gboolean
gst_vdp_mpeg_dec_handle_gop (GstVdpMpegDec * mpeg_dec, const guint8 * data,
    gsize size, guint offset)
{
  GstMpegVideoGop gop;
  GstClockTime time;

  if (!gst_mpeg_video_parse_gop (&gop, data, size, offset))
    return FALSE;

  /* GOP timecode -> running time. */
  time = GST_SECOND * (gop.hour * 3600 + gop.minute * 60 + gop.second);

  GST_DEBUG ("gop timestamp: %" GST_TIME_FORMAT, GST_TIME_ARGS (time));

  /* Convert the timecode to a frame count at the stream frame rate. */
  mpeg_dec->gop_frame =
      gst_util_uint64_scale (time, mpeg_dec->stream_info.fps_n,
      mpeg_dec->stream_info.fps_d * GST_SECOND) + gop.frame;

  if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_GOP)
    mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_DATA;

  return TRUE;
}
#endif
/* Copy the quant-matrix extension's 8x8 (64-byte) intra and non-intra
 * quantiser matrices into the VDPAU picture info.  Always returns TRUE. */
static gboolean
gst_vdp_mpeg_dec_handle_quant_matrix (GstVdpMpegDec * mpeg_dec,
    GstMpegVideoQuantMatrixExt * qm)
{
  GST_DEBUG_OBJECT (mpeg_dec, "Handling GstMpegVideoQuantMatrixExt");

  memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
      &qm->intra_quantiser_matrix, 64);
  memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
      &qm->non_intra_quantiser_matrix, 64);

  return TRUE;
}
/* Handle a sequence header (plus optional MPEG-2 sequence extension).
 *
 * Builds the stream info — MPEG-1 defaults first, then MPEG-2 extension
 * overrides — sets/negotiates the YV12 output state and initialises the
 * VDPAU decoder for the stream's profile.  Moves the decoder state to
 * NEED_DATA.  Returns the result of gst_vdp_decoder_init_decoder().
 */
static GstFlowReturn
gst_vdp_mpeg_dec_handle_sequence (GstVdpMpegDec * mpeg_dec,
    GstMpegVideoSequenceHdr * hdr, GstMpegVideoSequenceExt * ext)
{
  GstFlowReturn ret;
  GstVideoDecoder *video_decoder = GST_VIDEO_DECODER (mpeg_dec);
  GstVdpMpegStreamInfo stream_info;

  GST_DEBUG_OBJECT (mpeg_dec, "Handling GstMpegVideoSequenceHdr");

  /* Default quant matrices from the sequence header (8x8 = 64 bytes). */
  memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
      &hdr->intra_quantizer_matrix, 64);
  memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
      &hdr->non_intra_quantizer_matrix, 64);

  /* MPEG-1 defaults; refined below when a sequence extension exists. */
  stream_info.width = hdr->width;
  stream_info.height = hdr->height;

  stream_info.fps_n = hdr->fps_n;
  stream_info.fps_d = hdr->fps_d;

  stream_info.par_n = hdr->par_w;
  stream_info.par_d = hdr->par_h;

  stream_info.interlaced = FALSE;
  stream_info.version = 1;
  stream_info.profile = VDP_DECODER_PROFILE_MPEG1;

  if (ext) {
    GST_DEBUG_OBJECT (mpeg_dec, "Handling GstMpegVideoSequenceExt");

    /* FIXME : isn't this already processed by mpegvideoparse ? */
    /* MPEG-2: extension bits widen the header's 12/14-bit base values. */
    stream_info.fps_n *= (ext->fps_n_ext + 1);
    stream_info.fps_d *= (ext->fps_d_ext + 1);

    stream_info.width += (ext->horiz_size_ext << 12);
    stream_info.height += (ext->vert_size_ext << 12);

    stream_info.interlaced = !ext->progressive;
    stream_info.version = 2;
    stream_info.profile = gst_vdp_mpeg_dec_get_profile (ext);
  }

  GST_DEBUG_OBJECT (mpeg_dec, "Setting output state to %dx%d",
      stream_info.width, stream_info.height);
  mpeg_dec->output_state =
      gst_video_decoder_set_output_state (video_decoder, GST_VIDEO_FORMAT_YV12,
      stream_info.width, stream_info.height, mpeg_dec->input_state);
  if (stream_info.interlaced)
    mpeg_dec->output_state->info.interlace_mode =
        GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
  gst_video_decoder_negotiate (video_decoder);

  /* MPEG needs at most 2 reference surfaces (forward + backward). */
  ret = gst_vdp_decoder_init_decoder (GST_VDP_DECODER (mpeg_dec),
      stream_info.profile, 2, mpeg_dec->output_state);

  mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_DATA;

  return ret;
}
/* GstVideoDecoder::handle_frame — decode one MPEG picture.
 *
 * The input buffer must carry a GstMpegVideoMeta with the parsed headers
 * and the slice region offset.  Flow:
 *  1. process sequence header/extension (renegotiates when present)
 *  2. process picture header, coding extension and quant matrices
 *  3. enforce reference availability (I before P, two refs before B)
 *  4. rotate the forward/backward reference surfaces
 *  5. render via VDPAU; B frames are output immediately, I/P frames are
 *     held back as the new backward reference until the next I/P arrives
 */
static GstFlowReturn
gst_vdp_mpeg_dec_handle_frame (GstVideoDecoder * video_decoder,
    GstVideoCodecFrame * frame)
{
  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (video_decoder);
  VdpPictureInfoMPEG1Or2 *info;
  GstMpegVideoMeta *mpeg_meta;
  GstVdpVideoMemory *vmem;

  GstFlowReturn ret = GST_FLOW_OK;
  VdpBitstreamBuffer vbit[1];
  GstMapInfo mapinfo;

  /* FIXME : Specify in sink query that we need the mpeg video meta */

  /* Parse all incoming data from the frame */
  mpeg_meta = gst_buffer_get_mpeg_video_meta (frame->input_buffer);
  if (!mpeg_meta)
    goto no_meta;

  /* GST_MPEG_VIDEO_PACKET_SEQUENCE */
  if (mpeg_meta->sequencehdr) {
    ret =
        gst_vdp_mpeg_dec_handle_sequence (mpeg_dec, mpeg_meta->sequencehdr,
        mpeg_meta->sequenceext);
    if (ret != GST_FLOW_OK)
      goto sequence_parse_fail;
  }

  /* Nothing is decodable before the first sequence header. */
  if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE)
    goto need_sequence;

  /* GST_MPEG_VIDEO_PACKET_PICTURE */
  if (mpeg_meta->pichdr)
    gst_vdp_mpeg_dec_handle_picture (mpeg_dec, mpeg_meta->pichdr);

  /* GST_MPEG_VIDEO_PACKET_EXT_PICTURE_CODING */
  if (mpeg_meta->picext)
    gst_vdp_mpeg_dec_handle_picture_coding (mpeg_dec, mpeg_meta->picext, frame);

  /* GST_MPEG_VIDEO_PACKET_GOP */
  /* if (mpeg_meta->gop) */
  /*   GST_FIXME_OBJECT (mpeg_dec, "Handle GOP !"); */
  /* gst_vdp_mpeg_dec_handle_gop (mpeg_dec, mpeg_frame.gop); */

  /* GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX */
  if (mpeg_meta->quantext)
    gst_vdp_mpeg_dec_handle_quant_matrix (mpeg_dec, mpeg_meta->quantext);

  info = &mpeg_dec->vdp_info;

  info->slice_count = mpeg_meta->num_slices;

  GST_DEBUG_OBJECT (mpeg_dec, "picture coding type %d",
      info->picture_coding_type);

  /* check if we can decode the frame */
  /* P/B need a backward (previous I/P) reference; B also needs forward. */
  if (info->picture_coding_type != GST_MPEG_VIDEO_PICTURE_TYPE_I
      && info->backward_reference == VDP_INVALID_HANDLE)
    goto need_i_frame;

  if (info->picture_coding_type == GST_MPEG_VIDEO_PICTURE_TYPE_B
      && info->forward_reference == VDP_INVALID_HANDLE)
    goto need_non_b_frame;

  /* A new I/P picture displaces the held reference frames: the previous
   * backward reference is output now and becomes the forward reference. */
  if (info->picture_coding_type != GST_MPEG_VIDEO_PICTURE_TYPE_B) {
    if (info->backward_reference != VDP_INVALID_HANDLE) {
      /* NOTE(review): despite the message, b_frame holds the previously
       * decoded I/P reference frame being pushed here — confirm naming. */
      GST_DEBUG_OBJECT (mpeg_dec, "Pushing B frame");
      ret = gst_video_decoder_finish_frame (video_decoder, mpeg_dec->b_frame);
    }

    if (info->forward_reference != VDP_INVALID_HANDLE) {
      GST_DEBUG_OBJECT (mpeg_dec, "Releasing no-longer needed forward frame");
      gst_video_codec_frame_unref (mpeg_dec->f_frame);
      info->forward_reference = VDP_INVALID_HANDLE;
    }

    info->forward_reference = info->backward_reference;
    mpeg_dec->f_frame = mpeg_dec->b_frame;

    info->backward_reference = VDP_INVALID_HANDLE;
  }

  if (ret != GST_FLOW_OK)
    goto exit_after_b_frame;

  /* decode */
  if (!gst_buffer_map (frame->input_buffer, &mapinfo, GST_MAP_READ))
    goto map_fail;

  /* One bitstream buffer covering all slices of the picture. */
  vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  vbit[0].bitstream = mapinfo.data + mpeg_meta->slice_offset;
  vbit[0].bitstream_bytes = mapinfo.size - mpeg_meta->slice_offset;

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (mpeg_dec),
      (VdpPictureInfo *) info, 1, vbit, frame);
  gst_buffer_unmap (frame->input_buffer, &mapinfo);

  if (ret != GST_FLOW_OK)
    goto render_fail;

  vmem = (GstVdpVideoMemory *) gst_buffer_get_memory (frame->output_buffer, 0);

  if (info->picture_coding_type == GST_MPEG_VIDEO_PICTURE_TYPE_B) {
    /* B frames are never referenced: output immediately. */
    ret = gst_video_decoder_finish_frame (video_decoder, frame);
  } else {
    /* I/P frames become the new backward reference and are held back. */
    info->backward_reference = vmem->surface;
    mpeg_dec->b_frame = gst_video_codec_frame_ref (frame);
  }

  return ret;

  /* EARLY EXIT */

need_sequence:
  {
    GST_DEBUG_OBJECT (mpeg_dec, "Drop frame since we haven't found a "
        "GST_MPEG_VIDEO_PACKET_SEQUENCE yet");

    gst_video_decoder_finish_frame (video_decoder, frame);
    return GST_FLOW_OK;
  }

need_i_frame:
  {
    GST_DEBUG_OBJECT (mpeg_dec,
        "Drop frame since we haven't got an I_FRAME yet");

    gst_video_decoder_finish_frame (video_decoder, frame);
    return GST_FLOW_OK;
  }

need_non_b_frame:
  {
    GST_DEBUG_OBJECT (mpeg_dec,
        "Drop frame since we haven't got two non B_FRAME yet");

    gst_video_decoder_finish_frame (video_decoder, frame);
    return GST_FLOW_OK;
  }

  /* ERRORS */
no_meta:
  {
    GST_ERROR_OBJECT (video_decoder,
        "Input buffer does not have MpegVideo GstMeta");
    gst_video_decoder_drop_frame (video_decoder, frame);
    return GST_FLOW_ERROR;
  }

sequence_parse_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to handle sequence header");
    gst_video_decoder_finish_frame (video_decoder, frame);
    return ret;
  }

exit_after_b_frame:
  {
    GST_WARNING_OBJECT (video_decoder, "Leaving after pushing B frame");
    gst_video_decoder_finish_frame (video_decoder, frame);
    return ret;
  }

map_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to map input buffer");
    gst_video_decoder_drop_frame (video_decoder, frame);
    return GST_FLOW_ERROR;
  }

render_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Error when rendering the frame");
    gst_video_decoder_drop_frame (video_decoder, frame);
    return ret;
  }
}
/* GstVideoDecoder::flush vfunc: drop the reference frames the decoder is
 * holding (their presence is tracked via the surface handles recorded in
 * vdp_info) and reset the picture info and packet tracking state. */
static gboolean
gst_vdp_mpeg_dec_flush (GstVideoDecoder * video_decoder)
{
  GstVdpMpegDec *self = GST_VDP_MPEG_DEC (video_decoder);
  gboolean have_fwd = self->vdp_info.forward_reference != VDP_INVALID_HANDLE;
  gboolean have_bwd = self->vdp_info.backward_reference != VDP_INVALID_HANDLE;

  if (have_fwd)
    gst_video_codec_frame_unref (self->f_frame);
  if (have_bwd)
    gst_video_codec_frame_unref (self->b_frame);

  self->prev_packet = -1;
  gst_vdp_mpeg_dec_init_info (&self->vdp_info);

  return TRUE;
}
/* GstVideoDecoder::start vfunc: reset all decoding state to its initial
 * values before chaining up to the parent class. */
static gboolean
gst_vdp_mpeg_dec_start (GstVideoDecoder * video_decoder)
{
  GstVdpMpegDec *self = GST_VDP_MPEG_DEC (video_decoder);

  GST_DEBUG_OBJECT (video_decoder, "Starting");

  memset (&self->stream_info, 0, sizeof (GstVdpMpegStreamInfo));
  self->decoder = VDP_INVALID_HANDLE;
  self->state = GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE;
  gst_vdp_mpeg_dec_init_info (&self->vdp_info);

  return GST_VIDEO_DECODER_CLASS (parent_class)->start (video_decoder);
}
/* GstVideoDecoder::stop vfunc: invalidate any surface handles still
 * recorded in the picture info and return to the need-sequence state,
 * then chain up. */
static gboolean
gst_vdp_mpeg_dec_stop (GstVideoDecoder * video_decoder)
{
  GstVdpMpegDec *self = GST_VDP_MPEG_DEC (video_decoder);
  VdpPictureInfoMPEG1Or2 *pic = &self->vdp_info;

  if (pic->forward_reference != VDP_INVALID_HANDLE)
    pic->forward_reference = VDP_INVALID_HANDLE;
  if (pic->backward_reference != VDP_INVALID_HANDLE)
    pic->backward_reference = VDP_INVALID_HANDLE;

  self->state = GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE;

  return GST_VIDEO_DECODER_CLASS (parent_class)->stop (video_decoder);
}
/* Class init: register element metadata, the sink pad template, and the
 * GstVideoDecoder vfunc implementations. */
static void
gst_vdp_mpeg_dec_class_init (GstVdpMpegDecClass * klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);

  gst_element_class_set_static_metadata (element_class,
      "VDPAU Mpeg Decoder",
      "Decoder",
      "Decode mpeg stream with vdpau",
      "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
  gst_element_class_add_static_pad_template (element_class, &sink_template);

  decoder_class->set_format = gst_vdp_mpeg_dec_set_format;
  decoder_class->start = gst_vdp_mpeg_dec_start;
  decoder_class->stop = gst_vdp_mpeg_dec_stop;
  decoder_class->flush = gst_vdp_mpeg_dec_flush;
  decoder_class->handle_frame = gst_vdp_mpeg_dec_handle_frame;
}
/* Reset a VdpPictureInfoMPEG1Or2 to the decoder's defaults: no reference
 * surfaces, no slices, frame picture with frame prediction/DCT and
 * top-field-first. Only the fields this decoder manages are touched. */
static void
gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info)
{
  /* reference surfaces */
  vdp_info->forward_reference = VDP_INVALID_HANDLE;
  vdp_info->backward_reference = VDP_INVALID_HANDLE;

  /* per-picture coding parameters */
  vdp_info->slice_count = 0;
  vdp_info->picture_coding_type = 0;
  vdp_info->picture_structure = 3;
  vdp_info->top_field_first = 1;
  vdp_info->frame_pred_frame_dct = 1;

  /* bitstream syntax flags */
  vdp_info->intra_dc_precision = 0;
  vdp_info->concealment_motion_vectors = 0;
  vdp_info->intra_vlc_format = 0;
  vdp_info->alternate_scan = 0;
  vdp_info->q_scale_type = 0;
}
/* Instance init: intentionally empty — all per-stream state is set up in
 * the start() vfunc instead. */
static void
gst_vdp_mpeg_dec_init (GstVdpMpegDec * mpeg_dec)
{
}

View file

@ -1,91 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_VDP_MPEG_DEC_H__
#define __GST_VDP_MPEG_DEC_H__

#include <gst/gst.h>
#include <gst/base/gstadapter.h>

#include "../gstvdpdecoder.h"

G_BEGIN_DECLS

typedef struct _GstVdpMpegStreamInfo GstVdpMpegStreamInfo;

/* Stream-level parameters collected from MPEG sequence headers; compared
 * against the previous values to detect when the VDPAU decoder must be
 * (re)created. */
struct _GstVdpMpegStreamInfo
{
  gint width, height;
  gint fps_n, fps_d;
  gint par_n, par_d;
  gboolean interlaced;
  gint version;              /* MPEG version (1 or 2) */
  VdpDecoderProfile profile; /* VDPAU profile chosen for this stream */
};

#define GST_TYPE_VDP_MPEG_DEC            (gst_vdp_mpeg_dec_get_type())
#define GST_VDP_MPEG_DEC(obj)            (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_MPEG_DEC,GstVdpMpegDec))
#define GST_VDP_MPEG_DEC_CLASS(klass)    (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_MPEG_DEC,GstVdpMpegDecClass))
#define GST_IS_VDP_MPEG_DEC(obj)         (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_MPEG_DEC))
#define GST_IS_VDP_MPEG_DEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_MPEG_DEC))

/* Parsing state machine: what the decoder still needs before it can
 * decode picture data. */
typedef enum {
  GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE,
  GST_VDP_MPEG_DEC_STATE_NEED_GOP,
  GST_VDP_MPEG_DEC_STATE_NEED_DATA
} GstVdpMpegDecState;

typedef struct _GstVdpMpegDec GstVdpMpegDec;
typedef struct _GstVdpMpegDecClass GstVdpMpegDecClass;

struct _GstVdpMpegDec
{
  GstVdpDecoder vdp_decoder;

  VdpDecoder decoder;                /* VDPAU decoder handle */
  GstVdpMpegStreamInfo stream_info;  /* last-seen stream parameters */

  /* decoder state */
  GstVideoCodecState *input_state;
  GstVideoCodecState *output_state;
  GstVdpMpegDecState state;
  gint prev_packet;

  /* currently decoded frame info */
  VdpPictureInfoMPEG1Or2 vdp_info;
  guint64 frame_nr;
  /* frame_nr from GOP */
  guint64 gop_frame;

  /* forward and backward reference */
  GstVideoCodecFrame *f_frame, *b_frame;
};

struct _GstVdpMpegDecClass
{
  GstVdpDecoderClass vdp_decoder_class;
};

GType gst_vdp_mpeg_dec_get_type (void);

G_END_DECLS

#endif /* __GST_VDP_MPEG_DEC_H__ */

View file

@ -1,109 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstmpeg4frame.h"
GST_DEBUG_CATEGORY_STATIC (gst_mpeg4_frame_debug);
#define GST_CAT_DEFAULT gst_mpeg4_frame_debug
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_mpeg4_frame_debug, "gstmpeg4frame", 0, "Mpeg4 Frame");
/* Allocate a new, empty GstMpeg4Frame mini object. */
GstMpeg4Frame *
gst_mpeg4_frame_new (void)
{
  return (GstMpeg4Frame *) gst_mini_object_new (GST_TYPE_MPEG4_FRAME);
}
static GObjectClass *gst_mpeg4_frame_parent_class;

/* Finalize: release every parsed packet buffer this frame still owns,
 * then chain up to the parent class. */
static void
gst_mpeg4_frame_finalize (GstMpeg4Frame * mpeg4_frame)
{
  GstBuffer *packets[] = {
    mpeg4_frame->vos_buf, mpeg4_frame->vo_buf, mpeg4_frame->vol_buf,
    mpeg4_frame->gov_buf, mpeg4_frame->vop_buf
  };
  guint i;

  for (i = 0; i < G_N_ELEMENTS (packets); i++) {
    if (packets[i])
      gst_buffer_unref (packets[i]);
  }

  GST_MINI_OBJECT_CLASS (gst_mpeg4_frame_parent_class)->finalize
      (GST_MINI_OBJECT (mpeg4_frame));
}
/* Instance init: intentionally empty — the mini object's memory is
 * zero-initialised, so all buffer pointers start out NULL. */
static void
gst_mpeg4_frame_init (GstMpeg4Frame * mpeg4_frame, gpointer g_class)
{
}
/* Class init: remember the parent class and install our finalize hook. */
static void
gst_mpeg4_frame_class_init (gpointer g_class, gpointer class_data)
{
  GstMiniObjectClass *mo_class = GST_MINI_OBJECT_CLASS (g_class);

  gst_mpeg4_frame_parent_class = g_type_class_peek_parent (g_class);
  mo_class->finalize =
      (GstMiniObjectFinalizeFunction) gst_mpeg4_frame_finalize;
}
/* Lazily register and return the GstMpeg4Frame GType (a GstVideoFrame
 * subtype).  The first call also initialises the debug category via
 * DEBUG_INIT.
 * NOTE(review): the lazy registration is not guarded against concurrent
 * first calls (no g_once) — presumably first use is single-threaded;
 * confirm against callers. */
GType
gst_mpeg4_frame_get_type (void)
{
  static GType _gst_mpeg4_frame_type = 0;

  if (G_UNLIKELY (_gst_mpeg4_frame_type == 0)) {
    static const GTypeInfo info = {
      sizeof (GstMpeg4FrameClass),
      NULL,
      NULL,
      gst_mpeg4_frame_class_init,
      NULL,
      NULL,
      sizeof (GstMpeg4Frame),
      0,
      (GInstanceInitFunc) gst_mpeg4_frame_init,
      NULL
    };
    _gst_mpeg4_frame_type = g_type_register_static (GST_TYPE_VIDEO_FRAME,
        "GstMpeg4Frame", &info, 0);

    DEBUG_INIT ();
  }
  return _gst_mpeg4_frame_type;
}

View file

@ -1,45 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef _GST_MPEG4_FRAME_H_
#define _GST_MPEG4_FRAME_H_

#include <gst/gst.h>

#include "mpeg4util.h"

#define GST_MPEG4_FRAME_GOT_PRIMARY GST_VIDEO_FRAME_FLAG_LAST

typedef struct _GstMpeg4Frame GstMpeg4Frame;

/* A video frame together with the raw MPEG-4 packets collected for it
 * while parsing (one buffer per packet type; NULL if not yet seen). */
struct _GstMpeg4Frame
{
  GstBuffer *vos_buf;  /* Visual Object Sequence packet */
  GstBuffer *vo_buf;   /* Visual Object packet */
  GstBuffer *vol_buf;  /* Video Object Layer packet */
  GstBuffer *gov_buf;  /* Group of VOP packet */
  GstBuffer *vop_buf;  /* Video Object Plane (picture data) packet */

  guint32 vop_time;    /* presentation time derived from the VOP header */
};

GstMpeg4Frame *gst_mpeg4_frame_new (void);

#endif

View file

@ -1,476 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-vdpaumpeg4dec
*
* FIXME:Describe vdpaumpeg4dec here.
*
* ## Example launch line
*
* |[
* gst-launch-1.0 -v -m fakesrc ! vdpaumpeg4dec ! fakesink silent=TRUE
* ]|
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <math.h>
#include <gst/gst.h>
#include <vdpau/vdpau.h>
#include <string.h>
#include "gstvdpmpeg4dec.h"
GST_DEBUG_CATEGORY (gst_vdp_mpeg4_dec_debug);
#define GST_CAT_DEFAULT gst_vdp_mpeg4_dec_debug

/* the capabilities of the inputs and outputs.
 *
 * describe the real formats here.
 */
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/mpeg, mpegversion = (int) 4, "
        "systemstream = (boolean) false; "
        "video/x-divx, divxversion = (int) [4, 5]; " "video/x-xvid"));

/* Debug-category initialiser passed to G_DEFINE_TYPE_FULL below. */
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg4_dec_debug, "vdpaumpeg4dec", 0, \
    "VDPAU mpeg4 decoder");

G_DEFINE_TYPE_FULL (GstVdpMpeg4Dec, gst_vdp_mpeg4_dec, GST_TYPE_VDP_DECODER,
    DEBUG_INIT);

/* Length in bytes of the 0x000001 start-code prefix. */
#define SYNC_CODE_SIZE 3
/* Build the VdpPictureInfoMPEG4Part2 for one VOP from the cached VOL
 * header, the decoder's reference frames and the parsed VOP header.
 *
 * NOTE(review): in the B_VOP branch, b_frame and f_frame are dereferenced
 * unconditionally to compute trd/trb, yet backward_reference is only set
 * under a later `if (mpeg4_dec->b_frame)` check — if b_frame could really
 * be NULL here this would crash first; verify against the caller's
 * reference-frame handling. */
static VdpPictureInfoMPEG4Part2
gst_vdp_mpeg4_dec_fill_info (GstVdpMpeg4Dec * mpeg4_dec,
    GstMpeg4Frame * mpeg4_frame, Mpeg4VideoObjectPlane * vop)
{
  Mpeg4VideoObjectLayer *vol;
  VdpPictureInfoMPEG4Part2 info;

  vol = &mpeg4_dec->vol;

  info.forward_reference = VDP_INVALID_HANDLE;
  info.backward_reference = VDP_INVALID_HANDLE;

  /* forward reference */
  if (vop->coding_type != I_VOP && mpeg4_dec->f_frame) {
    info.forward_reference =
        GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME (mpeg4_dec->f_frame)->
        src_buffer)->surface;
  }

  if (vop->coding_type == B_VOP) {
    guint32 trd_time, trb_time;

    /* temporal distances used for B-frame direct-mode scaling */
    trd_time = mpeg4_dec->b_frame->vop_time - mpeg4_dec->f_frame->vop_time;
    trb_time = mpeg4_frame->vop_time - mpeg4_dec->f_frame->vop_time;

    info.trd[0] = trd_time;
    info.trb[0] = trb_time;

    info.trd[1] = round ((double) trd_time / (double) mpeg4_dec->tframe);
    info.trb[1] = round ((double) trb_time / (double) mpeg4_dec->tframe);

    /* backward reference */
    if (mpeg4_dec->b_frame) {
      info.backward_reference =
          GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME (mpeg4_dec->b_frame)->
          src_buffer)->surface;
    }
  }

  /* copy quantisation matrices and static VOL-derived parameters */
  memcpy (info.intra_quantizer_matrix, vol->intra_quant_mat, 64);
  memcpy (info.non_intra_quantizer_matrix, vol->non_intra_quant_mat, 64);

  info.vop_time_increment_resolution = vol->vop_time_increment_resolution;
  info.resync_marker_disable = vol->resync_marker_disable;
  info.interlaced = vol->interlaced;
  info.quant_type = vol->quant_type;
  info.quarter_sample = vol->quarter_sample;
  /* FIXME: support short video header */
  info.short_video_header = FALSE;

  /* per-VOP parameters */
  info.vop_coding_type = vop->coding_type;
  info.vop_fcode_forward = vop->fcode_forward;
  info.vop_fcode_backward = vop->fcode_backward;
  info.rounding_control = vop->rounding_type;
  info.alternate_vertical_scan_flag = vop->alternate_vertical_scan_flag;
  info.top_field_first = vop->top_field_first;

  return info;
}
static gboolean
gst_vdp_mpeg4_dec_handle_configuration (GstVdpMpeg4Dec * mpeg4_dec,
GstMpeg4Frame * mpeg4_frame)
{
Mpeg4VisualObjectSequence vos;
Mpeg4VisualObject vo;
Mpeg4VideoObjectLayer vol;
GstVideoState state;
guint8 profile_indication;
VdpDecoderProfile profile;
GstFlowReturn ret;
if (mpeg4_dec->is_configured)
return GST_FLOW_OK;
if (!mpeg4_frame->vos_buf || !mpeg4_frame->vo_buf || !mpeg4_frame->vol_buf)
goto skip_frame;
if (!mpeg4_util_parse_VOS (mpeg4_frame->vos_buf, &vos))
goto skip_frame;
if (!mpeg4_util_parse_VO (mpeg4_frame->vo_buf, &vo))
goto skip_frame;
if (!mpeg4_util_parse_VOL (mpeg4_frame->vol_buf, &vo, &vol))
goto skip_frame;
state = gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (mpeg4_dec));
state.width = vol.width;
state.height = vol.height;
if (vol.fixed_vop_rate) {
state.fps_n = vol.vop_time_increment_resolution;
state.fps_d = vol.fixed_vop_time_increment;
}
state.par_n = vol.par_n;
state.par_d = vol.par_d;
gst_base_video_decoder_set_state (GST_BASE_VIDEO_DECODER (mpeg4_dec), state);
profile_indication = vos.profile_and_level_indication >> 4;
switch (profile_indication) {
case 0x0:
profile = VDP_DECODER_PROFILE_MPEG4_PART2_SP;
break;
case 0xf:
profile = VDP_DECODER_PROFILE_MPEG4_PART2_ASP;
break;
default:
goto unsupported_profile;
}
ret = gst_vdp_decoder_init_decoder (GST_VDP_DECODER (mpeg4_dec), profile, 2);
if (ret != GST_FLOW_OK)
return ret;
mpeg4_dec->vol = vol;
mpeg4_dec->is_configured = TRUE;
return GST_FLOW_OK;
skip_frame:
GST_WARNING ("Skipping frame since we're not configured yet");
gst_base_video_decoder_skip_frame (GST_BASE_VIDEO_DECODER (mpeg4_dec),
GST_VIDEO_FRAME (mpeg4_frame));
return GST_FLOW_CUSTOM_ERROR;
unsupported_profile:
GST_ELEMENT_ERROR (mpeg4_dec, STREAM, WRONG_TYPE,
("vdpaumpeg4dec doesn't support this streams profile"),
("profile_and_level_indication: %d", vos.profile_and_level_indication));
return GST_FLOW_ERROR;
}
/* GstBaseVideoDecoder::handle_frame vfunc: configure on first use, parse
 * the VOP header, submit the picture to VDPAU and manage the forward /
 * backward reference frames.  B-VOPs are finished immediately; I/P-VOPs
 * are held back as the new backward reference until the next non-B VOP
 * arrives. */
static GstFlowReturn
gst_vdp_mpeg4_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  GstVdpMpeg4Dec *mpeg4_dec = GST_VDP_MPEG4_DEC (base_video_decoder);

  GstMpeg4Frame *mpeg4_frame;
  GstFlowReturn ret;

  Mpeg4VideoObjectLayer *vol;
  Mpeg4VideoObjectPlane vop;

  VdpPictureInfoMPEG4Part2 info;
  VdpBitstreamBuffer bufs[1];
  GstVdpVideoBuffer *video_buf;

  mpeg4_frame = GST_MPEG4_FRAME (frame);

  ret = gst_vdp_mpeg4_dec_handle_configuration (mpeg4_dec, mpeg4_frame);
  if (ret != GST_FLOW_OK)
    return ret;

  vol = &mpeg4_dec->vol;
  if (!mpeg4_util_parse_VOP (mpeg4_frame->vop_buf, vol, &vop)) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_CUSTOM_ERROR;
  }

  /* calculate vop time */
  mpeg4_frame->vop_time =
      vop.modulo_time_base * vol->vop_time_increment_resolution +
      vop.time_increment;

  /* tframe (frame period) is derived from the first B-VOP's distance to
   * the forward reference */
  if (mpeg4_dec->tframe == -1 && vop.coding_type == B_VOP)
    mpeg4_dec->tframe = mpeg4_frame->vop_time - mpeg4_dec->f_frame->vop_time;

  if (vop.coding_type != B_VOP) {
    /* a new non-B VOP: the pending backward reference can now be output
     * and becomes the forward reference */
    if (mpeg4_dec->b_frame) {

      ret = gst_base_video_decoder_finish_frame (base_video_decoder,
          GST_VIDEO_FRAME_CAST (mpeg4_dec->b_frame));

      if (mpeg4_dec->f_frame)
        gst_video_frame_unref (GST_VIDEO_FRAME_CAST (mpeg4_dec->f_frame));

      mpeg4_dec->f_frame = mpeg4_dec->b_frame;
      mpeg4_dec->b_frame = NULL;
    }
  }

  info = gst_vdp_mpeg4_dec_fill_info (mpeg4_dec, mpeg4_frame, &vop);

  /* submit the VOP packet to the VDPAU decoder */
  bufs[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  bufs[0].bitstream = GST_BUFFER_DATA (mpeg4_frame->vop_buf);
  bufs[0].bitstream_bytes = GST_BUFFER_SIZE (mpeg4_frame->vop_buf);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (base_video_decoder),
      (VdpPictureInfo *) & info, 1, bufs, &video_buf);
  if (ret != GST_FLOW_OK) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return ret;
  }

  frame->src_buffer = GST_BUFFER_CAST (video_buf);

  /* B-VOPs are output right away; others become the backward reference */
  if (vop.coding_type == B_VOP)
    ret = gst_base_video_decoder_finish_frame (base_video_decoder, frame);
  else {
    gst_video_frame_ref (GST_VIDEO_FRAME_CAST (mpeg4_frame));
    mpeg4_dec->b_frame = mpeg4_frame;
    ret = GST_FLOW_OK;
  }

  return ret;
}
/* GstBaseVideoDecoder::parse_data vfunc: classify one start-code packet
 * and stash it in the current GstMpeg4Frame.  Seeing a VOS/EVOS/GOV/VOP
 * packet while a VOP is already pending closes the current frame via
 * have_frame().  Takes ownership of @buf: it is either stored in the
 * frame (gst_buffer_replace refs it; the VOP case stores it directly) or
 * unreffed.  The `error:` label is only reached from the SKIP/READ_UINT8
 * macros on short buffers. */
static GstFlowReturn
gst_vdp_mpeg4_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
    GstBuffer * buf, gboolean at_eos, GstVideoFrame * frame)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);
  guint8 start_code;
  GstMpeg4Frame *mpeg4_frame;
  GstFlowReturn ret = GST_FLOW_OK;

  /* start code prefix */
  SKIP (&reader, 24);

  /* start_code */
  READ_UINT8 (&reader, start_code, 8);

  mpeg4_frame = GST_MPEG4_FRAME_CAST (frame);

  /* collect packages */
  if (start_code == MPEG4_PACKET_VOS) {
    if (mpeg4_frame->vop_buf)
      ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
          (GstVideoFrame **) & mpeg4_frame);

    gst_buffer_replace (&mpeg4_frame->vos_buf, buf);
  }

  else if (start_code == MPEG4_PACKET_EVOS) {
    if (mpeg4_frame->vop_buf)
      ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
          (GstVideoFrame **) & mpeg4_frame);
  }

  else if (start_code == MPEG4_PACKET_VO)
    gst_buffer_replace (&mpeg4_frame->vo_buf, buf);

  else if (start_code >= MPEG4_PACKET_VOL_MIN &&
      start_code <= MPEG4_PACKET_VOL_MAX)
    gst_buffer_replace (&mpeg4_frame->vol_buf, buf);

  else if (start_code == MPEG4_PACKET_GOV) {
    if (mpeg4_frame->vop_buf)
      ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
          (GstVideoFrame **) & mpeg4_frame);

    gst_buffer_replace (&mpeg4_frame->gov_buf, buf);
  }

  else if (start_code == MPEG4_PACKET_VOP) {
    if (mpeg4_frame->vop_buf)
      ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
          (GstVideoFrame **) & mpeg4_frame);

    /* stored directly, not via gst_buffer_replace: ownership of @buf
     * transfers to the frame without an extra ref */
    mpeg4_frame->vop_buf = buf;
  }

  else
    gst_buffer_unref (buf);

  if (at_eos && mpeg4_frame->vop_buf)
    ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE,
        (GstVideoFrame **) & mpeg4_frame);

  return ret;

error:
  gst_buffer_unref (buf);
  GST_WARNING ("error parsing packet");
  return GST_FLOW_OK;
}
/* GstBaseVideoDecoder::scan_for_sync vfunc: locate the next 0x000001
 * start-code prefix in the adapter.  If none is found, everything except
 * the last SYNC_CODE_SIZE bytes (a possibly split prefix) can be
 * discarded. */
static gint
gst_vdp_mpeg4_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder,
    GstAdapter * adapter)
{
  gint pos = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
      0, gst_adapter_available (adapter));

  if (pos != -1)
    return pos;

  return gst_adapter_available (adapter) - SYNC_CODE_SIZE;
}
/* GstBaseVideoDecoder::scan_for_packet_end vfunc: verify the adapter
 * starts with a 0x000001 start-code prefix and set *size to the offset of
 * the next start code (i.e. the length of the current packet).
 *
 * Fix: the original g_slice_alloc'd a 3-byte scratch buffer and never
 * freed it on any return path, leaking 3 bytes per call; peek into a
 * stack buffer instead. */
static GstBaseVideoDecoderScanResult
gst_vdp_mpeg4_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder,
    GstAdapter * adapter, guint * size, gboolean at_eos)
{
  guint8 data[SYNC_CODE_SIZE];
  guint32 sync_code;

  /* peek the first three bytes — they must be a start-code prefix */
  gst_adapter_copy (adapter, data, 0, SYNC_CODE_SIZE);
  sync_code = ((data[0] << 16) | (data[1] << 8) | data[2]);

  if (sync_code != 0x000001)
    return GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC;

  /* find the next start code, which terminates this packet */
  *size = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
      SYNC_CODE_SIZE, gst_adapter_available (adapter) - SYNC_CODE_SIZE);

  if (*size == -1)
    return GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA;

  return GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK;
}
/* GstBaseVideoDecoder::create_frame vfunc: frames for this decoder are
 * GstMpeg4Frame objects so parsed packets can be attached to them. */
static GstVideoFrame *
gst_vdp_mpeg4_dec_create_frame (GstBaseVideoDecoder * base_video_decoder)
{
  GstMpeg4Frame *frame = gst_mpeg4_frame_new ();

  return GST_VIDEO_FRAME_CAST (frame);
}
/* GstBaseVideoDecoder::flush vfunc: drop both reference frames. */
static gboolean
gst_vdp_mpeg4_dec_flush (GstBaseVideoDecoder * base_video_decoder)
{
  GstVdpMpeg4Dec *self = GST_VDP_MPEG4_DEC (base_video_decoder);

  if (self->f_frame != NULL) {
    gst_video_frame_unref (GST_VIDEO_FRAME_CAST (self->f_frame));
    self->f_frame = NULL;
  }
  if (self->b_frame != NULL) {
    gst_video_frame_unref (GST_VIDEO_FRAME_CAST (self->b_frame));
    self->b_frame = NULL;
  }

  return TRUE;
}
/* GstBaseVideoDecoder::start vfunc: reset per-stream state, then chain
 * up.  tframe == -1 means the frame period is not yet known. */
static gboolean
gst_vdp_mpeg4_dec_start (GstBaseVideoDecoder * base_video_decoder)
{
  GstVdpMpeg4Dec *self = GST_VDP_MPEG4_DEC (base_video_decoder);

  self->tframe = -1;
  self->is_configured = FALSE;
  self->f_frame = NULL;
  self->b_frame = NULL;

  return GST_BASE_VIDEO_DECODER_CLASS
      (parent_class)->start (base_video_decoder);
}
/* GstBaseVideoDecoder::stop vfunc: nothing of our own to tear down; just
 * chain up to the parent class. */
static gboolean
gst_vdp_mpeg4_dec_stop (GstBaseVideoDecoder * base_video_decoder)
{
  GstBaseVideoDecoderClass *parent =
      GST_BASE_VIDEO_DECODER_CLASS (parent_class);

  return parent->stop (base_video_decoder);
}
/* Base init: register the element's static metadata and its sink pad
 * template. */
static void
gst_vdp_mpeg4_dec_base_init (gpointer gclass)
{
  GstElementClass *klass = GST_ELEMENT_CLASS (gclass);

  gst_element_class_add_static_pad_template (klass, &sink_template);
  gst_element_class_set_static_metadata (klass,
      "VDPAU Mpeg4 Decoder",
      "Decoder",
      "Decode mpeg4 stream with vdpau",
      "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
}
/* Class init: wire up every GstBaseVideoDecoder vfunc this decoder
 * implements. */
static void
gst_vdp_mpeg4_dec_class_init (GstVdpMpeg4DecClass * klass)
{
  GstBaseVideoDecoderClass *dec_class = GST_BASE_VIDEO_DECODER_CLASS (klass);

  /* lifecycle */
  dec_class->start = gst_vdp_mpeg4_dec_start;
  dec_class->stop = gst_vdp_mpeg4_dec_stop;
  dec_class->flush = gst_vdp_mpeg4_dec_flush;

  /* parsing */
  dec_class->create_frame = gst_vdp_mpeg4_dec_create_frame;
  dec_class->scan_for_sync = gst_vdp_mpeg4_dec_scan_for_sync;
  dec_class->scan_for_packet_end = gst_vdp_mpeg4_dec_scan_for_packet_end;
  dec_class->parse_data = gst_vdp_mpeg4_dec_parse_data;

  /* decoding */
  dec_class->handle_frame = gst_vdp_mpeg4_dec_handle_frame;
}
/* Instance init: intentionally empty — per-stream state is initialised in
 * the start() vfunc. */
static void
gst_vdp_mpeg4_dec_init (GstVdpMpeg4Dec * mpeg4_dec,
    GstVdpMpeg4DecClass * gclass)
{
}

View file

@ -1,62 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_VDP_MPEG4_DEC_H__
#define __GST_VDP_MPEG4_DEC_H__

#include <gst/gst.h>

#include "../gstvdpdecoder.h"
#include "mpeg4util.h"
#include "gstmpeg4frame.h"

G_BEGIN_DECLS

#define GST_TYPE_VDP_MPEG4_DEC            (gst_vdp_mpeg4_dec_get_type())
#define GST_VDP_MPEG4_DEC(obj)            (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_MPEG4_DEC,GstVdpMpeg4Dec))
#define GST_VDP_MPEG4_DEC_CLASS(klass)    (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_MPEG4_DEC,GstVdpMpeg4DecClass))
#define GST_IS_VDP_MPEG4_DEC(obj)         (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_MPEG4_DEC))
#define GST_IS_VDP_MPEG4_DEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_MPEG4_DEC))

typedef struct _GstVdpMpeg4Dec GstVdpMpeg4Dec;
typedef struct _GstVdpMpeg4DecClass GstVdpMpeg4DecClass;

struct _GstVdpMpeg4Dec
{
  GstVdpDecoder vdp_decoder;

  gboolean is_configured;          /* set once VOS/VO/VOL were parsed */
  Mpeg4VideoObjectLayer vol;       /* cached VOL header */

  guint32 tframe;                  /* frame period; -1 until known */

  /* forward and backward reference frames */
  GstMpeg4Frame *f_frame, *b_frame;
};

struct _GstVdpMpeg4DecClass
{
  GstVdpDecoderClass vdp_decoder_class;
};

GType gst_vdp_mpeg4_dec_get_type (void);

G_END_DECLS

#endif /* __GST_VDP_MPEG4_DEC_H__ */

View file

@ -1,473 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <string.h>
#include "mpeg4util.h"
GST_DEBUG_CATEGORY_EXTERN (gst_vdp_mpeg4_dec_debug);
#define GST_CAT_DEFAULT gst_vdp_mpeg4_dec_debug

/* Default intra quantisation matrix, used when the VOL does not carry
 * its own (see mpeg4_util_parse_quant). */
const guint8 default_intra_quant_mat[64] = {
  8, 17, 18, 19, 21, 23, 25, 27,
  17, 18, 19, 21, 23, 25, 27, 28,
  20, 21, 22, 23, 24, 26, 28, 30,
  21, 22, 23, 24, 26, 28, 30, 32,
  22, 23, 24, 26, 28, 30, 32, 35,
  23, 24, 26, 28, 30, 32, 35, 38,
  25, 26, 28, 30, 32, 35, 38, 41,
  27, 28, 30, 32, 35, 38, 41, 45
};

/* Default non-intra quantisation matrix. */
const guint8 default_non_intra_quant_mat[64] = {
  16, 17, 18, 19, 20, 21, 22, 23,
  17, 18, 19, 20, 21, 22, 23, 24,
  18, 19, 20, 21, 22, 23, 24, 25,
  19, 20, 21, 22, 23, 24, 26, 27,
  20, 21, 22, 23, 25, 26, 27, 28,
  21, 22, 23, 24, 26, 27, 28, 30,
  22, 23, 24, 26, 27, 28, 30, 31,
  23, 24, 25, 27, 28, 30, 31, 33,
};

/* Zig-zag scan order used to de-serialise quant matrices from the
 * bitstream into raster order. */
const guint8 mpeg4_zigzag_8x8[64] = {
  0, 1, 8, 16, 9, 2, 3, 10,
  17, 24, 32, 25, 18, 11, 4, 5,
  12, 19, 26, 33, 40, 48, 41, 34,
  27, 20, 13, 6, 7, 14, 21, 28,
  35, 42, 49, 56, 57, 50, 43, 36,
  29, 22, 15, 23, 30, 37, 44, 51,
  58, 59, 52, 45, 38, 31, 39, 46,
  53, 60, 61, 54, 47, 55, 62, 63
};
/* Parse a "Video Object Plane" (picture) header from @buf into @vop,
 * using field widths from the previously-parsed VOL header @vol.
 * Returns FALSE on wrong start code or truncated data (the READ_*/SKIP
 * macros jump to `error:`); fields not present for the coding type keep
 * their defaults set at the top. */
gboolean
mpeg4_util_parse_VOP (GstBuffer * buf, Mpeg4VideoObjectLayer * vol,
    Mpeg4VideoObjectPlane * vop)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);

  guint8 vop_start_code;
  guint8 modulo_time_base;

  /* set default values */
  vop->modulo_time_base = 0;
  vop->rounding_type = 0;
  vop->top_field_first = 1;
  vop->alternate_vertical_scan_flag = 0;
  vop->fcode_forward = 1;
  vop->fcode_backward = 1;

  /* start code prefix */
  SKIP (&reader, 24);

  READ_UINT8 (&reader, vop_start_code, 8);
  if (vop_start_code != MPEG4_PACKET_VOP)
    goto wrong_start_code;

  READ_UINT8 (&reader, vop->coding_type, 2);

  /* modulo_time_base is a run of 1-bits terminated by a 0-bit */
  READ_UINT8 (&reader, modulo_time_base, 1);
  while (modulo_time_base) {
    vop->modulo_time_base++;

    READ_UINT8 (&reader, modulo_time_base, 1);
  }

  /* marker bit */
  SKIP (&reader, 1);
  READ_UINT16 (&reader, vop->time_increment, vol->vop_time_increment_bits);
  /* marker bit */
  SKIP (&reader, 1);

  READ_UINT8 (&reader, vop->coded, 1);
  if (!vop->coded)
    return TRUE;

  if (vop->coding_type == P_VOP)
    READ_UINT8 (&reader, vop->rounding_type, 1);

  READ_UINT8 (&reader, vop->intra_dc_vlc_thr, 3);

  if (vol->interlaced) {
    READ_UINT8 (&reader, vop->top_field_first, 1);
    READ_UINT8 (&reader, vop->alternate_vertical_scan_flag, 1);
  }

  READ_UINT16 (&reader, vop->quant, vol->quant_precision);

  if (vop->coding_type != I_VOP) {
    READ_UINT8 (&reader, vop->fcode_forward, 3);
    CHECK_ALLOWED (vop->fcode_forward, 1, 7);
  }

  if (vop->coding_type == B_VOP) {
    READ_UINT8 (&reader, vop->fcode_backward, 3);
    CHECK_ALLOWED (vop->fcode_backward, 1, 7);
  }

  return TRUE;

error:
  GST_WARNING ("error parsing \"Video Object Plane\"");
  return FALSE;

wrong_start_code:
  GST_WARNING ("got buffer with wrong start code");
  goto error;
}
/* Parse a "Group of Video Object Plane" header from @buf into @gov
 * (timecode plus closed/broken-link flags).  Returns FALSE on wrong
 * start code or truncated data. */
gboolean
mpeg4_util_parse_GOV (GstBuffer * buf, Mpeg4GroupofVideoObjectPlane * gov)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);

  guint8 gov_start_code;

  /* start code prefix */
  SKIP (&reader, 24);

  READ_UINT8 (&reader, gov_start_code, 8);
  if (gov_start_code != MPEG4_PACKET_GOV)
    goto wrong_start_code;

  /* timecode: hours / minutes / (marker) / seconds */
  READ_UINT8 (&reader, gov->hours, 5);
  READ_UINT8 (&reader, gov->minutes, 6);
  /* marker bit */
  SKIP (&reader, 1);
  READ_UINT8 (&reader, gov->seconds, 6);

  READ_UINT8 (&reader, gov->closed, 1);
  READ_UINT8 (&reader, gov->broken_link, 1);

  return TRUE;

error:
  GST_WARNING ("error parsing \"Group of Video Object Plane\"");
  return FALSE;

wrong_start_code:
  GST_WARNING ("got buffer with wrong start code");
  goto error;
}
/* Map an MPEG-4 aspect_ratio_info code to a pixel aspect ratio.
 * Codes 0x02-0x05 use the table below; 0x01 and every other code map to
 * square pixels (1:1). */
static void
mpeg4_util_par_from_info (guint8 aspect_ratio_info, guint8 * par_n,
    guint8 * par_d)
{
  /* {numerator, denominator} for codes 0x02, 0x03, 0x04, 0x05 */
  static const guint8 par_table[4][2] = {
    {12, 11}, {10, 11}, {16, 11}, {40, 33}
  };

  if (aspect_ratio_info >= 0x02 && aspect_ratio_info <= 0x05) {
    *par_n = par_table[aspect_ratio_info - 0x02][0];
    *par_d = par_table[aspect_ratio_info - 0x02][1];
  } else {
    *par_n = 1;
    *par_d = 1;
  }
}
/* Read one quantisation matrix from the bitstream.  If the
 * load_quant_mat flag is clear, @default_quant_mat is copied instead.
 * Values arrive in zig-zag order; a zero byte terminates the list and
 * the remaining raster positions repeat the previous zig-zag value.
 * Returns FALSE on truncated data or if the very first value is zero. */
static gboolean
mpeg4_util_parse_quant (GstBitReader * reader, guint8 quant_mat[64],
    const guint8 default_quant_mat[64])
{
  guint8 load_quant_mat;

  READ_UINT8 (reader, load_quant_mat, 1);
  if (load_quant_mat) {
    guint i;
    guint8 val;

    val = 1;
    for (i = 0; i < 64; i++) {

      if (val != 0)
        READ_UINT8 (reader, val, 8);

      if (val == 0) {
        if (i == 0)
          goto invalid_quant_mat;
        /* zero terminator: repeat the previous value for the rest */
        quant_mat[mpeg4_zigzag_8x8[i]] = quant_mat[mpeg4_zigzag_8x8[i - 1]];
      } else
        quant_mat[mpeg4_zigzag_8x8[i]] = val;
    }
  } else
    memcpy (quant_mat, default_quant_mat, 64);

  return TRUE;

error:
  GST_WARNING ("error parsing quant matrix");
  return FALSE;

invalid_quant_mat:
  GST_WARNING ("the first value should be non zero");
  goto error;
}
/* Parse a "Video Object Layer" header from @buf into @vol, with defaults
 * inherited from the enclosing Visual Object @vo.  Only rectangular
 * shape, sprite_enable == 0 and disabled complexity estimation are
 * supported; anything else fails.  Returns FALSE on any parse error
 * (the READ_*/SKIP/CHECK_ALLOWED macros jump to `error:`). */
gboolean
mpeg4_util_parse_VOL (GstBuffer * buf, Mpeg4VisualObject * vo,
    Mpeg4VideoObjectLayer * vol)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);

  guint8 video_object_layer_start_code;
  guint8 aspect_ratio_info;
  guint8 control_parameters;
  guint8 not_8_bit;

  /* set default values */
  vol->verid = vo->verid;
  vol->priority = vo->priority;

  vol->low_delay = FALSE;
  vol->chroma_format = 1;
  vol->vbv_parameters = FALSE;
  vol->quant_precision = 5;
  vol->bits_per_pixel = 8;
  vol->quarter_sample = FALSE;

  /* start code prefix */
  SKIP (&reader, 24);

  READ_UINT8 (&reader, video_object_layer_start_code, 8);
  if (!(video_object_layer_start_code >= MPEG4_PACKET_VOL_MIN &&
          video_object_layer_start_code <= MPEG4_PACKET_VOL_MAX))
    goto wrong_start_code;

  READ_UINT8 (&reader, vol->random_accesible_vol, 1);
  READ_UINT8 (&reader, vol->video_object_type_indication, 8);

  READ_UINT8 (&reader, vol->is_object_layer_identifier, 1);
  if (vol->is_object_layer_identifier) {
    READ_UINT8 (&reader, vol->verid, 4);
    READ_UINT8 (&reader, vol->priority, 3);
  }

  /* pixel aspect ratio: table code, or 0xff for explicit values */
  READ_UINT8 (&reader, aspect_ratio_info, 4);
  if (aspect_ratio_info != 0xff)
    mpeg4_util_par_from_info (aspect_ratio_info, &vol->par_n, &vol->par_d);

  else {
    READ_UINT8 (&reader, vol->par_n, 8);
    CHECK_ALLOWED (vol->par_n, 1, 255);
    READ_UINT8 (&reader, vol->par_d, 8);
    CHECK_ALLOWED (vol->par_d, 1, 255);
  }

  READ_UINT8 (&reader, control_parameters, 1);
  if (control_parameters) {
    READ_UINT8 (&reader, vol->chroma_format, 2);
    READ_UINT8 (&reader, vol->low_delay, 1);

    READ_UINT8 (&reader, vol->vbv_parameters, 1);
    if (vol->vbv_parameters) {
      guint16 first_half, latter_half;
      guint8 latter_part;

      /* bit_rate: 15 + 15 bits split by a marker */
      READ_UINT16 (&reader, first_half, 15);
      SKIP (&reader, 1);
      READ_UINT16 (&reader, latter_half, 15);
      SKIP (&reader, 1);
      vol->bit_rate = (first_half << 15) | latter_half;

      /* vbv_buffer_size: 15 + 3 bits split by a marker */
      READ_UINT16 (&reader, first_half, 15);
      SKIP (&reader, 1);
      READ_UINT8 (&reader, latter_part, 3);
      SKIP (&reader, 1);
      vol->vbv_buffer_size = (first_half << 15) | latter_part;
    }
  }

  READ_UINT8 (&reader, vol->shape, 2);
  if (vol->shape != 0x0)
    goto invalid_shape;

  /* marker_bit */
  SKIP (&reader, 1);
  READ_UINT16 (&reader, vol->vop_time_increment_resolution, 16);
  CHECK_ALLOWED (vol->vop_time_increment_resolution, 1, G_MAXUINT16);
  /* number of bits needed to code a time increment */
  vol->vop_time_increment_bits =
      g_bit_storage (vol->vop_time_increment_resolution);

  /* marker_bit */
  SKIP (&reader, 1);
  READ_UINT8 (&reader, vol->fixed_vop_rate, 1);
  if (vol->fixed_vop_rate)
    READ_UINT16 (&reader, vol->fixed_vop_time_increment,
        vol->vop_time_increment_bits);

  /* marker bit */
  SKIP (&reader, 1);
  READ_UINT16 (&reader, vol->width, 13);
  /* marker bit */
  SKIP (&reader, 1);
  READ_UINT16 (&reader, vol->height, 13);
  /* marker bit */
  SKIP (&reader, 1);

  READ_UINT8 (&reader, vol->interlaced, 1);
  READ_UINT8 (&reader, vol->obmc_disable, 1);

  /* sprite_enable is 1 bit in version 1, 2 bits otherwise */
  if (vol->verid == 0x1) {
    READ_UINT8 (&reader, vol->sprite_enable, 1);
  } else
    READ_UINT8 (&reader, vol->sprite_enable, 2);

  if (vol->sprite_enable != 0x0)
    goto invalid_sprite_enable;

  READ_UINT8 (&reader, not_8_bit, 1);
  if (not_8_bit) {
    READ_UINT8 (&reader, vol->quant_precision, 4);
    CHECK_ALLOWED (vol->quant_precision, 3, 9);

    READ_UINT8 (&reader, vol->bits_per_pixel, 4);
    CHECK_ALLOWED (vol->bits_per_pixel, 4, 12);
  }

  READ_UINT8 (&reader, vol->quant_type, 1);
  if (vol->quant_type) {
    if (!mpeg4_util_parse_quant (&reader, vol->intra_quant_mat,
            default_intra_quant_mat))
      goto error;

    if (!mpeg4_util_parse_quant (&reader, vol->non_intra_quant_mat,
            default_non_intra_quant_mat))
      goto error;
  } else {
    memset (&vol->intra_quant_mat, 0, 64);
    memset (&vol->non_intra_quant_mat, 0, 64);
  }

  if (vol->verid != 0x1)
    READ_UINT8 (&reader, vol->quarter_sample, 1);

  READ_UINT8 (&reader, vol->complexity_estimation_disable, 1);
  if (!vol->complexity_estimation_disable)
    goto complexity_estimation_error;

  READ_UINT8 (&reader, vol->resync_marker_disable, 1);

  return TRUE;

error:
  GST_WARNING ("error parsing \"Video Object Layer\"");
  return FALSE;

wrong_start_code:
  GST_WARNING ("got buffer with wrong start code");
  goto error;

invalid_shape:
  GST_WARNING ("we only support rectangular shape");
  goto error;

invalid_sprite_enable:
  GST_WARNING ("we only support sprite_enable == 0");
  goto error;

complexity_estimation_error:
  GST_WARNING ("don't support complexity estimation");
  goto error;
}
/* Parse a "Visual Object" header from @buf into @vo (version id,
 * priority and object type).  Returns FALSE on wrong start code or
 * truncated data; the `error:` label is reached only via the READ_UINT8
 * macro. */
gboolean
mpeg4_util_parse_VO (GstBuffer * buf, Mpeg4VisualObject * vo)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);

  guint8 visual_object_start_code;
  guint8 is_visual_object_identifier;

  /* set defualt values */
  vo->verid = 0x1;
  vo->priority = 1;

  /* start code prefix */
  SKIP (&reader, 24);

  READ_UINT8 (&reader, visual_object_start_code, 8);
  if (visual_object_start_code != MPEG4_PACKET_VO)
    goto wrong_start_code;

  READ_UINT8 (&reader, is_visual_object_identifier, 1);
  if (is_visual_object_identifier) {
    READ_UINT8 (&reader, vo->verid, 4);
    READ_UINT8 (&reader, vo->priority, 3);
  }

  READ_UINT8 (&reader, vo->type, 4);

  return TRUE;

wrong_start_code:
  GST_WARNING ("got buffer with wrong start code");
  return FALSE;

error:
  GST_WARNING ("error parsing \"Visual Object\"");
  return FALSE;
}
/* Parse a "Visual Object Sequence" header from @buf into @vos.  Only the
 * profile_and_level_indication byte is extracted.  Returns FALSE on
 * wrong start code or truncated data.
 *
 * Fix: the parse-error diagnostic said "Visual Object" (copy-pasted from
 * mpeg4_util_parse_VO); it now names the packet actually being parsed. */
gboolean
mpeg4_util_parse_VOS (GstBuffer * buf, Mpeg4VisualObjectSequence * vos)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);

  guint8 visual_object_sequence_start_code;

  /* start code prefix */
  SKIP (&reader, 24);

  READ_UINT8 (&reader, visual_object_sequence_start_code, 8);
  if (visual_object_sequence_start_code != MPEG4_PACKET_VOS)
    goto wrong_start_code;

  READ_UINT8 (&reader, vos->profile_and_level_indication, 8);

  return TRUE;

wrong_start_code:
  GST_WARNING ("got buffer with wrong start code");
  return FALSE;

error:
  GST_WARNING ("error parsing \"Visual Object Sequence\"");
  return FALSE;
}

View file

@ -1,179 +0,0 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_MPEG4UTIL_H__
#define __GST_MPEG4UTIL_H__
#include <gst/gst.h>
#include <gst/base/gstbitreader.h>
/* Range-check helper: if @val is outside [@min, @max], log a warning and
 * jump to the "error:" label that the calling function must provide. */
#define CHECK_ALLOWED(val, min, max) { \
if (val < min || val > max) { \
GST_WARNING ("value not in allowed range. value: %d, range %d-%d", \
val, min, max); \
goto error; \
} \
}
/* Read @nbits (up to 8) from the bit reader into @val; on a short buffer
 * log a warning and jump to the caller's "error:" label. */
#define READ_UINT8(reader, val, nbits) { \
if (!gst_bit_reader_get_bits_uint8 (reader, &val, nbits)) { \
GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
goto error; \
} \
}
/* Same as READ_UINT8 but reads up to 16 bits into a guint16. */
#define READ_UINT16(reader, val, nbits) { \
if (!gst_bit_reader_get_bits_uint16 (reader, &val, nbits)) { \
GST_WARNING ("failed to read uint16, nbits: %d", nbits); \
goto error; \
} \
}
/* Same as READ_UINT8 but reads up to 32 bits into a guint32. */
#define READ_UINT32(reader, val, nbits) { \
if (!gst_bit_reader_get_bits_uint32 (reader, &val, nbits)) { \
GST_WARNING ("failed to read uint32, nbits: %d", nbits); \
goto error; \
} \
}
/* Same as READ_UINT8 but reads up to 64 bits into a guint64. */
#define READ_UINT64(reader, val, nbits) { \
if (!gst_bit_reader_get_bits_uint64 (reader, &val, nbits)) { \
GST_WARNING ("failed to read uint64, nbits: %d", nbits); \
goto error; \
} \
}
/* Skip @nbits in the bit reader; on a short buffer log a warning and
 * jump to the caller's "error:" label. */
#define SKIP(reader, nbits) { \
if (!gst_bit_reader_skip (reader, nbits)) { \
GST_WARNING ("failed to skip nbits: %d", nbits); \
goto error; \
} \
}
/* Forward typedefs for the parsed-header structs defined below. */
typedef struct _Mpeg4VisualObjectSequence Mpeg4VisualObjectSequence;
typedef struct _Mpeg4VisualObject Mpeg4VisualObject;
typedef struct _Mpeg4VideoObjectLayer Mpeg4VideoObjectLayer;
typedef struct _Mpeg4GroupofVideoObjectPlane Mpeg4GroupofVideoObjectPlane;
typedef struct _Mpeg4VideoObjectPlane Mpeg4VideoObjectPlane;
/* Start codes (the byte following the 0x000001 prefix) for the packet
 * types handled by the mpeg4_util_parse_* functions below. */
#define MPEG4_PACKET_VOL_MIN 0x20       /* Video Object Layer: inclusive range */
#define MPEG4_PACKET_VOL_MAX 0x2f
#define MPEG4_PACKET_VOS 0xb0           /* Visual Object Sequence */
#define MPEG4_PACKET_EVOS 0xb1          /* End of Visual Object Sequence */
#define MPEG4_PACKET_GOV 0xb3           /* Group of Video Object Plane */
#define MPEG4_PACKET_VO 0xb5            /* Visual Object */
#define MPEG4_PACKET_VOP 0xb6           /* Video Object Plane */
/* VOP coding types (Mpeg4VideoObjectPlane.coding_type values). */
#define I_VOP 0x0
#define P_VOP 0x1
#define B_VOP 0x2
#define S_VOP 0x3
/* Fields parsed from a "Visual Object Sequence" header
 * (see mpeg4_util_parse_VOS). */
struct _Mpeg4VisualObjectSequence {
  guint8 profile_and_level_indication;
};
/* Fields parsed from a "Visual Object" header (see mpeg4_util_parse_VO).
 * verid defaults to 0x1 and priority to 1 when the optional identifier
 * is not coded in the stream. */
struct _Mpeg4VisualObject {
  guint8 verid;
  guint8 priority;
  guint8 type;
};
/* Fields parsed from a "Video Object Layer" header
 * (see mpeg4_util_parse_VOL). Fields below an "if <flag>" comment are
 * only meaningful when that flag is non-zero. */
struct _Mpeg4VideoObjectLayer {
  guint8 random_accesible_vol;  /* NB: field name keeps upstream's spelling */
  guint8 video_object_type_indication;
  guint8 is_object_layer_identifier;
  /* if is_object_layer_identifier */
  guint8 verid;
  guint8 priority;
  /* pixel aspect ratio, numerator/denominator */
  guint8 par_n;
  guint8 par_d;
  guint8 chroma_format;
  guint8 low_delay;
  guint8 vbv_parameters;
  /* if vbv_parameters */
  guint32 bit_rate;
  guint32 vbv_buffer_size;
  guint8 shape;
  guint16 vop_time_increment_resolution;
  guint8 vop_time_increment_bits;
  guint8 fixed_vop_rate;
  /* if fixed_vop_rate */
  guint16 fixed_vop_time_increment;
  guint16 width;
  guint16 height;
  guint8 interlaced;
  guint8 obmc_disable;
  guint8 sprite_enable;         /* only 0 is supported by the parser */
  guint8 quant_precision;
  guint8 bits_per_pixel;
  guint8 quant_type;
  /* quantization matrices, 64 coefficients each */
  guint8 intra_quant_mat[64];
  guint8 non_intra_quant_mat[64];
  guint8 quarter_sample;
  guint8 complexity_estimation_disable; /* parser rejects streams where this is 0 */
  guint8 resync_marker_disable;
};
/* Fields parsed from a "Group of Video Object Plane" header
 * (see mpeg4_util_parse_GOV). hours/minutes/seconds form the GOV
 * time code. */
struct _Mpeg4GroupofVideoObjectPlane {
  guint8 hours;
  guint8 minutes;
  guint8 seconds;
  guint8 closed;
  guint8 broken_link;
};
/* Fields parsed from a "Video Object Plane" header
 * (see mpeg4_util_parse_VOP). coding_type is one of I_VOP/P_VOP/
 * B_VOP/S_VOP. */
struct _Mpeg4VideoObjectPlane {
  guint8 coding_type;
  guint8 modulo_time_base;
  guint16 time_increment;
  guint8 coded;
  guint8 rounding_type;
  guint8 intra_dc_vlc_thr;
  guint8 top_field_first;
  guint8 alternate_vertical_scan_flag;
  guint16 quant;
  guint8 fcode_forward;
  guint8 fcode_backward;
};
/* Parsers for the individual MPEG-4 packet types. Each one takes a
 * buffer containing a single start-code-prefixed packet, fills in the
 * out struct(s), and returns TRUE on success or FALSE on a wrong start
 * code / truncated buffer. */
gboolean mpeg4_util_parse_VOP (GstBuffer *buf, Mpeg4VideoObjectLayer *vol, Mpeg4VideoObjectPlane *vop);
gboolean mpeg4_util_parse_GOV (GstBuffer *buf, Mpeg4GroupofVideoObjectPlane *gov);
gboolean mpeg4_util_parse_VOL (GstBuffer *buf, Mpeg4VisualObject *vo, Mpeg4VideoObjectLayer *vol);
gboolean mpeg4_util_parse_VO (GstBuffer *buf, Mpeg4VisualObject *vo);
gboolean mpeg4_util_parse_VOS (GstBuffer *buf, Mpeg4VisualObjectSequence *vos);
#endif /* __GST_MPEG4UTIL_H__ */

View file

@ -18,8 +18,7 @@ AM_TESTS_ENVIRONMENT += \
decklinksrc \
decklinksink dtlssrtpdec dtlssrtpenc dvbsrc dvbbasebin dfbvideosink festival fluidsynth \
kmssink \
rsndvdbin rfbsrc vdpauyuvvideo vdpauvideoyuv \
vdpaumpegdec vdpaumpeg4dec vdpauh264dec vdpauvideopostprocess vdpausink neonhttpsrc waylandsink"
rsndvdbin rfbsrc neonhttpsrc waylandsink"
plugindir = $(libdir)/gstreamer-@GST_API_VERSION@